/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
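
/* Example: with a 32-bit HOST_WIDE_INT, a = 0x7fffffff and b = 1 wrap
   to sum = 0x80000000.  Then ~(a ^ b) = 0x80000001 and
   a ^ sum = 0xffffffff, whose AND is 0x80000001, which is negative,
   so the macro reports the overflow.  For a = 1, b = -1, sum = 0 the
   AND is 1, which is not negative, so no overflow is reported.  */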
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
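
/* Example: with HOST_BITS_PER_WIDE_INT == 32, BASE is 2^16 and the
   two-word value with hi = 0x12345678, low = 0x9abcdef0 is stored as
   the four halfwords 0xdef0, 0x9abc, 0x5678, 0x1234 (least significant
   first), so that the original pieces can be recovered as
   words[0] + words[1] * BASE and words[2] + words[3] * BASE.  */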
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
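
/* Illustrative sketch (not part of the original file): encode and
   decode are exact inverses, so a two-word value survives a round
   trip unchanged:

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo2;
     HOST_WIDE_INT hi2;

     encode (words, low, hi);
     decode (words, &lo2, &hi2);
     gcc_assert (lo2 == low && hi2 == hi);

   mul_double below relies on this layout, accumulating partial
   products into an 8-halfword buffer and decoding only the low
   half of it.  */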
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOW if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
   Return nonzero if the signed addition overflows.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
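
/* Example: adding 1 to the two-word value with l1 == ~(unsigned
   HOST_WIDE_INT) 0 and h1 == 0 carries out of the low word: the
   result is *lv == 0, *hv == 1, and the return value is 0 because
   the signed doubleword sum did not overflow.  */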
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
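
/* Example: with 32-bit HOST_WIDE_INT, squaring the two-word value
   2^32 (l1 == 0, h1 == 1) produces 2^64, whose low two words are
   zero; the discarded top half (prod[4..7]) is nonzero and does not
   match the low half's sign bit, so the routine returns nonzero to
   signal signed overflow.  */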
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
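
/* Example: lshift_double (1, 0, 3, 2 * HOST_BITS_PER_WIDE_INT,
   &lv, &hv, 0) yields lv == 8, hv == 0; since PREC covers both
   words, no masking is applied.  When the shifted result sets the
   sign bit of the PREC-bit field, the signmask logic above copies
   that bit into every position beyond PREC.  */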
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
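
/* Example: a rotate is just the two logical shifts above ORed
   together.  With prec == 8, lrotate_double (0x81, 0, 1, 8, &lv, &hv)
   computes (0x81 << 1) masked to 8 bits, i.e. 0x02, ORed with
   0x81 >> 7, i.e. 0x01, giving lv == 0x03.  */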
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
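
/* Usage sketch (illustrative only): dividing -7 by 2 in signed
   doubleword arithmetic shows how CODE selects the rounding:

     unsigned HOST_WIDE_INT lq, lr;
     HOST_WIDE_INT hq, hr;

     div_and_round_double (TRUNC_DIV_EXPR, 0,
                           (unsigned HOST_WIDE_INT) -7, -1, 2, 0,
                           &lq, &hq, &lr, &hr);   quotient -3, remainder -1
     div_and_round_double (FLOOR_DIV_EXPR, 0,
                           (unsigned HOST_WIDE_INT) -7, -1, 2, 0,
                           &lq, &hq, &lr, &hr);   quotient -4, remainder 1

   In both cases quotient * 2 + remainder reconstructs -7.  */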
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
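
/* The functions listed are exactly the odd functions among these
   built-ins: sin (-x) == -sin (x), and likewise for tan, asin and
   atan, so fold may push a negation into the call's argument.  Even
   functions such as cos, where cos (-x) == cos (x), must not appear
   in the list.  */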
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
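
/* Example: for a 32-bit signed type this returns false only for
   INT_MIN (-2147483648), whose negation 2147483648 is not
   representable; every other value, including INT_MAX, negates
   safely in two's complement.  */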
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
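
/* Example: splitting IN == `a + 4' with CODE == PLUS_EXPR stores 4
   in *LITP, leaves *CONP null, and returns `a'.  For IN == `a - 4'
   the literal is subtracted, so it is stored in *MINUS_LITP instead
   and *LITP stays null.  */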
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
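
/* Usage sketch (illustrative only; the variable names are made up):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three, 0);

   FIVE is an INTEGER_CST of value 5 with no overflow flags set;
   passing NOTRUNC == 0 lets force_fit_type truncate the result to
   the precision of the argument type.  */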
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
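
/* Example: sizetype is unsigned, so size_diffop of the constants 4
   and 8 does not rely on wraparound: it computes 8 - 4 == 4 in
   sizetype, converts that to ssizetype, and negates, yielding -4 in
   the signed type.  */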
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
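
/* Example: converting the REAL_CST 1.0e30 to a 32-bit signed type
   with FIX_TRUNC_EXPR exceeds TYPE_MAX_VALUE, so the result
   saturates to INT_MAX and force_fit_type marks it with
   TREE_OVERFLOW; a NaN argument produces 0, also flagged as
   overflowed.  */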
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1879 /* Convert expression ARG to type TYPE. Used by the middle-end for
1880 simple conversions in preference to calling the front-end's convert. */
1882 tree
1883 fold_convert (tree type, tree arg)
1885 tree orig = TREE_TYPE (arg);
1886 tree tem;
1888 if (type == orig)
1889 return arg;
1891 if (TREE_CODE (arg) == ERROR_MARK
1892 || TREE_CODE (type) == ERROR_MARK
1893 || TREE_CODE (orig) == ERROR_MARK)
1894 return error_mark_node;
1896 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1897 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1898 TYPE_MAIN_VARIANT (orig)))
1899 return fold_build1 (NOP_EXPR, type, arg);
1901 switch (TREE_CODE (type))
1903 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 if (TREE_CODE (arg) == INTEGER_CST)
1908 tem = fold_convert_const (NOP_EXPR, type, arg);
1909 if (tem != NULL_TREE)
1910 return tem;
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1913 || TREE_CODE (orig) == OFFSET_TYPE)
1914 return fold_build1 (NOP_EXPR, type, arg);
1915 if (TREE_CODE (orig) == COMPLEX_TYPE)
1917 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1918 return fold_convert (type, tem);
1920 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922 return fold_build1 (NOP_EXPR, type, arg);
1924 case REAL_TYPE:
1925 if (TREE_CODE (arg) == INTEGER_CST)
1927 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1931 else if (TREE_CODE (arg) == REAL_CST)
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1938 switch (TREE_CODE (orig))
1940 case INTEGER_TYPE: case CHAR_TYPE:
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold_build1 (FLOAT_EXPR, type, arg);
1945 case REAL_TYPE:
1946 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1947 type, arg);
1949 case COMPLEX_TYPE:
1950 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1951 return fold_convert (type, tem);
1953 default:
1954 gcc_unreachable ();
1957 case COMPLEX_TYPE:
1958 switch (TREE_CODE (orig))
1960 case INTEGER_TYPE: case CHAR_TYPE:
1961 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1962 case POINTER_TYPE: case REFERENCE_TYPE:
1963 case REAL_TYPE:
1964 return build2 (COMPLEX_EXPR, type,
1965 fold_convert (TREE_TYPE (type), arg),
1966 fold_convert (TREE_TYPE (type), integer_zero_node));
1967 case COMPLEX_TYPE:
1969 tree rpart, ipart;
1971 if (TREE_CODE (arg) == COMPLEX_EXPR)
1973 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1974 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1975 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1978 arg = save_expr (arg);
1979 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1980 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
1981 rpart = fold_convert (TREE_TYPE (type), rpart);
1982 ipart = fold_convert (TREE_TYPE (type), ipart);
1983 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1986 default:
1987 gcc_unreachable ();
1990 case VECTOR_TYPE:
1991 if (integer_zerop (arg))
1992 return build_zero_vector (type);
1993 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1994 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1995 || TREE_CODE (orig) == VECTOR_TYPE);
1996 return fold_build1 (NOP_EXPR, type, arg);
1998 case VOID_TYPE:
1999 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2001 default:
2002 gcc_unreachable ();
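/* Illustration only, not part of GCC: the conversion shapes the
   COMPLEX_TYPE cases above build, restated with C99 complex values
   instead of trees.  A scalar becomes a COMPLEX_EXPR pairing the
   converted value with zero; a complex value converts to a scalar
   through its real part alone (the REALPART_EXPR path).  Assumes a
   C99 <complex.h>; the helper names are hypothetical.  */
#include <complex.h>

static double _Complex
int_to_complex_sketch (int x)
{
  return (double) x + 0.0 * _Complex_I;   /* COMPLEX_EXPR (x, 0) */
}

static int
complex_to_int_sketch (double _Complex z)
{
  return (int) creal (z);                 /* convert the REALPART only */
}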
2006 /* Return false if expr can be assumed not to be an lvalue, true
2007 otherwise. */
2009 static bool
2010 maybe_lvalue_p (tree x)
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2015 case VAR_DECL:
2016 case PARM_DECL:
2017 case RESULT_DECL:
2018 case LABEL_DECL:
2019 case FUNCTION_DECL:
2020 case SSA_NAME:
2022 case COMPONENT_REF:
2023 case INDIRECT_REF:
2024 case ALIGN_INDIRECT_REF:
2025 case MISALIGNED_INDIRECT_REF:
2026 case ARRAY_REF:
2027 case ARRAY_RANGE_REF:
2028 case BIT_FIELD_REF:
2029 case OBJ_TYPE_REF:
2031 case REALPART_EXPR:
2032 case IMAGPART_EXPR:
2033 case PREINCREMENT_EXPR:
2034 case PREDECREMENT_EXPR:
2035 case SAVE_EXPR:
2036 case TRY_CATCH_EXPR:
2037 case WITH_CLEANUP_EXPR:
2038 case COMPOUND_EXPR:
2039 case MODIFY_EXPR:
2040 case TARGET_EXPR:
2041 case COND_EXPR:
2042 case BIND_EXPR:
2043 case MIN_EXPR:
2044 case MAX_EXPR:
2045 break;
2047 default:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2050 break;
2051 return false;
2054 return true;
2057 /* Return an expr equal to X but certainly not valid as an lvalue. */
2059 tree
2060 non_lvalue (tree x)
2062 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2063 us. */
2064 if (in_gimple_form)
2065 return x;
2067 if (! maybe_lvalue_p (x))
2068 return x;
2069 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2072 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2073 Zero means allow extended lvalues. */
2075 int pedantic_lvalues;
2077 /* When pedantic, return an expr equal to X but certainly not valid as a
2078 pedantic lvalue. Otherwise, return X. */
2080 static tree
2081 pedantic_non_lvalue (tree x)
2083 if (pedantic_lvalues)
2084 return non_lvalue (x);
2085 else
2086 return x;
2089 /* Given a tree comparison code, return the code that is the logical inverse
2090 of the given code. It is not safe to do this for floating-point
2091 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS flag
2092 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2094 static enum tree_code
2095 invert_tree_comparison (enum tree_code code, bool honor_nans)
2097 if (honor_nans && flag_trapping_math)
2098 return ERROR_MARK;
2100 switch (code)
2102 case EQ_EXPR:
2103 return NE_EXPR;
2104 case NE_EXPR:
2105 return EQ_EXPR;
2106 case GT_EXPR:
2107 return honor_nans ? UNLE_EXPR : LE_EXPR;
2108 case GE_EXPR:
2109 return honor_nans ? UNLT_EXPR : LT_EXPR;
2110 case LT_EXPR:
2111 return honor_nans ? UNGE_EXPR : GE_EXPR;
2112 case LE_EXPR:
2113 return honor_nans ? UNGT_EXPR : GT_EXPR;
2114 case LTGT_EXPR:
2115 return UNEQ_EXPR;
2116 case UNEQ_EXPR:
2117 return LTGT_EXPR;
2118 case UNGT_EXPR:
2119 return LE_EXPR;
2120 case UNGE_EXPR:
2121 return LT_EXPR;
2122 case UNLT_EXPR:
2123 return GE_EXPR;
2124 case UNLE_EXPR:
2125 return GT_EXPR;
2126 case ORDERED_EXPR:
2127 return UNORDERED_EXPR;
2128 case UNORDERED_EXPR:
2129 return ORDERED_EXPR;
2130 default:
2131 gcc_unreachable ();
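/* Illustration only, not part of GCC: why the inversion above must
   switch to the UN* codes when NaNs are honored.  With a NaN operand
   both x < y and x >= y are false, so the logical negation of LT is
   UNGE ("unordered or >="), not GE.  Returns 1 when the identity
   holds; a standalone sketch assuming IEEE semantics.  */
static int
invert_lt_is_unge_sketch (double x, double y)
{
  int not_lt = !(x < y);
  int unge = x >= y || x != x || y != y;   /* UNGE_EXPR spelled out */
  return not_lt == unge;
}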
2135 /* Similar, but return the comparison that results if the operands are
2136 swapped. This is safe for floating-point. */
2138 enum tree_code
2139 swap_tree_comparison (enum tree_code code)
2141 switch (code)
2143 case EQ_EXPR:
2144 case NE_EXPR:
2145 return code;
2146 case GT_EXPR:
2147 return LT_EXPR;
2148 case GE_EXPR:
2149 return LE_EXPR;
2150 case LT_EXPR:
2151 return GT_EXPR;
2152 case LE_EXPR:
2153 return GE_EXPR;
2154 default:
2155 gcc_unreachable ();
2160 /* Convert a comparison tree code from an enum tree_code representation
2161 into a compcode bit-based encoding. This function is the inverse of
2162 compcode_to_comparison. */
2164 static enum comparison_code
2165 comparison_to_compcode (enum tree_code code)
2167 switch (code)
2169 case LT_EXPR:
2170 return COMPCODE_LT;
2171 case EQ_EXPR:
2172 return COMPCODE_EQ;
2173 case LE_EXPR:
2174 return COMPCODE_LE;
2175 case GT_EXPR:
2176 return COMPCODE_GT;
2177 case NE_EXPR:
2178 return COMPCODE_NE;
2179 case GE_EXPR:
2180 return COMPCODE_GE;
2181 case ORDERED_EXPR:
2182 return COMPCODE_ORD;
2183 case UNORDERED_EXPR:
2184 return COMPCODE_UNORD;
2185 case UNLT_EXPR:
2186 return COMPCODE_UNLT;
2187 case UNEQ_EXPR:
2188 return COMPCODE_UNEQ;
2189 case UNLE_EXPR:
2190 return COMPCODE_UNLE;
2191 case UNGT_EXPR:
2192 return COMPCODE_UNGT;
2193 case LTGT_EXPR:
2194 return COMPCODE_LTGT;
2195 case UNGE_EXPR:
2196 return COMPCODE_UNGE;
2197 default:
2198 gcc_unreachable ();
2202 /* Convert a compcode bit-based encoding of a comparison operator back
2203 to GCC's enum tree_code representation. This function is the
2204 inverse of comparison_to_compcode. */
2206 static enum tree_code
2207 compcode_to_comparison (enum comparison_code code)
2209 switch (code)
2211 case COMPCODE_LT:
2212 return LT_EXPR;
2213 case COMPCODE_EQ:
2214 return EQ_EXPR;
2215 case COMPCODE_LE:
2216 return LE_EXPR;
2217 case COMPCODE_GT:
2218 return GT_EXPR;
2219 case COMPCODE_NE:
2220 return NE_EXPR;
2221 case COMPCODE_GE:
2222 return GE_EXPR;
2223 case COMPCODE_ORD:
2224 return ORDERED_EXPR;
2225 case COMPCODE_UNORD:
2226 return UNORDERED_EXPR;
2227 case COMPCODE_UNLT:
2228 return UNLT_EXPR;
2229 case COMPCODE_UNEQ:
2230 return UNEQ_EXPR;
2231 case COMPCODE_UNLE:
2232 return UNLE_EXPR;
2233 case COMPCODE_UNGT:
2234 return UNGT_EXPR;
2235 case COMPCODE_LTGT:
2236 return LTGT_EXPR;
2237 case COMPCODE_UNGE:
2238 return UNGE_EXPR;
2239 default:
2240 gcc_unreachable ();
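/* Illustration only, not part of GCC: the bit meaning behind the two
   translation tables above.  Each compcode is a set over the four
   possible outcomes of a comparison: bit 0 = "less", bit 1 = "equal",
   bit 2 = "greater", bit 3 = "unordered", so COMPCODE_LE = 1|2 = 3
   and COMPCODE_UNGE = 2|4|8 = 14, matching the enum.  The
   hypothetical helper below tests whether a compcode holds for two
   doubles.  */
static int
compcode_holds_sketch (int compcode, double x, double y)
{
  int outcome = (x != x || y != y) ? 8   /* unordered (a NaN operand) */
                : x < y ? 1              /* less */
                : x == y ? 2             /* equal */
                : 4;                     /* greater */
  return (compcode & outcome) != 0;
}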
2244 /* Return a tree for the comparison which is the combination of
2245 doing the AND or OR (depending on CODE) of the two operations LCODE
2246 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2247 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2248 if this makes the transformation invalid. */
2250 tree
2251 combine_comparisons (enum tree_code code, enum tree_code lcode,
2252 enum tree_code rcode, tree truth_type,
2253 tree ll_arg, tree lr_arg)
2255 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2256 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2257 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2258 enum comparison_code compcode;
2260 switch (code)
2262 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2263 compcode = lcompcode & rcompcode;
2264 break;
2266 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2267 compcode = lcompcode | rcompcode;
2268 break;
2270 default:
2271 return NULL_TREE;
2274 if (!honor_nans)
2276 /* Eliminate unordered comparisons, as well as LTGT and ORD
2277 which are not used unless the mode has NaNs. */
2278 compcode &= ~COMPCODE_UNORD;
2279 if (compcode == COMPCODE_LTGT)
2280 compcode = COMPCODE_NE;
2281 else if (compcode == COMPCODE_ORD)
2282 compcode = COMPCODE_TRUE;
2284 else if (flag_trapping_math)
2286 /* Check that the original operation and the optimized ones will trap
2287 under the same condition. */
2288 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2289 && (lcompcode != COMPCODE_EQ)
2290 && (lcompcode != COMPCODE_ORD);
2291 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2292 && (rcompcode != COMPCODE_EQ)
2293 && (rcompcode != COMPCODE_ORD);
2294 bool trap = (compcode & COMPCODE_UNORD) == 0
2295 && (compcode != COMPCODE_EQ)
2296 && (compcode != COMPCODE_ORD);
2298 /* In a short-circuited boolean expression the LHS might be
2299 such that the RHS, if evaluated, will never trap. For
2300 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2301 if neither x nor y is NaN. (This is a mixed blessing: for
2302 example, the expression above will never trap, hence
2303 optimizing it to x < y would be invalid). */
2304 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2305 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2306 rtrap = false;
2308 /* If the comparison was short-circuited, and only the RHS
2309 trapped, we may now generate a spurious trap. */
2310 if (rtrap && !ltrap
2311 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2312 return NULL_TREE;
2314 /* If we changed the conditions that cause a trap, we lose. */
2315 if ((ltrap || rtrap) != trap)
2316 return NULL_TREE;
2319 if (compcode == COMPCODE_TRUE)
2320 return constant_boolean_node (true, truth_type);
2321 else if (compcode == COMPCODE_FALSE)
2322 return constant_boolean_node (false, truth_type);
2323 else
2324 return fold_build2 (compcode_to_comparison (compcode),
2325 truth_type, ll_arg, lr_arg);
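/* Illustration only, not part of GCC: the identity the code above
   relies on.  Because each compcode is a set of outcomes (see the
   sketch after compcode_to_comparison), "p && q" holds exactly on
   the intersection of the two sets and "p || q" on their union, so
   the combined predicate is the bitwise AND resp. OR of the codes.
   This checks that identity for one input pair, ignoring the
   trapping-math bookkeeping handled above.  */
static int
combine_codes_sketch (int lcode, int rcode, double x, double y)
{
  int l = compcode_holds_sketch (lcode, x, y);
  int r = compcode_holds_sketch (rcode, x, y);
  return ((l && r) == compcode_holds_sketch (lcode & rcode, x, y)
          && (l || r) == compcode_holds_sketch (lcode | rcode, x, y));
}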
2328 /* Return nonzero if CODE is a tree code that represents a truth value. */
2330 static int
2331 truth_value_p (enum tree_code code)
2333 return (TREE_CODE_CLASS (code) == tcc_comparison
2334 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2335 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2336 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2339 /* Return nonzero if two operands (typically of the same tree node)
2340 are necessarily equal. If either argument has side-effects this
2341 function returns zero. FLAGS modifies behavior as follows:
2343 If OEP_ONLY_CONST is set, only return nonzero for constants.
2344 This function tests whether the operands are indistinguishable;
2345 it does not test whether they are equal using C's == operation.
2346 The distinction is important for IEEE floating point, because
2347 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2348 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2350 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2351 even though it may hold multiple values during a function.
2352 This is because a GCC tree node guarantees that nothing else is
2353 executed between the evaluation of its "operands" (which may often
2354 be evaluated in arbitrary order). Hence if the operands themselves
2355 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2356 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2357 unset means assuming isochronic (or instantaneous) tree equivalence.
2358 Unless comparing arbitrary expression trees, such as from different
2359 statements, this flag can usually be left unset.
2361 If OEP_PURE_SAME is set, then pure functions with identical arguments
2362 are considered the same. It is used when the caller has other ways
2363 to ensure that global memory is unchanged in between. */
2365 int
2366 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2368 /* If either is ERROR_MARK, they aren't equal. */
2369 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2370 return 0;
2372 /* If both types don't have the same signedness, then we can't consider
2373 them equal. We must check this before the STRIP_NOPS calls
2374 because they may change the signedness of the arguments. */
2375 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2376 return 0;
2378 STRIP_NOPS (arg0);
2379 STRIP_NOPS (arg1);
2381 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2382 /* This is needed for conversions and for COMPONENT_REF.
2383 Might as well play it safe and always test this. */
2384 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2385 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2386 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2387 return 0;
2389 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2390 We don't care about side effects in that case because the SAVE_EXPR
2391 takes care of that for us. In all other cases, two expressions are
2392 equal if they have no side effects. If we have two identical
2393 expressions with side effects that should be treated the same due
2394 to the only side effects being identical SAVE_EXPR's, that will
2395 be detected in the recursive calls below. */
2396 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2397 && (TREE_CODE (arg0) == SAVE_EXPR
2398 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2399 return 1;
2401 /* Next handle constant cases, those for which we can return 1 even
2402 if ONLY_CONST is set. */
2403 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2404 switch (TREE_CODE (arg0))
2406 case INTEGER_CST:
2407 return (! TREE_CONSTANT_OVERFLOW (arg0)
2408 && ! TREE_CONSTANT_OVERFLOW (arg1)
2409 && tree_int_cst_equal (arg0, arg1));
2411 case REAL_CST:
2412 return (! TREE_CONSTANT_OVERFLOW (arg0)
2413 && ! TREE_CONSTANT_OVERFLOW (arg1)
2414 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2415 TREE_REAL_CST (arg1)));
2417 case VECTOR_CST:
2419 tree v1, v2;
2421 if (TREE_CONSTANT_OVERFLOW (arg0)
2422 || TREE_CONSTANT_OVERFLOW (arg1))
2423 return 0;
2425 v1 = TREE_VECTOR_CST_ELTS (arg0);
2426 v2 = TREE_VECTOR_CST_ELTS (arg1);
2427 while (v1 && v2)
2429 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2430 flags))
2431 return 0;
2432 v1 = TREE_CHAIN (v1);
2433 v2 = TREE_CHAIN (v2);
2436 return 1;
2439 case COMPLEX_CST:
2440 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2441 flags)
2442 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2443 flags));
2445 case STRING_CST:
2446 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2447 && ! memcmp (TREE_STRING_POINTER (arg0),
2448 TREE_STRING_POINTER (arg1),
2449 TREE_STRING_LENGTH (arg0)));
2451 case ADDR_EXPR:
2452 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2453 0);
2454 default:
2455 break;
2458 if (flags & OEP_ONLY_CONST)
2459 return 0;
2461 /* Define macros to test an operand from arg0 and arg1 for equality and a
2462 variant that allows null and views null as being different from any
2463 non-null value. In the latter case, if either is null, they both
2464 must be; otherwise, do the normal comparison. */
2465 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2466 TREE_OPERAND (arg1, N), flags)
2468 #define OP_SAME_WITH_NULL(N) \
2469 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2470 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2472 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2474 case tcc_unary:
2475 /* Two conversions are equal only if signedness and modes match. */
2476 switch (TREE_CODE (arg0))
2478 case NOP_EXPR:
2479 case CONVERT_EXPR:
2480 case FIX_CEIL_EXPR:
2481 case FIX_TRUNC_EXPR:
2482 case FIX_FLOOR_EXPR:
2483 case FIX_ROUND_EXPR:
2484 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2485 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2486 return 0;
2487 break;
2488 default:
2489 break;
2492 return OP_SAME (0);
2495 case tcc_comparison:
2496 case tcc_binary:
2497 if (OP_SAME (0) && OP_SAME (1))
2498 return 1;
2500 /* For commutative ops, allow the other order. */
2501 return (commutative_tree_code (TREE_CODE (arg0))
2502 && operand_equal_p (TREE_OPERAND (arg0, 0),
2503 TREE_OPERAND (arg1, 1), flags)
2504 && operand_equal_p (TREE_OPERAND (arg0, 1),
2505 TREE_OPERAND (arg1, 0), flags));
2507 case tcc_reference:
2508 /* If either of the pointer (or reference) expressions we are
2509 dereferencing contain a side effect, these cannot be equal. */
2510 if (TREE_SIDE_EFFECTS (arg0)
2511 || TREE_SIDE_EFFECTS (arg1))
2512 return 0;
2514 switch (TREE_CODE (arg0))
2516 case INDIRECT_REF:
2517 case ALIGN_INDIRECT_REF:
2518 case MISALIGNED_INDIRECT_REF:
2519 case REALPART_EXPR:
2520 case IMAGPART_EXPR:
2521 return OP_SAME (0);
2523 case ARRAY_REF:
2524 case ARRAY_RANGE_REF:
2525 /* Operands 2 and 3 may be null. */
2526 return (OP_SAME (0)
2527 && OP_SAME (1)
2528 && OP_SAME_WITH_NULL (2)
2529 && OP_SAME_WITH_NULL (3));
2531 case COMPONENT_REF:
2532 /* Handle operand 2 the same as for ARRAY_REF. */
2533 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2535 case BIT_FIELD_REF:
2536 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2538 default:
2539 return 0;
2542 case tcc_expression:
2543 switch (TREE_CODE (arg0))
2545 case ADDR_EXPR:
2546 case TRUTH_NOT_EXPR:
2547 return OP_SAME (0);
2549 case TRUTH_ANDIF_EXPR:
2550 case TRUTH_ORIF_EXPR:
2551 return OP_SAME (0) && OP_SAME (1);
2553 case TRUTH_AND_EXPR:
2554 case TRUTH_OR_EXPR:
2555 case TRUTH_XOR_EXPR:
2556 if (OP_SAME (0) && OP_SAME (1))
2557 return 1;
2559 /* Otherwise take into account this is a commutative operation. */
2560 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2561 TREE_OPERAND (arg1, 1), flags)
2562 && operand_equal_p (TREE_OPERAND (arg0, 1),
2563 TREE_OPERAND (arg1, 0), flags));
2565 case CALL_EXPR:
2566 /* If the CALL_EXPRs call different functions, then they
2567 clearly can not be equal. */
2568 if (!OP_SAME (0))
2569 return 0;
2572 unsigned int cef = call_expr_flags (arg0);
2573 if (flags & OEP_PURE_SAME)
2574 cef &= ECF_CONST | ECF_PURE;
2575 else
2576 cef &= ECF_CONST;
2577 if (!cef)
2578 return 0;
2581 /* Now see if all the arguments are the same. operand_equal_p
2582 does not handle TREE_LIST, so we walk the operands here
2583 feeding them to operand_equal_p. */
2584 arg0 = TREE_OPERAND (arg0, 1);
2585 arg1 = TREE_OPERAND (arg1, 1);
2586 while (arg0 && arg1)
2588 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2589 flags))
2590 return 0;
2592 arg0 = TREE_CHAIN (arg0);
2593 arg1 = TREE_CHAIN (arg1);
2596 /* If we get here and both argument lists are exhausted
2597 then the CALL_EXPRs are equal. */
2598 return ! (arg0 || arg1);
2600 default:
2601 return 0;
2604 case tcc_declaration:
2605 /* Consider __builtin_sqrt equal to sqrt. */
2606 return (TREE_CODE (arg0) == FUNCTION_DECL
2607 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2608 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2609 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2611 default:
2612 return 0;
2615 #undef OP_SAME
2616 #undef OP_SAME_WITH_NULL
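/* Illustration only, not part of GCC: the IEEE subtlety the comment
   before operand_equal_p describes, in plain C.  -0.0 and 0.0 are
   equal under == yet distinguishable, and a NaN is unequal even to a
   bit-identical copy of itself -- which is why the REAL_CST case uses
   REAL_VALUES_IDENTICAL rather than an equality test.  Standalone
   sketch assuming IEEE doubles; returns 1 when all three facts hold.  */
static int
identical_vs_equal_sketch (void)
{
  double pz = 0.0, nz = -0.0;
  double nan = nz / pz;              /* a quiet NaN under IEEE */
  return (pz == nz                   /* equal under C's == ... */
          && 1.0 / pz != 1.0 / nz    /* ... but +inf vs -inf apart */
          && nan != nan);            /* identical NaNs compare unequal */
}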
2619 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2620 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2622 When in doubt, return 0. */
2624 static int
2625 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2627 int unsignedp1, unsignedpo;
2628 tree primarg0, primarg1, primother;
2629 unsigned int correct_width;
2631 if (operand_equal_p (arg0, arg1, 0))
2632 return 1;
2634 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2635 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2636 return 0;
2638 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2639 and see if the inner values are the same. This removes any
2640 signedness comparison, which doesn't matter here. */
2641 primarg0 = arg0, primarg1 = arg1;
2642 STRIP_NOPS (primarg0);
2643 STRIP_NOPS (primarg1);
2644 if (operand_equal_p (primarg0, primarg1, 0))
2645 return 1;
2647 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2648 actual comparison operand, ARG0.
2650 First throw away any conversions to wider types
2651 already present in the operands. */
2653 primarg1 = get_narrower (arg1, &unsignedp1);
2654 primother = get_narrower (other, &unsignedpo);
2656 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2657 if (unsignedp1 == unsignedpo
2658 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2659 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2661 tree type = TREE_TYPE (arg0);
2663 /* Make sure shorter operand is extended the right way
2664 to match the longer operand. */
2665 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2666 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2668 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2669 return 1;
2672 return 0;
2675 /* See if ARG is an expression that is either a comparison or is performing
2676 arithmetic on comparisons. The comparisons must only be comparing
2677 two different values, which will be stored in *CVAL1 and *CVAL2; if
2678 they are nonzero it means that some operands have already been found.
2679 No variables may be used anywhere else in the expression except in the
2680 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2681 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2683 If this is true, return 1. Otherwise, return zero. */
2685 static int
2686 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2688 enum tree_code code = TREE_CODE (arg);
2689 enum tree_code_class class = TREE_CODE_CLASS (code);
2691 /* We can handle some of the tcc_expression cases here. */
2692 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2693 class = tcc_unary;
2694 else if (class == tcc_expression
2695 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2696 || code == COMPOUND_EXPR))
2697 class = tcc_binary;
2699 else if (class == tcc_expression && code == SAVE_EXPR
2700 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2702 /* If we've already found a CVAL1 or CVAL2, this expression is
2703 too complex to handle. */
2704 if (*cval1 || *cval2)
2705 return 0;
2707 class = tcc_unary;
2708 *save_p = 1;
2711 switch (class)
2713 case tcc_unary:
2714 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2716 case tcc_binary:
2717 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2718 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2719 cval1, cval2, save_p));
2721 case tcc_constant:
2722 return 1;
2724 case tcc_expression:
2725 if (code == COND_EXPR)
2726 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2727 cval1, cval2, save_p)
2728 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2729 cval1, cval2, save_p)
2730 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2731 cval1, cval2, save_p));
2732 return 0;
2734 case tcc_comparison:
2735 /* First see if we can handle the first operand, then the second. For
2736 the second operand, we know *CVAL1 can't be zero. It must be that
2737 one side of the comparison is each of the values; test for the
2738 case where this isn't true by failing if the two operands
2739 are the same. */
2741 if (operand_equal_p (TREE_OPERAND (arg, 0),
2742 TREE_OPERAND (arg, 1), 0))
2743 return 0;
2745 if (*cval1 == 0)
2746 *cval1 = TREE_OPERAND (arg, 0);
2747 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2748 ;
2749 else if (*cval2 == 0)
2750 *cval2 = TREE_OPERAND (arg, 0);
2751 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2752 ;
2753 else
2754 return 0;
2756 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2757 ;
2758 else if (*cval2 == 0)
2759 *cval2 = TREE_OPERAND (arg, 1);
2760 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2761 ;
2762 else
2763 return 0;
2765 return 1;
2767 default:
2768 return 0;
2772 /* ARG is a tree that is known to contain just arithmetic operations and
2773 comparisons. Evaluate the operations in the tree substituting NEW0 for
2774 any occurrence of OLD0 as an operand of a comparison and likewise for
2775 NEW1 and OLD1. */
2777 static tree
2778 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2780 tree type = TREE_TYPE (arg);
2781 enum tree_code code = TREE_CODE (arg);
2782 enum tree_code_class class = TREE_CODE_CLASS (code);
2784 /* We can handle some of the tcc_expression cases here. */
2785 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2786 class = tcc_unary;
2787 else if (class == tcc_expression
2788 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2789 class = tcc_binary;
2791 switch (class)
2793 case tcc_unary:
2794 return fold_build1 (code, type,
2795 eval_subst (TREE_OPERAND (arg, 0),
2796 old0, new0, old1, new1));
2798 case tcc_binary:
2799 return fold_build2 (code, type,
2800 eval_subst (TREE_OPERAND (arg, 0),
2801 old0, new0, old1, new1),
2802 eval_subst (TREE_OPERAND (arg, 1),
2803 old0, new0, old1, new1));
2805 case tcc_expression:
2806 switch (code)
2808 case SAVE_EXPR:
2809 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2811 case COMPOUND_EXPR:
2812 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2814 case COND_EXPR:
2815 return fold_build3 (code, type,
2816 eval_subst (TREE_OPERAND (arg, 0),
2817 old0, new0, old1, new1),
2818 eval_subst (TREE_OPERAND (arg, 1),
2819 old0, new0, old1, new1),
2820 eval_subst (TREE_OPERAND (arg, 2),
2821 old0, new0, old1, new1));
2822 default:
2823 break;
2825 /* Fall through - ??? */
2827 case tcc_comparison:
2829 tree arg0 = TREE_OPERAND (arg, 0);
2830 tree arg1 = TREE_OPERAND (arg, 1);
2832 /* We need to check both for exact equality and tree equality. The
2833 former will be true if the operand has a side-effect. In that
2834 case, we know the operand occurred exactly once. */
2836 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2837 arg0 = new0;
2838 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2839 arg0 = new1;
2841 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2842 arg1 = new0;
2843 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2844 arg1 = new1;
2846 return fold_build2 (code, type, arg0, arg1);
2849 default:
2850 return arg;
2854 /* Return a tree for the case when the result of an expression is RESULT
2855 converted to TYPE and OMITTED was previously an operand of the expression
2856 but is now not needed (e.g., we folded OMITTED * 0).
2858 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2859 the conversion of RESULT to TYPE. */
2861 tree
2862 omit_one_operand (tree type, tree result, tree omitted)
2864 tree t = fold_convert (type, result);
2866 if (TREE_SIDE_EFFECTS (omitted))
2867 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2869 return non_lvalue (t);
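/* Illustration only, not part of GCC: the shape omit_one_operand
   builds, in plain C.  Folding "f () * 0" to 0 must still evaluate
   f () for its side effects; the COMPOUND_EXPR does for trees what
   the comma operator does below.  Hypothetical helper.  */
static int
omit_one_operand_sketch (int (*f) (void))
{
  return (f (), 0);   /* evaluate f, discard it, yield the folded 0 */
}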
2872 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2874 static tree
2875 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2877 tree t = fold_convert (type, result);
2879 if (TREE_SIDE_EFFECTS (omitted))
2880 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2882 return pedantic_non_lvalue (t);
2885 /* Return a tree for the case when the result of an expression is RESULT
2886 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2887 of the expression but are now not needed.
2889 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2890 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2891 evaluated before OMITTED2. Otherwise, if neither has side effects,
2892 just do the conversion of RESULT to TYPE. */
2894 tree
2895 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2897 tree t = fold_convert (type, result);
2899 if (TREE_SIDE_EFFECTS (omitted2))
2900 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2901 if (TREE_SIDE_EFFECTS (omitted1))
2902 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2904 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2908 /* Return a simplified tree node for the truth-negation of ARG. This
2909 never alters ARG itself. We assume that ARG is an operation that
2910 returns a truth value (0 or 1).
2912 FIXME: one would think we would fold the result, but it causes
2913 problems with the dominator optimizer. */
2914 tree
2915 invert_truthvalue (tree arg)
2917 tree type = TREE_TYPE (arg);
2918 enum tree_code code = TREE_CODE (arg);
2920 if (code == ERROR_MARK)
2921 return arg;
2923 /* If this is a comparison, we can simply invert it, except for
2924 floating-point non-equality comparisons, in which case we just
2925 enclose a TRUTH_NOT_EXPR around what we have. */
2927 if (TREE_CODE_CLASS (code) == tcc_comparison)
2929 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2930 if (FLOAT_TYPE_P (op_type)
2931 && flag_trapping_math
2932 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2933 && code != NE_EXPR && code != EQ_EXPR)
2934 return build1 (TRUTH_NOT_EXPR, type, arg);
2935 else
2937 code = invert_tree_comparison (code,
2938 HONOR_NANS (TYPE_MODE (op_type)));
2939 if (code == ERROR_MARK)
2940 return build1 (TRUTH_NOT_EXPR, type, arg);
2941 else
2942 return build2 (code, type,
2943 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2947 switch (code)
2949 case INTEGER_CST:
2950 return constant_boolean_node (integer_zerop (arg), type);
2952 case TRUTH_AND_EXPR:
2953 return build2 (TRUTH_OR_EXPR, type,
2954 invert_truthvalue (TREE_OPERAND (arg, 0)),
2955 invert_truthvalue (TREE_OPERAND (arg, 1)));
2957 case TRUTH_OR_EXPR:
2958 return build2 (TRUTH_AND_EXPR, type,
2959 invert_truthvalue (TREE_OPERAND (arg, 0)),
2960 invert_truthvalue (TREE_OPERAND (arg, 1)));
2962 case TRUTH_XOR_EXPR:
2963 /* Here we can invert either operand. We invert the first operand
2964 unless the second operand is a TRUTH_NOT_EXPR in which case our
2965 result is the XOR of the first operand with the inside of the
2966 negation of the second operand. */
2968 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2969 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2970 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2971 else
2972 return build2 (TRUTH_XOR_EXPR, type,
2973 invert_truthvalue (TREE_OPERAND (arg, 0)),
2974 TREE_OPERAND (arg, 1));
2976 case TRUTH_ANDIF_EXPR:
2977 return build2 (TRUTH_ORIF_EXPR, type,
2978 invert_truthvalue (TREE_OPERAND (arg, 0)),
2979 invert_truthvalue (TREE_OPERAND (arg, 1)));
2981 case TRUTH_ORIF_EXPR:
2982 return build2 (TRUTH_ANDIF_EXPR, type,
2983 invert_truthvalue (TREE_OPERAND (arg, 0)),
2984 invert_truthvalue (TREE_OPERAND (arg, 1)));
2986 case TRUTH_NOT_EXPR:
2987 return TREE_OPERAND (arg, 0);
2989 case COND_EXPR:
2990 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2991 invert_truthvalue (TREE_OPERAND (arg, 1)),
2992 invert_truthvalue (TREE_OPERAND (arg, 2)));
2994 case COMPOUND_EXPR:
2995 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2996 invert_truthvalue (TREE_OPERAND (arg, 1)));
2998 case NON_LVALUE_EXPR:
2999 return invert_truthvalue (TREE_OPERAND (arg, 0));
3001 case NOP_EXPR:
3002 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3003 break;
3005 case CONVERT_EXPR:
3006 case FLOAT_EXPR:
3007 return build1 (TREE_CODE (arg), type,
3008 invert_truthvalue (TREE_OPERAND (arg, 0)));
3010 case BIT_AND_EXPR:
3011 if (!integer_onep (TREE_OPERAND (arg, 1)))
3012 break;
3013 return build2 (EQ_EXPR, type, arg,
3014 fold_convert (type, integer_zero_node));
3016 case SAVE_EXPR:
3017 return build1 (TRUTH_NOT_EXPR, type, arg);
3019 case CLEANUP_POINT_EXPR:
3020 return build1 (CLEANUP_POINT_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)));
3023 default:
3024 break;
3026 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3027 return build1 (TRUTH_NOT_EXPR, type, arg);
3030 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3031 operands are another bit-wise operation with a common input. If so,
3032 distribute the bit operations to save an operation and possibly two if
3033 constants are involved. For example, convert
3034 (A | B) & (A | C) into A | (B & C)
3035 Further simplification will occur if B and C are constants.
3037 If this optimization cannot be done, 0 will be returned. */
3039 static tree
3040 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3042 tree common;
3043 tree left, right;
3045 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3046 || TREE_CODE (arg0) == code
3047 || (TREE_CODE (arg0) != BIT_AND_EXPR
3048 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3049 return 0;
3051 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3053 common = TREE_OPERAND (arg0, 0);
3054 left = TREE_OPERAND (arg0, 1);
3055 right = TREE_OPERAND (arg1, 1);
3057 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3059 common = TREE_OPERAND (arg0, 0);
3060 left = TREE_OPERAND (arg0, 1);
3061 right = TREE_OPERAND (arg1, 0);
3063 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3065 common = TREE_OPERAND (arg0, 1);
3066 left = TREE_OPERAND (arg0, 0);
3067 right = TREE_OPERAND (arg1, 1);
3069 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3071 common = TREE_OPERAND (arg0, 1);
3072 left = TREE_OPERAND (arg0, 0);
3073 right = TREE_OPERAND (arg1, 0);
3075 else
3076 return 0;
3078 return fold_build2 (TREE_CODE (arg0), type, common,
3079 fold_build2 (code, type, left, right));
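/* Illustration only, not part of GCC: a brute-force check of the
   distribution described above, over all 8-bit values -- (A | B) &
   (A | C) equals A | (B & C), and dually with & and | exchanged.
   Returns 1 if the identity held for every triple; standalone
   sketch.  */
static int
distribute_bit_expr_sketch (void)
{
  unsigned a, b, c;
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
        if (((a | b) & (a | c)) != (a | (b & c))
            || ((a & b) | (a & c)) != (a & (b | c)))
          return 0;
  return 1;
}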
3082 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3083 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3085 static tree
3086 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3087 int unsignedp)
3089 tree result;
3091 if (bitpos == 0)
3093 tree size = TYPE_SIZE (TREE_TYPE (inner));
3094 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3095 || POINTER_TYPE_P (TREE_TYPE (inner)))
3096 && host_integerp (size, 0)
3097 && tree_low_cst (size, 0) == bitsize)
3098 return fold_convert (type, inner);
3101 result = build3 (BIT_FIELD_REF, type, inner,
3102 size_int (bitsize), bitsize_int (bitpos));
3104 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3106 return result;
3109 /* Optimize a bit-field compare.
3111 There are two cases: First is a compare against a constant and the
3112 second is a comparison of two items where the fields are at the same
3113 bit position relative to the start of a chunk (byte, halfword, word)
3114 large enough to contain it. In these cases we can avoid the shift
3115 implicit in bitfield extractions.
3117 For constants, we emit a compare of the shifted constant with the
3118 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3119 compared. For two fields at the same position, we do the ANDs with the
3120 similar mask and compare the result of the ANDs.
3122 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3123 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3124 are the left and right operands of the comparison, respectively.
3126 If the optimization described above can be done, we return the resulting
3127 tree. Otherwise we return zero. */
3129 static tree
3130 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3131 tree lhs, tree rhs)
3133 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3134 tree type = TREE_TYPE (lhs);
3135 tree signed_type, unsigned_type;
3136 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3137 enum machine_mode lmode, rmode, nmode;
3138 int lunsignedp, runsignedp;
3139 int lvolatilep = 0, rvolatilep = 0;
3140 tree linner, rinner = NULL_TREE;
3141 tree mask;
3142 tree offset;
3144 /* Get all the information about the extractions being done. If the bit size
3145 is the same as the size of the underlying object, we aren't doing an
3146 extraction at all and so can do nothing. We also don't want to
3147 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3148 then will no longer be able to replace it. */
3149 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3150 &lunsignedp, &lvolatilep, false);
3151 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3152 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3153 return 0;
3155 if (!const_p)
3157 /* If this is not a constant, we can only do something if bit positions,
3158 sizes, and signedness are the same. */
3159 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3160 &runsignedp, &rvolatilep, false);
3162 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3163 || lunsignedp != runsignedp || offset != 0
3164 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3165 return 0;
3168 /* See if we can find a mode to refer to this field. We should be able to,
3169 but fail if we can't. */
3170 nmode = get_best_mode (lbitsize, lbitpos,
3171 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3172 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3173 TYPE_ALIGN (TREE_TYPE (rinner))),
3174 word_mode, lvolatilep || rvolatilep);
3175 if (nmode == VOIDmode)
3176 return 0;
3178 /* Set signed and unsigned types of the precision of this mode for the
3179 shifts below. */
3180 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3181 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3183 /* Compute the bit position and size for the new reference and our offset
3184 within it. If the new reference is the same size as the original, we
3185 won't optimize anything, so return zero. */
3186 nbitsize = GET_MODE_BITSIZE (nmode);
3187 nbitpos = lbitpos & ~ (nbitsize - 1);
3188 lbitpos -= nbitpos;
3189 if (nbitsize == lbitsize)
3190 return 0;
3192 if (BYTES_BIG_ENDIAN)
3193 lbitpos = nbitsize - lbitsize - lbitpos;
3195 /* Make the mask to be used against the extracted field. */
3196 mask = build_int_cst (unsigned_type, -1);
3197 mask = force_fit_type (mask, 0, false, false);
3198 mask = fold_convert (unsigned_type, mask);
3199 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3200 mask = const_binop (RSHIFT_EXPR, mask,
3201 size_int (nbitsize - lbitsize - lbitpos), 0);
3203 if (! const_p)
3204 /* If not comparing with constant, just rework the comparison
3205 and return. */
3206 return build2 (code, compare_type,
3207 build2 (BIT_AND_EXPR, unsigned_type,
3208 make_bit_field_ref (linner, unsigned_type,
3209 nbitsize, nbitpos, 1),
3210 mask),
3211 build2 (BIT_AND_EXPR, unsigned_type,
3212 make_bit_field_ref (rinner, unsigned_type,
3213 nbitsize, nbitpos, 1),
3214 mask));
3216 /* Otherwise, we are handling the constant case. See if the constant is too
3217 big for the field. Warn and return a tree for 0 (false) if so. We do
3218 this not only for its own sake, but to avoid having to test for this
3219 error case below. If we didn't, we might generate wrong code.
3221 For unsigned fields, the constant shifted right by the field length should
3222 be all zero. For signed fields, the high-order bits should agree with
3223 the sign bit. */
3225 if (lunsignedp)
3227 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3228 fold_convert (unsigned_type, rhs),
3229 size_int (lbitsize), 0)))
3231 warning ("comparison is always %d due to width of bit-field",
3232 code == NE_EXPR);
3233 return constant_boolean_node (code == NE_EXPR, compare_type);
3236 else
3238 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3239 size_int (lbitsize - 1), 0);
3240 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3242 warning ("comparison is always %d due to width of bit-field",
3243 code == NE_EXPR);
3244 return constant_boolean_node (code == NE_EXPR, compare_type);
3248 /* Single-bit compares should always be against zero. */
3249 if (lbitsize == 1 && ! integer_zerop (rhs))
3251 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3252 rhs = fold_convert (type, integer_zero_node);
3255 /* Make a new bitfield reference, shift the constant over the
3256 appropriate number of bits and mask it with the computed mask
3257 (in case this was a signed field). If we changed it, make a new one. */
3258 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3259 if (lvolatilep)
3261 TREE_SIDE_EFFECTS (lhs) = 1;
3262 TREE_THIS_VOLATILE (lhs) = 1;
3265 rhs = fold (const_binop (BIT_AND_EXPR,
3266 const_binop (LSHIFT_EXPR,
3267 fold_convert (unsigned_type, rhs),
3268 size_int (lbitpos), 0),
3269 mask, 0));
3271 return build2 (code, compare_type,
3272 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3273 rhs);
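/* Illustration only, not part of GCC: the shift this optimization
   removes, on a plain word.  Comparing a 3-bit field at bit 4 with
   the constant 5 normally extracts the field with a shift; shifting
   the constant instead and masking the whole word compares equal in
   exactly the same cases.  Returns 1 when the two forms agree;
   standalone sketch with little-endian bit numbering.  */
static int
bit_field_compare_sketch (unsigned w)
{
  int extracted = ((w >> 4) & 7u) == 5u;        /* shift, mask, compare */
  int masked = (w & (7u << 4)) == (5u << 4);    /* mask, compare shifted constant */
  return extracted == masked;
}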
3276 /* Subroutine for fold_truthop: decode a field reference.
3278 If EXP is a comparison reference, we return the innermost reference.
3280 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3281 set to the starting bit number.
3283 If the innermost field can be completely contained in a mode-sized
3284 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3286 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3287 otherwise it is not changed.
3289 *PUNSIGNEDP is set to the signedness of the field.
3291 *PMASK is set to the mask used. This is either contained in a
3292 BIT_AND_EXPR or derived from the width of the field.
3294 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3296 Return 0 if this is not a component reference or is one that we can't
3297 do anything with. */
3299 static tree
3300 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3301 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3302 int *punsignedp, int *pvolatilep,
3303 tree *pmask, tree *pand_mask)
3305 tree outer_type = 0;
3306 tree and_mask = 0;
3307 tree mask, inner, offset;
3308 tree unsigned_type;
3309 unsigned int precision;
3311 /* All the optimizations using this function assume integer fields.
3312 There are problems with FP fields since the type_for_size call
3313 below can fail for, e.g., XFmode. */
3314 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3315 return 0;
3317 /* We are interested in the bare arrangement of bits, so strip everything
3318 that doesn't affect the machine mode. However, record the type of the
3319 outermost expression if it may matter below. */
3320 if (TREE_CODE (exp) == NOP_EXPR
3321 || TREE_CODE (exp) == CONVERT_EXPR
3322 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3323 outer_type = TREE_TYPE (exp);
3324 STRIP_NOPS (exp);
3326 if (TREE_CODE (exp) == BIT_AND_EXPR)
3328 and_mask = TREE_OPERAND (exp, 1);
3329 exp = TREE_OPERAND (exp, 0);
3330 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3331 if (TREE_CODE (and_mask) != INTEGER_CST)
3332 return 0;
3335 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3336 punsignedp, pvolatilep, false);
3337 if ((inner == exp && and_mask == 0)
3338 || *pbitsize < 0 || offset != 0
3339 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3340 return 0;
3342 /* If the number of bits in the reference is the same as the bitsize of
3343 the outer type, then the outer type gives the signedness. Otherwise
3344 (in case of a small bitfield) the signedness is unchanged. */
3345 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3346 *punsignedp = TYPE_UNSIGNED (outer_type);
3348 /* Compute the mask to access the bitfield. */
3349 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3350 precision = TYPE_PRECISION (unsigned_type);
3352 mask = build_int_cst (unsigned_type, -1);
3353 mask = force_fit_type (mask, 0, false, false);
3355 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3356 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3358 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3359 if (and_mask != 0)
3360 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3361 fold_convert (unsigned_type, and_mask), mask);
3363 *pmask = mask;
3364 *pand_mask = and_mask;
3365 return inner;
3368 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3369 bit positions. */
3371 static int
3372 all_ones_mask_p (tree mask, int size)
3374 tree type = TREE_TYPE (mask);
3375 unsigned int precision = TYPE_PRECISION (type);
3376 tree tmask;
3378 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3379 tmask = force_fit_type (tmask, 0, false, false);
3381 return
3382 tree_int_cst_equal (mask,
3383 const_binop (RSHIFT_EXPR,
3384 const_binop (LSHIFT_EXPR, tmask,
3385 size_int (precision - size),
3386 0),
3387 size_int (precision - size), 0));
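/* Illustration only, not part of GCC: the mask idiom used above and
   in optimize_bit_field_compare -- start from all ones and shift left
   then right so that exactly SIZE low-order ones survive.  Standalone
   single-word sketch with an assumed 32-bit word and 1 <= size <= 32;
   e.g. size 5 yields 0x1f.  */
static unsigned
low_ones_mask_sketch (int size)
{
  int width = 32;                 /* assumed word width */
  unsigned mask = ~0u;
  if (size == width)
    return mask;                  /* shifting by the full width is undefined in C */
  mask <<= width - size;          /* the LSHIFT_EXPR above */
  mask >>= width - size;          /* the RSHIFT_EXPR: logical, as mask is unsigned */
  return mask;
}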
3390 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3391 represents the sign bit of EXP's type. If EXP represents a sign
3392 or zero extension, also test VAL against the unextended type.
3393 The return value is the (sub)expression whose sign bit is VAL,
3394 or NULL_TREE otherwise. */
3396 static tree
3397 sign_bit_p (tree exp, tree val)
3399 unsigned HOST_WIDE_INT mask_lo, lo;
3400 HOST_WIDE_INT mask_hi, hi;
3401 int width;
3402 tree t;
3404 /* Tree EXP must have an integral type. */
3405 t = TREE_TYPE (exp);
3406 if (! INTEGRAL_TYPE_P (t))
3407 return NULL_TREE;
3409 /* Tree VAL must be an integer constant. */
3410 if (TREE_CODE (val) != INTEGER_CST
3411 || TREE_CONSTANT_OVERFLOW (val))
3412 return NULL_TREE;
3414 width = TYPE_PRECISION (t);
3415 if (width > HOST_BITS_PER_WIDE_INT)
3417 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3418 lo = 0;
3420 mask_hi = ((unsigned HOST_WIDE_INT) -1
3421 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3422 mask_lo = -1;
3424 else
3426 hi = 0;
3427 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3429 mask_hi = 0;
3430 mask_lo = ((unsigned HOST_WIDE_INT) -1
3431 >> (HOST_BITS_PER_WIDE_INT - width));
3434 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3435 treat VAL as if it were unsigned. */
3436 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3437 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3438 return exp;
3440 /* Handle extension from a narrower type. */
3441 if (TREE_CODE (exp) == NOP_EXPR
3442 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3443 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3445 return NULL_TREE;
3448 /* Subroutine for fold_truthop: determine if an operand is simple enough
3449 to be evaluated unconditionally. */
3451 static int
3452 simple_operand_p (tree exp)
3454 /* Strip any conversions that don't change the machine mode. */
3455 STRIP_NOPS (exp);
3457 return (CONSTANT_CLASS_P (exp)
3458 || TREE_CODE (exp) == SSA_NAME
3459 || (DECL_P (exp)
3460 && ! TREE_ADDRESSABLE (exp)
3461 && ! TREE_THIS_VOLATILE (exp)
3462 && ! DECL_NONLOCAL (exp)
3463 /* Don't regard global variables as simple. They may be
3464 allocated in ways unknown to the compiler (shared memory,
3465 #pragma weak, etc). */
3466 && ! TREE_PUBLIC (exp)
3467 && ! DECL_EXTERNAL (exp)
3468 /* Loading a static variable is unduly expensive, but global
3469 registers aren't expensive. */
3470 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3473 /* The following functions are subroutines to fold_range_test and allow it to
3474 try to change a logical combination of comparisons into a range test.
3476 For example, both
3477 X == 2 || X == 3 || X == 4 || X == 5
3478 and
3479 X >= 2 && X <= 5
3480 are converted to
3481 (unsigned) (X - 2) <= 3
3483 We describe each set of comparisons as being either inside or outside
3484 a range, using a variable named like IN_P, and then describe the
3485 range with a lower and upper bound. If one of the bounds is omitted,
3486 it represents either the highest or lowest value of the type.
3488 In the comments below, we represent a range by two numbers in brackets
3489 preceded by a "+" to designate being inside that range, or a "-" to
3490 designate being outside that range, so the condition can be inverted by
3491 flipping the prefix. An omitted bound is represented by a "-". For
3492 example, "- [-, 10]" means being outside the range starting at the lowest
3493 possible value and ending at 10, in other words, being greater than 10.
3494 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3495 always false.
3497 We set up things so that the missing bounds are handled in a consistent
3498 manner so neither a missing bound nor "true" and "false" need to be
3499 handled using a special case. */
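/* Illustration only, not part of GCC: the canonical example above,
   checked directly.  Subtracting the low bound and comparing
   unsigned folds the two-sided test into one compare, because
   values below 2 wrap around to huge unsigned numbers.  Returns 1
   when the forms agree; a standalone sketch assuming x is not near
   INT_MIN, where the subtraction would overflow.  */
static int
range_test_sketch (int x)
{
  int longhand = x == 2 || x == 3 || x == 4 || x == 5;
  int as_range = x >= 2 && x <= 5;
  int folded = (unsigned) (x - 2) <= 3u;
  return longhand == as_range && as_range == folded;
}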
3501 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3502 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3503 and UPPER1_P are nonzero if the respective argument is an upper bound
3504 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3505 must be specified for a comparison. ARG1 will be converted to ARG0's
3506 type if both are specified. */
3508 static tree
3509 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3510 tree arg1, int upper1_p)
3512 tree tem;
3513 int result;
3514 int sgn0, sgn1;
3516 /* If neither arg represents infinity, do the normal operation.
3517 Else, if not a comparison, return infinity. Else handle the special
3518 comparison rules. Note that most of the cases below won't occur, but
3519 are handled for consistency. */
3521 if (arg0 != 0 && arg1 != 0)
3523 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3524 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3525 STRIP_NOPS (tem);
3526 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3529 if (TREE_CODE_CLASS (code) != tcc_comparison)
3530 return 0;
3532 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3533 for neither. In real maths, we cannot assume open ended ranges are
3534 the same. But, this is computer arithmetic, where numbers are finite.
3535 We can therefore make the transformation of any unbounded range with
3536 the value Z, Z being greater than any representable number. This permits
3537 us to treat unbounded ranges as equal. */
3538 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3539 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3540 switch (code)
3542 case EQ_EXPR:
3543 result = sgn0 == sgn1;
3544 break;
3545 case NE_EXPR:
3546 result = sgn0 != sgn1;
3547 break;
3548 case LT_EXPR:
3549 result = sgn0 < sgn1;
3550 break;
3551 case LE_EXPR:
3552 result = sgn0 <= sgn1;
3553 break;
3554 case GT_EXPR:
3555 result = sgn0 > sgn1;
3556 break;
3557 case GE_EXPR:
3558 result = sgn0 >= sgn1;
3559 break;
3560 default:
3561 gcc_unreachable ();
3564 return constant_boolean_node (result, type);
3567 /* Given EXP, a logical expression, set the range it is testing into
3568 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3569 actually being tested. *PLOW and *PHIGH will be made of the same type
3570 as the returned expression. If EXP is not a comparison, we will most
3571 likely not be returning a useful value and range. */
3573 static tree
3574 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3576 enum tree_code code;
3577 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3578 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3579 int in_p, n_in_p;
3580 tree low, high, n_low, n_high;
3582 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3583 and see if we can refine the range. Some of the cases below may not
3584 happen, but it doesn't seem worth worrying about this. We "continue"
3585 the outer loop when we've changed something; otherwise we "break"
3586 the switch, which will "break" the while. */
3588 in_p = 0;
3589 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3591 while (1)
3593 code = TREE_CODE (exp);
3594 exp_type = TREE_TYPE (exp);
3596 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3598 if (TREE_CODE_LENGTH (code) > 0)
3599 arg0 = TREE_OPERAND (exp, 0);
3600 if (TREE_CODE_CLASS (code) == tcc_comparison
3601 || TREE_CODE_CLASS (code) == tcc_unary
3602 || TREE_CODE_CLASS (code) == tcc_binary)
3603 arg0_type = TREE_TYPE (arg0);
3604 if (TREE_CODE_CLASS (code) == tcc_binary
3605 || TREE_CODE_CLASS (code) == tcc_comparison
3606 || (TREE_CODE_CLASS (code) == tcc_expression
3607 && TREE_CODE_LENGTH (code) > 1))
3608 arg1 = TREE_OPERAND (exp, 1);
3611 switch (code)
3613 case TRUTH_NOT_EXPR:
3614 in_p = ! in_p, exp = arg0;
3615 continue;
3617 case EQ_EXPR: case NE_EXPR:
3618 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3619 /* We can only do something if the range is testing for zero
3620 and if the second operand is an integer constant. Note that
3621 saying something is "in" the range we make is done by
3622 complementing IN_P, since IN_P was set up for the initial test of
3623 being not equal to zero; "out" is leaving it alone. */
3624 if (low == 0 || high == 0
3625 || ! integer_zerop (low) || ! integer_zerop (high)
3626 || TREE_CODE (arg1) != INTEGER_CST)
3627 break;
3629 switch (code)
3631 case NE_EXPR: /* - [c, c] */
3632 low = high = arg1;
3633 break;
3634 case EQ_EXPR: /* + [c, c] */
3635 in_p = ! in_p, low = high = arg1;
3636 break;
3637 case GT_EXPR: /* - [-, c] */
3638 low = 0, high = arg1;
3639 break;
3640 case GE_EXPR: /* + [c, -] */
3641 in_p = ! in_p, low = arg1, high = 0;
3642 break;
3643 case LT_EXPR: /* - [c, -] */
3644 low = arg1, high = 0;
3645 break;
3646 case LE_EXPR: /* + [-, c] */
3647 in_p = ! in_p, low = 0, high = arg1;
3648 break;
3649 default:
3650 gcc_unreachable ();
3653 /* If this is an unsigned comparison, we also know that EXP is
3654 greater than or equal to zero. We base the range tests we make
3655 on that fact, so we record it here so we can parse existing
3656 range tests. We test arg0_type since often the return type
3657 of, e.g. EQ_EXPR, is boolean. */
3658 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3660 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3661 in_p, low, high, 1,
3662 fold_convert (arg0_type, integer_zero_node),
3663 NULL_TREE))
3664 break;
3666 in_p = n_in_p, low = n_low, high = n_high;
3668 /* If the high bound is missing, but we have a nonzero low
3669 bound, reverse the range so it goes from zero to the low bound
3670 minus 1. */
3671 if (high == 0 && low && ! integer_zerop (low))
3673 in_p = ! in_p;
3674 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3675 integer_one_node, 0);
3676 low = fold_convert (arg0_type, integer_zero_node);
3680 exp = arg0;
3681 continue;
3683 case NEGATE_EXPR:
3684 /* (-x) IN [a,b] -> x in [-b, -a] */
3685 n_low = range_binop (MINUS_EXPR, exp_type,
3686 fold_convert (exp_type, integer_zero_node),
3687 0, high, 1);
3688 n_high = range_binop (MINUS_EXPR, exp_type,
3689 fold_convert (exp_type, integer_zero_node),
3690 0, low, 0);
3691 low = n_low, high = n_high;
3692 exp = arg0;
3693 continue;
3695 case BIT_NOT_EXPR:
3696 /* ~ X -> -X - 1 */
3697 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3698 fold_convert (exp_type, integer_one_node));
3699 continue;
3701 case PLUS_EXPR: case MINUS_EXPR:
3702 if (TREE_CODE (arg1) != INTEGER_CST)
3703 break;
3705 /* If EXP is signed, any overflow in the computation is undefined,
3706 so we don't worry about it so long as our computations on
3707 the bounds don't overflow. For unsigned, overflow is defined
3708 and this is exactly the right thing. */
3709 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3710 arg0_type, low, 0, arg1, 0);
3711 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3712 arg0_type, high, 1, arg1, 0);
3713 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3714 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3715 break;
3717 /* Check for an unsigned range which has wrapped around the maximum
3718 value thus making n_high < n_low, and normalize it. */
3719 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3721 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3722 integer_one_node, 0);
3723 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3724 integer_one_node, 0);
3726 /* If the range is of the form +/- [ x+1, x ], we won't
3727 be able to normalize it. But then, it represents the
3728 whole range or the empty set, so make it
3729 +/- [ -, - ]. */
3730 if (tree_int_cst_equal (n_low, low)
3731 && tree_int_cst_equal (n_high, high))
3732 low = high = 0;
3733 else
3734 in_p = ! in_p;
3736 else
3737 low = n_low, high = n_high;
3739 exp = arg0;
3740 continue;
3742 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3743 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3744 break;
3746 if (! INTEGRAL_TYPE_P (arg0_type)
3747 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3748 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3749 break;
3751 n_low = low, n_high = high;
3753 if (n_low != 0)
3754 n_low = fold_convert (arg0_type, n_low);
3756 if (n_high != 0)
3757 n_high = fold_convert (arg0_type, n_high);
3760 /* If we're converting arg0 from an unsigned type to exp's
3761 signed type, we will be doing the comparison as unsigned.
3762 The tests above have already verified that LOW and HIGH
3763 are both positive.
3765 So we have to ensure that we will handle large unsigned
3766 values the same way that the current signed bounds treat
3767 negative values. */
3769 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3771 tree high_positive;
3772 tree equiv_type = lang_hooks.types.type_for_mode
3773 (TYPE_MODE (arg0_type), 1);
3775 /* A range without an upper bound is, naturally, unbounded.
3776 Since convert would have cropped a very large value, use
3777 the max value for the destination type. */
3778 high_positive
3779 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3780 : TYPE_MAX_VALUE (arg0_type);
3782 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3783 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3784 fold_convert (arg0_type,
3785 high_positive),
3786 fold_convert (arg0_type,
3787 integer_one_node));
3789 /* If the low bound is specified, "and" the range with the
3790 range for which the original unsigned value will be
3791 positive. */
3792 if (low != 0)
3794 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3795 1, n_low, n_high, 1,
3796 fold_convert (arg0_type,
3797 integer_zero_node),
3798 high_positive))
3799 break;
3801 in_p = (n_in_p == in_p);
3803 else
3805 /* Otherwise, "or" the range with the range of the input
3806 that will be interpreted as negative. */
3807 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3808 0, n_low, n_high, 1,
3809 fold_convert (arg0_type,
3810 integer_zero_node),
3811 high_positive))
3812 break;
3814 in_p = (in_p != n_in_p);
3818 exp = arg0;
3819 low = n_low, high = n_high;
3820 continue;
3822 default:
3823 break;
3826 break;
3829 /* If EXP is a constant, we can evaluate whether this is true or false. */
3830 if (TREE_CODE (exp) == INTEGER_CST)
3832 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3833 exp, 0, low, 0))
3834 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3835 exp, 1, high, 1)));
3836 low = high = 0;
3837 exp = 0;
3840 *pin_p = in_p, *plow = low, *phigh = high;
3841 return exp;
3844 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3845 type, TYPE, return an expression to test if EXP is in (or out of, depending
3846 on IN_P) the range. Return 0 if the test couldn't be created. */
3848 static tree
3849 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3851 tree etype = TREE_TYPE (exp);
3852 tree value;
3854 if (! in_p)
3856 value = build_range_check (type, exp, 1, low, high);
3857 if (value != 0)
3858 return invert_truthvalue (value);
3860 return 0;
3863 if (low == 0 && high == 0)
3864 return fold_convert (type, integer_one_node);
3866 if (low == 0)
3867 return fold_build2 (LE_EXPR, type, exp, high);
3869 if (high == 0)
3870 return fold_build2 (GE_EXPR, type, exp, low);
3872 if (operand_equal_p (low, high, 0))
3873 return fold_build2 (EQ_EXPR, type, exp, low);
3875 if (integer_zerop (low))
3877 if (! TYPE_UNSIGNED (etype))
3879 etype = lang_hooks.types.unsigned_type (etype);
3880 high = fold_convert (etype, high);
3881 exp = fold_convert (etype, exp);
3883 return build_range_check (type, exp, 1, 0, high);
3886 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3887 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3889 unsigned HOST_WIDE_INT lo;
3890 HOST_WIDE_INT hi;
3891 int prec;
3893 prec = TYPE_PRECISION (etype);
3894 if (prec <= HOST_BITS_PER_WIDE_INT)
3896 hi = 0;
3897 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3899 else
3901 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3902 lo = (unsigned HOST_WIDE_INT) -1;
3905 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3907 if (TYPE_UNSIGNED (etype))
3909 etype = lang_hooks.types.signed_type (etype);
3910 exp = fold_convert (etype, exp);
3912 return fold_build2 (GT_EXPR, type, exp,
3913 fold_convert (etype, integer_zero_node));
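/* For illustration, with an 8-bit type: PREC == 8 gives LO == 127
   and HI == 0, so an unsigned char range [1, 127] matches and is
   folded to (signed char) c > 0, as the comment before this block
   promises.  */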
3917 value = const_binop (MINUS_EXPR, high, low, 0);
3918 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3920 tree utype, minv, maxv;
3922 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3923 for the type in question, as we rely on this here. */
3924 switch (TREE_CODE (etype))
3926 case INTEGER_TYPE:
3927 case ENUMERAL_TYPE:
3928 case CHAR_TYPE:
3929 utype = lang_hooks.types.unsigned_type (etype);
3930 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3931 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3932 integer_one_node, 1);
3933 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3934 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3935 minv, 1, maxv, 1)))
3937 etype = utype;
3938 high = fold_convert (etype, high);
3939 low = fold_convert (etype, low);
3940 exp = fold_convert (etype, exp);
3941 value = const_binop (MINUS_EXPR, high, low, 0);
3943 break;
3944 default:
3945 break;
3949 if (value != 0 && ! TREE_OVERFLOW (value))
3950 return build_range_check (type,
3951 fold_build2 (MINUS_EXPR, etype, exp, low),
3952 1, fold_convert (etype, integer_zero_node),
3953 value);
3955 return 0;
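/* For illustration: the subtraction rewrite above turns a two-ended
   test into one unsigned comparison; e.g. the range check for
   4 <= x && x <= 9 becomes, in effect, (unsigned) (x - 4) <= 5,
   since x - 4 wraps to a large value whenever x < 4.  */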
3958 /* Given two ranges, see if we can merge them into one. Return 1 if we
3959 can, 0 if we can't. Set the output range into the specified parameters. */
3961 static int
3962 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3963 tree high0, int in1_p, tree low1, tree high1)
3965 int no_overlap;
3966 int subset;
3967 int temp;
3968 tree tem;
3969 int in_p;
3970 tree low, high;
3971 int lowequal = ((low0 == 0 && low1 == 0)
3972 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3973 low0, 0, low1, 0)));
3974 int highequal = ((high0 == 0 && high1 == 0)
3975 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3976 high0, 1, high1, 1)));
3978 /* Make range 0 be the range that starts first, or ends last if they
3979 start at the same value. Swap them if that isn't the case. */
3980 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3981 low0, 0, low1, 0))
3982 || (lowequal
3983 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3984 high1, 1, high0, 1))))
3986 temp = in0_p, in0_p = in1_p, in1_p = temp;
3987 tem = low0, low0 = low1, low1 = tem;
3988 tem = high0, high0 = high1, high1 = tem;
3991 /* Now flag two cases, whether the ranges are disjoint or whether the
3992 second range is totally subsumed in the first. Note that the tests
3993 below are simplified by the ones above. */
3994 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3995 high0, 1, low1, 0));
3996 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3997 high1, 1, high0, 1));
3999 /* We now have four cases, depending on whether we are including or
4000 excluding the two ranges. */
4001 if (in0_p && in1_p)
4003 /* If they don't overlap, the result is false. If the second range
4004 is a subset it is the result. Otherwise, the range is from the start
4005 of the second to the end of the first. */
4006 if (no_overlap)
4007 in_p = 0, low = high = 0;
4008 else if (subset)
4009 in_p = 1, low = low1, high = high1;
4010 else
4011 in_p = 1, low = low1, high = high0;
4014 else if (in0_p && ! in1_p)
4016 /* If they don't overlap, the result is the first range. If they are
4017 equal, the result is false. If the second range is a subset of the
4018 first, and the ranges begin at the same place, we go from just after
4019 the end of the first range to the end of the second. If the second
4020 range is not a subset of the first, or if it is a subset and both
4021 ranges end at the same place, the range starts at the start of the
4022 first range and ends just before the second range.
4023 Otherwise, we can't describe this as a single range. */
4024 if (no_overlap)
4025 in_p = 1, low = low0, high = high0;
4026 else if (lowequal && highequal)
4027 in_p = 0, low = high = 0;
4028 else if (subset && lowequal)
4030 in_p = 1, high = high0;
4031 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4032 integer_one_node, 0);
4034 else if (! subset || highequal)
4036 in_p = 1, low = low0;
4037 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4038 integer_one_node, 0);
4040 else
4041 return 0;
4044 else if (! in0_p && in1_p)
4046 /* If they don't overlap, the result is the second range. If the second
4047 is a subset of the first, the result is false. Otherwise,
4048 the range starts just after the first range and ends at the
4049 end of the second. */
4050 if (no_overlap)
4051 in_p = 1, low = low1, high = high1;
4052 else if (subset || highequal)
4053 in_p = 0, low = high = 0;
4054 else
4056 in_p = 1, high = high1;
4057 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4058 integer_one_node, 0);
4062 else
4064 /* The case where we are excluding both ranges. Here the complex case
4065 is if they don't overlap. In that case, the only time we have a
4066 range is if they are adjacent. If the second is a subset of the
4067 first, the result is the first. Otherwise, the range to exclude
4068 starts at the beginning of the first range and ends at the end of the
4069 second. */
4070 if (no_overlap)
4072 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4073 range_binop (PLUS_EXPR, NULL_TREE,
4074 high0, 1,
4075 integer_one_node, 1),
4076 1, low1, 0)))
4077 in_p = 0, low = low0, high = high1;
4078 else
4080 /* Canonicalize - [min, x] into - [-, x]. */
4081 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4082 switch (TREE_CODE (TREE_TYPE (low0)))
4084 case ENUMERAL_TYPE:
4085 if (TYPE_PRECISION (TREE_TYPE (low0))
4086 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4087 break;
4088 /* FALLTHROUGH */
4089 case INTEGER_TYPE:
4090 case CHAR_TYPE:
4091 if (tree_int_cst_equal (low0,
4092 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4093 low0 = 0;
4094 break;
4095 case POINTER_TYPE:
4096 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4097 && integer_zerop (low0))
4098 low0 = 0;
4099 break;
4100 default:
4101 break;
4104 /* Canonicalize - [x, max] into - [x, -]. */
4105 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4106 switch (TREE_CODE (TREE_TYPE (high1)))
4108 case ENUMERAL_TYPE:
4109 if (TYPE_PRECISION (TREE_TYPE (high1))
4110 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4111 break;
4112 /* FALLTHROUGH */
4113 case INTEGER_TYPE:
4114 case CHAR_TYPE:
4115 if (tree_int_cst_equal (high1,
4116 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4117 high1 = 0;
4118 break;
4119 case POINTER_TYPE:
4120 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4121 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4122 high1, 1,
4123 integer_one_node, 1)))
4124 high1 = 0;
4125 break;
4126 default:
4127 break;
4130 /* The ranges might also be adjacent between the maximum and
4131 minimum values of the given type. For
4132 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4133 return + [x + 1, y - 1]. */
4134 if (low0 == 0 && high1 == 0)
4136 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4137 integer_one_node, 1);
4138 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4139 integer_one_node, 0);
4140 if (low == 0 || high == 0)
4141 return 0;
4143 in_p = 1;
4145 else
4146 return 0;
4149 else if (subset)
4150 in_p = 0, low = low0, high = high0;
4151 else
4152 in_p = 0, low = low0, high = high1;
4155 *pin_p = in_p, *plow = low, *phigh = high;
4156 return 1;
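/* For illustration: merging the two included ranges [0, 9] and
   [5, 20] takes the in0_p && in1_p, overlapping, non-subset path
   above and yields the intersection [5, 9].  */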
4160 /* Subroutine of fold, looking inside expressions of the form
4161 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4162 of the COND_EXPR. This function is also used to optimize
4163 A op B ? C : A, by reversing the comparison first.
4165 Return a folded expression whose code is not a COND_EXPR
4166 anymore, or NULL_TREE if no folding opportunity is found. */
4168 static tree
4169 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4171 enum tree_code comp_code = TREE_CODE (arg0);
4172 tree arg00 = TREE_OPERAND (arg0, 0);
4173 tree arg01 = TREE_OPERAND (arg0, 1);
4174 tree arg1_type = TREE_TYPE (arg1);
4175 tree tem;
4177 STRIP_NOPS (arg1);
4178 STRIP_NOPS (arg2);
4180 /* If we have A op 0 ? A : -A, consider applying the following
4181 transformations:
4183 A == 0? A : -A same as -A
4184 A != 0? A : -A same as A
4185 A >= 0? A : -A same as abs (A)
4186 A > 0? A : -A same as abs (A)
4187 A <= 0? A : -A same as -abs (A)
4188 A < 0? A : -A same as -abs (A)
4190 None of these transformations work for modes with signed
4191 zeros. If A is +/-0, the first two transformations will
4192 change the sign of the result (from +0 to -0, or vice
4193 versa). The last four will fix the sign of the result,
4194 even though the original expressions could be positive or
4195 negative, depending on the sign of A.
4197 Note that all these transformations are correct if A is
4198 NaN, since the two alternatives (A and -A) are also NaNs. */
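/* For illustration of the signed-zero hazard: when A is -0.0, the
   source expression A == 0 ? A : -A evaluates to -0.0, while the
   suggested replacement -A evaluates to +0.0, so the sign of the
   result would change.  */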
4199 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4200 ? real_zerop (arg01)
4201 : integer_zerop (arg01))
4202 && ((TREE_CODE (arg2) == NEGATE_EXPR
4203 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4204 /* In the case that A is of the form X-Y, '-A' (arg2) may
4205 have already been folded to Y-X, check for that. */
4206 || (TREE_CODE (arg1) == MINUS_EXPR
4207 && TREE_CODE (arg2) == MINUS_EXPR
4208 && operand_equal_p (TREE_OPERAND (arg1, 0),
4209 TREE_OPERAND (arg2, 1), 0)
4210 && operand_equal_p (TREE_OPERAND (arg1, 1),
4211 TREE_OPERAND (arg2, 0), 0))))
4212 switch (comp_code)
4214 case EQ_EXPR:
4215 case UNEQ_EXPR:
4216 tem = fold_convert (arg1_type, arg1);
4217 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4218 case NE_EXPR:
4219 case LTGT_EXPR:
4220 return pedantic_non_lvalue (fold_convert (type, arg1));
4221 case UNGE_EXPR:
4222 case UNGT_EXPR:
4223 if (flag_trapping_math)
4224 break;
4225 /* Fall through. */
4226 case GE_EXPR:
4227 case GT_EXPR:
4228 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4229 arg1 = fold_convert (lang_hooks.types.signed_type
4230 (TREE_TYPE (arg1)), arg1);
4231 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4232 return pedantic_non_lvalue (fold_convert (type, tem));
4233 case UNLE_EXPR:
4234 case UNLT_EXPR:
4235 if (flag_trapping_math)
4236 break;
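/* Fall through. */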
4237 case LE_EXPR:
4238 case LT_EXPR:
4239 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4240 arg1 = fold_convert (lang_hooks.types.signed_type
4241 (TREE_TYPE (arg1)), arg1);
4242 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4243 return negate_expr (fold_convert (type, tem));
4244 default:
4245 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4246 break;
4249 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4250 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4251 both transformations are correct when A is NaN: A != 0
4252 is then true, and A == 0 is false. */
4254 if (integer_zerop (arg01) && integer_zerop (arg2))
4256 if (comp_code == NE_EXPR)
4257 return pedantic_non_lvalue (fold_convert (type, arg1));
4258 else if (comp_code == EQ_EXPR)
4259 return fold_convert (type, integer_zero_node);
4262 /* Try some transformations of A op B ? A : B.
4264 A == B? A : B same as B
4265 A != B? A : B same as A
4266 A >= B? A : B same as max (A, B)
4267 A > B? A : B same as max (B, A)
4268 A <= B? A : B same as min (A, B)
4269 A < B? A : B same as min (B, A)
4271 As above, these transformations don't work in the presence
4272 of signed zeros. For example, if A and B are zeros of
4273 opposite sign, the first two transformations will change
4274 the sign of the result. In the last four, the original
4275 expressions give different results for (A=+0, B=-0) and
4276 (A=-0, B=+0), but the transformed expressions do not.
4278 The first two transformations are correct if either A or B
4279 is a NaN. In the first transformation, the condition will
4280 be false, and B will indeed be chosen. In the case of the
4281 second transformation, the condition A != B will be true,
4282 and A will be chosen.
4284 The conversions to max() and min() are not correct if B is
4285 a number and A is not. The conditions in the original
4286 expressions will be false, so all four give B. The min()
4287 and max() versions would give a NaN instead. */
4288 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4289 /* Avoid these transformations if the COND_EXPR may be used
4290 as an lvalue in the C++ front-end. PR c++/19199. */
4291 && (in_gimple_form
4292 || strcmp (lang_hooks.name, "GNU C++") != 0
4293 || ! maybe_lvalue_p (arg1)
4294 || ! maybe_lvalue_p (arg2)))
4296 tree comp_op0 = arg00;
4297 tree comp_op1 = arg01;
4298 tree comp_type = TREE_TYPE (comp_op0);
4300 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4301 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4303 comp_type = type;
4304 comp_op0 = arg1;
4305 comp_op1 = arg2;
4308 switch (comp_code)
4310 case EQ_EXPR:
4311 return pedantic_non_lvalue (fold_convert (type, arg2));
4312 case NE_EXPR:
4313 return pedantic_non_lvalue (fold_convert (type, arg1));
4314 case LE_EXPR:
4315 case LT_EXPR:
4316 case UNLE_EXPR:
4317 case UNLT_EXPR:
4318 /* In C++ a ?: expression can be an lvalue, so put the
4319 operand which will be used if they are equal first
4320 so that we can convert this back to the
4321 corresponding COND_EXPR. */
4322 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4324 comp_op0 = fold_convert (comp_type, comp_op0);
4325 comp_op1 = fold_convert (comp_type, comp_op1);
4326 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4327 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4328 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4329 return pedantic_non_lvalue (fold_convert (type, tem));
4331 break;
4332 case GE_EXPR:
4333 case GT_EXPR:
4334 case UNGE_EXPR:
4335 case UNGT_EXPR:
4336 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4338 comp_op0 = fold_convert (comp_type, comp_op0);
4339 comp_op1 = fold_convert (comp_type, comp_op1);
4340 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4341 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4342 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4343 return pedantic_non_lvalue (fold_convert (type, tem));
4345 break;
4346 case UNEQ_EXPR:
4347 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4348 return pedantic_non_lvalue (fold_convert (type, arg2));
4349 break;
4350 case LTGT_EXPR:
4351 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4352 return pedantic_non_lvalue (fold_convert (type, arg1));
4353 break;
4354 default:
4355 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4356 break;
4360 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4361 we might still be able to simplify this. For example,
4362 if C1 is one less or one more than C2, this might have started
4363 out as a MIN or MAX and been transformed by this function.
4364 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4366 if (INTEGRAL_TYPE_P (type)
4367 && TREE_CODE (arg01) == INTEGER_CST
4368 && TREE_CODE (arg2) == INTEGER_CST)
4369 switch (comp_code)
4371 case EQ_EXPR:
4372 /* We can replace A with C1 in this case. */
4373 arg1 = fold_convert (type, arg01);
4374 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4376 case LT_EXPR:
4377 /* If C1 is C2 + 1, this is min(A, C2). */
4378 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4379 OEP_ONLY_CONST)
4380 && operand_equal_p (arg01,
4381 const_binop (PLUS_EXPR, arg2,
4382 integer_one_node, 0),
4383 OEP_ONLY_CONST))
4384 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4385 type, arg1, arg2));
4386 break;
4388 case LE_EXPR:
4389 /* If C1 is C2 - 1, this is min(A, C2). */
4390 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4391 OEP_ONLY_CONST)
4392 && operand_equal_p (arg01,
4393 const_binop (MINUS_EXPR, arg2,
4394 integer_one_node, 0),
4395 OEP_ONLY_CONST))
4396 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4397 type, arg1, arg2));
4398 break;
4400 case GT_EXPR:
4401 /* If C1 is C2 - 1, this is max(A, C2). */
4402 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4403 OEP_ONLY_CONST)
4404 && operand_equal_p (arg01,
4405 const_binop (MINUS_EXPR, arg2,
4406 integer_one_node, 0),
4407 OEP_ONLY_CONST))
4408 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4409 type, arg1, arg2));
4410 break;
4412 case GE_EXPR:
4413 /* If C1 is C2 + 1, this is max(A, C2). */
4414 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4415 OEP_ONLY_CONST)
4416 && operand_equal_p (arg01,
4417 const_binop (PLUS_EXPR, arg2,
4418 integer_one_node, 0),
4419 OEP_ONLY_CONST))
4420 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4421 type, arg1, arg2));
4422 break;
4423 case NE_EXPR:
4424 break;
4425 default:
4426 gcc_unreachable ();
4429 return NULL_TREE;
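/* For illustration of the constant case above: x < 6 ? x : 5 has
   C1 == C2 + 1 and folds to MIN (x, 5), while x > 4 ? x : 5 has
   C1 == C2 - 1 and folds to MAX (x, 5).  */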
4434 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4435 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4436 #endif
4438 /* EXP is some logical combination of boolean tests. See if we can
4439 merge it into some range test. Return the new tree if so. */
4441 static tree
4442 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4444 int or_op = (code == TRUTH_ORIF_EXPR
4445 || code == TRUTH_OR_EXPR);
4446 int in0_p, in1_p, in_p;
4447 tree low0, low1, low, high0, high1, high;
4448 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4449 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4450 tree tem;
4452 /* If this is an OR operation, invert both sides; we will invert
4453 again at the end. */
4454 if (or_op)
4455 in0_p = ! in0_p, in1_p = ! in1_p;
4457 /* If both expressions are the same, if we can merge the ranges, and we
4458 can build the range test, return it or it inverted. If one of the
4459 ranges is always true or always false, consider it to be the same
4460 expression as the other. */
4461 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4462 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4463 in1_p, low1, high1)
4464 && 0 != (tem = (build_range_check (type,
4465 lhs != 0 ? lhs
4466 : rhs != 0 ? rhs : integer_zero_node,
4467 in_p, low, high))))
4468 return or_op ? invert_truthvalue (tem) : tem;
4470 /* On machines where branches are expensive, if this is a
4471 short-circuited branch and the underlying object on both sides
4472 is the same, make a non-short-circuit operation. */
4473 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4474 && lhs != 0 && rhs != 0
4475 && (code == TRUTH_ANDIF_EXPR
4476 || code == TRUTH_ORIF_EXPR)
4477 && operand_equal_p (lhs, rhs, 0))
4479 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4480 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4481 which cases we can't do this. */
4482 if (simple_operand_p (lhs))
4483 return build2 (code == TRUTH_ANDIF_EXPR
4484 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4485 type, op0, op1);
4487 else if (lang_hooks.decls.global_bindings_p () == 0
4488 && ! CONTAINS_PLACEHOLDER_P (lhs))
4490 tree common = save_expr (lhs);
4492 if (0 != (lhs = build_range_check (type, common,
4493 or_op ? ! in0_p : in0_p,
4494 low0, high0))
4495 && (0 != (rhs = build_range_check (type, common,
4496 or_op ? ! in1_p : in1_p,
4497 low1, high1))))
4498 return build2 (code == TRUTH_ANDIF_EXPR
4499 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4500 type, lhs, rhs);
4504 return 0;
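/* For illustration: the classic input here is a digit test such as
   ch >= '0' && ch <= '9'.  make_range turns each comparison into a
   range on ch, merge_ranges combines them into one range, and
   build_range_check can then emit a single unsigned comparison
   for it.  */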
4507 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4508 bit value. Arrange things so the extra bits will be set to zero if and
4509 only if C is sign-extended to its full width. If MASK is nonzero,
4510 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4512 static tree
4513 unextend (tree c, int p, int unsignedp, tree mask)
4515 tree type = TREE_TYPE (c);
4516 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4517 tree temp;
4519 if (p == modesize || unsignedp)
4520 return c;
4522 /* We work by getting just the sign bit into the low-order bit, then
4523 into the high-order bit, then sign-extend. We then XOR that value
4524 with C. */
4525 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4526 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4528 /* We must use a signed type in order to get an arithmetic right shift.
4529 However, we must also avoid introducing accidental overflows, so that
4530 a subsequent call to integer_zerop will work. Hence we must
4531 do the type conversion here. At this point, the constant is either
4532 zero or one, and the conversion to a signed type can never overflow.
4533 We could get an overflow if this conversion is done anywhere else. */
4534 if (TYPE_UNSIGNED (type))
4535 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4537 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4538 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4539 if (mask != 0)
4540 temp = const_binop (BIT_AND_EXPR, temp,
4541 fold_convert (TREE_TYPE (c), mask), 0);
4542 /* If necessary, convert the type back to match the type of C. */
4543 if (TYPE_UNSIGNED (type))
4544 temp = fold_convert (type, temp);
4546 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
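/* A worked example, assuming two's complement, an 8-bit mode, a
   4-bit field (P == 4) and no MASK: for C == 0xfa, i.e. the field
   value -6 already sign-extended, the shifts above give
   TEMP == 0xf0 and the result C ^ TEMP == 0x0a, whose extra bits
   are zero; a C that was not sign-extended instead comes back with
   its extra bits set.  */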
4549 /* Find ways of folding logical expressions of LHS and RHS:
4550 Try to merge two comparisons to the same innermost item.
4551 Look for range tests like "ch >= '0' && ch <= '9'".
4552 Look for combinations of simple terms on machines with expensive branches
4553 and evaluate the RHS unconditionally.
4555 For example, if we have p->a == 2 && p->b == 4 and we can make an
4556 object large enough to span both A and B, we can do this with a comparison
4557 against the object ANDed with the a mask.
4559 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4560 operations to do this with one comparison.
4562 We check for both normal comparisons and the BIT_AND_EXPRs made by
4563 this function and the one above.
4565 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4566 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4568 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4569 two operands.
4571 We return the simplified tree or 0 if no optimization is possible. */
4573 static tree
4574 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4576 /* If this is the "or" of two comparisons, we can do something if
4577 the comparisons are NE_EXPR. If this is the "and", we can do something
4578 if the comparisons are EQ_EXPR. I.e.,
4579 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4581 WANTED_CODE is this operation code. For single bit fields, we can
4582 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4583 comparison for one-bit fields. */
4585 enum tree_code wanted_code;
4586 enum tree_code lcode, rcode;
4587 tree ll_arg, lr_arg, rl_arg, rr_arg;
4588 tree ll_inner, lr_inner, rl_inner, rr_inner;
4589 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4590 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4591 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4592 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4593 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4594 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4595 enum machine_mode lnmode, rnmode;
4596 tree ll_mask, lr_mask, rl_mask, rr_mask;
4597 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4598 tree l_const, r_const;
4599 tree lntype, rntype, result;
4600 int first_bit, end_bit;
4601 int volatilep;
4603 /* Start by getting the comparison codes. Fail if anything is volatile.
4604 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4605 it were surrounded with a NE_EXPR. */
4607 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4608 return 0;
4610 lcode = TREE_CODE (lhs);
4611 rcode = TREE_CODE (rhs);
4613 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4615 lhs = build2 (NE_EXPR, truth_type, lhs,
4616 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4617 lcode = NE_EXPR;
4620 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4622 rhs = build2 (NE_EXPR, truth_type, rhs,
4623 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4624 rcode = NE_EXPR;
4627 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4628 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4629 return 0;
4631 ll_arg = TREE_OPERAND (lhs, 0);
4632 lr_arg = TREE_OPERAND (lhs, 1);
4633 rl_arg = TREE_OPERAND (rhs, 0);
4634 rr_arg = TREE_OPERAND (rhs, 1);
4636 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4637 if (simple_operand_p (ll_arg)
4638 && simple_operand_p (lr_arg))
4640 tree result;
4641 if (operand_equal_p (ll_arg, rl_arg, 0)
4642 && operand_equal_p (lr_arg, rr_arg, 0))
4644 result = combine_comparisons (code, lcode, rcode,
4645 truth_type, ll_arg, lr_arg);
4646 if (result)
4647 return result;
4649 else if (operand_equal_p (ll_arg, rr_arg, 0)
4650 && operand_equal_p (lr_arg, rl_arg, 0))
4652 result = combine_comparisons (code, lcode,
4653 swap_tree_comparison (rcode),
4654 truth_type, ll_arg, lr_arg);
4655 if (result)
4656 return result;
4660 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4661 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4663 /* If the RHS can be evaluated unconditionally and its operands are
4664 simple, it wins to evaluate the RHS unconditionally on machines
4665 with expensive branches. In this case, this isn't a comparison
4666 that can be merged. Avoid doing this if the RHS is a floating-point
4667 comparison since those can trap. */
4669 if (BRANCH_COST >= 2
4670 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4671 && simple_operand_p (rl_arg)
4672 && simple_operand_p (rr_arg))
4674 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4675 if (code == TRUTH_OR_EXPR
4676 && lcode == NE_EXPR && integer_zerop (lr_arg)
4677 && rcode == NE_EXPR && integer_zerop (rr_arg)
4678 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4679 return build2 (NE_EXPR, truth_type,
4680 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4681 ll_arg, rl_arg),
4682 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4684 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4685 if (code == TRUTH_AND_EXPR
4686 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4687 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4688 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4689 return build2 (EQ_EXPR, truth_type,
4690 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4691 ll_arg, rl_arg),
4692 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
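/* For illustration: on a target with BRANCH_COST >= 2, the
   transformation above rewrites a == 0 && b == 0 as (a | b) == 0,
   trading the second conditional branch for a cheap bitwise OR.  */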
4694 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4695 return build2 (code, truth_type, lhs, rhs);
4698 /* See if the comparisons can be merged. Then get all the parameters for
4699 each side. */
4701 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4702 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4703 return 0;
4705 volatilep = 0;
4706 ll_inner = decode_field_reference (ll_arg,
4707 &ll_bitsize, &ll_bitpos, &ll_mode,
4708 &ll_unsignedp, &volatilep, &ll_mask,
4709 &ll_and_mask);
4710 lr_inner = decode_field_reference (lr_arg,
4711 &lr_bitsize, &lr_bitpos, &lr_mode,
4712 &lr_unsignedp, &volatilep, &lr_mask,
4713 &lr_and_mask);
4714 rl_inner = decode_field_reference (rl_arg,
4715 &rl_bitsize, &rl_bitpos, &rl_mode,
4716 &rl_unsignedp, &volatilep, &rl_mask,
4717 &rl_and_mask);
4718 rr_inner = decode_field_reference (rr_arg,
4719 &rr_bitsize, &rr_bitpos, &rr_mode,
4720 &rr_unsignedp, &volatilep, &rr_mask,
4721 &rr_and_mask);
4723 /* The inner operation on the lhs of each comparison must be the
4724 same if we are to be able to do anything.
4725 Then see if we have constants. If not, the same must be true for
4726 the rhs's. */
4727 if (volatilep || ll_inner == 0 || rl_inner == 0
4728 || ! operand_equal_p (ll_inner, rl_inner, 0))
4729 return 0;
4731 if (TREE_CODE (lr_arg) == INTEGER_CST
4732 && TREE_CODE (rr_arg) == INTEGER_CST)
4733 l_const = lr_arg, r_const = rr_arg;
4734 else if (lr_inner == 0 || rr_inner == 0
4735 || ! operand_equal_p (lr_inner, rr_inner, 0))
4736 return 0;
4737 else
4738 l_const = r_const = 0;
4740 /* If either comparison code is not correct for our logical operation,
4741 fail. However, we can convert a one-bit comparison against zero into
4742 the opposite comparison against that bit being set in the field. */
4744 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4745 if (lcode != wanted_code)
4747 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4749 /* Make the left operand unsigned, since we are only interested
4750 in the value of one bit. Otherwise we are doing the wrong
4751 thing below. */
4752 ll_unsignedp = 1;
4753 l_const = ll_mask;
4755 else
4756 return 0;
4759 /* This is analogous to the code for l_const above. */
4760 if (rcode != wanted_code)
4762 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4764 rl_unsignedp = 1;
4765 r_const = rl_mask;
4767 else
4768 return 0;
4771 /* After this point all optimizations will generate bit-field
4772 references, which we might not want. */
4773 if (! lang_hooks.can_use_bit_fields_p ())
4774 return 0;
4776 /* See if we can find a mode that contains both fields being compared on
4777 the left. If we can't, fail. Otherwise, update all constants and masks
4778 to be relative to a field of that size. */
4779 first_bit = MIN (ll_bitpos, rl_bitpos);
4780 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4781 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4782 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4783 volatilep);
4784 if (lnmode == VOIDmode)
4785 return 0;
4787 lnbitsize = GET_MODE_BITSIZE (lnmode);
4788 lnbitpos = first_bit & ~ (lnbitsize - 1);
4789 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4790 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4792 if (BYTES_BIG_ENDIAN)
4794 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4795 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4798 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4799 size_int (xll_bitpos), 0);
4800 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4801 size_int (xrl_bitpos), 0);
4803 if (l_const)
4805 l_const = fold_convert (lntype, l_const);
4806 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4807 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4808 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4809 fold_build1 (BIT_NOT_EXPR,
4810 lntype, ll_mask),
4811 0)))
4813 warning ("comparison is always %d", wanted_code == NE_EXPR);
4815 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4818 if (r_const)
4820 r_const = fold_convert (lntype, r_const);
4821 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4822 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4823 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4824 fold_build1 (BIT_NOT_EXPR,
4825 lntype, rl_mask),
4826 0)))
4828 warning ("comparison is always %d", wanted_code == NE_EXPR);
4830 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4834 /* If the right sides are not constant, do the same for it. Also,
4835 disallow this optimization if a size or signedness mismatch occurs
4836 between the left and right sides. */
4837 if (l_const == 0)
4839 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4840 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4841 /* Make sure the two fields on the right
4842 correspond to the left without being swapped. */
4843 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4844 return 0;
4846 first_bit = MIN (lr_bitpos, rr_bitpos);
4847 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4848 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4849 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4850 volatilep);
4851 if (rnmode == VOIDmode)
4852 return 0;
4854 rnbitsize = GET_MODE_BITSIZE (rnmode);
4855 rnbitpos = first_bit & ~ (rnbitsize - 1);
4856 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4857 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4859 if (BYTES_BIG_ENDIAN)
4861 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4862 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4865 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4866 size_int (xlr_bitpos), 0);
4867 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4868 size_int (xrr_bitpos), 0);
4870 /* Make a mask that corresponds to both fields being compared.
4871 Do this for both items being compared. If the operands are the
4872 same size and the bits being compared are in the same position
4873 then we can do this by masking both and comparing the masked
4874 results. */
4875 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4876 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4877 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4879 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4880 ll_unsignedp || rl_unsignedp);
4881 if (! all_ones_mask_p (ll_mask, lnbitsize))
4882 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4884 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4885 lr_unsignedp || rr_unsignedp);
4886 if (! all_ones_mask_p (lr_mask, rnbitsize))
4887 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4889 return build2 (wanted_code, truth_type, lhs, rhs);
4892 /* There is still another way we can do something: If both pairs of
4893 fields being compared are adjacent, we may be able to make a wider
4894 field containing them both.
4896 Note that we still must mask the lhs/rhs expressions. Furthermore,
4897 the mask must be shifted to account for the shift done by
4898 make_bit_field_ref. */
4899 if ((ll_bitsize + ll_bitpos == rl_bitpos
4900 && lr_bitsize + lr_bitpos == rr_bitpos)
4901 || (ll_bitpos == rl_bitpos + rl_bitsize
4902 && lr_bitpos == rr_bitpos + rr_bitsize))
4904 tree type;
4906 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4907 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4908 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4909 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4911 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4912 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4913 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4914 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4916 /* Convert to the smaller type before masking out unwanted bits. */
4917 type = lntype;
4918 if (lntype != rntype)
4920 if (lnbitsize > rnbitsize)
4922 lhs = fold_convert (rntype, lhs);
4923 ll_mask = fold_convert (rntype, ll_mask);
4924 type = rntype;
4926 else if (lnbitsize < rnbitsize)
4928 rhs = fold_convert (lntype, rhs);
4929 lr_mask = fold_convert (lntype, lr_mask);
4930 type = lntype;
4934 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4935 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4937 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4938 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4940 return build2 (wanted_code, truth_type, lhs, rhs);
4943 return 0;
4946 /* Handle the case of comparisons with constants. If there is something in
4947 common between the masks, those bits of the constants must be the same.
4948 If not, the condition is always false. Test for this to avoid generating
4949 incorrect code below. */
4950 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4951 if (! integer_zerop (result)
4952 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4953 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4955 if (wanted_code == NE_EXPR)
4957 warning ("%<or%> of unmatched not-equal tests is always 1");
4958 return constant_boolean_node (true, truth_type);
4960 else
4962 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4963 return constant_boolean_node (false, truth_type);
4967 /* Construct the expression we will return. First get the component
4968 reference we will make. Unless the mask is all ones for the width of
4969 that field, perform the mask operation. Then compare with the
4970 merged constant. */
4971 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4972 ll_unsignedp || rl_unsignedp);
4974 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4975 if (! all_ones_mask_p (ll_mask, lnbitsize))
4976 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4978 return build2 (wanted_code, truth_type, result,
4979 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
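/* For illustration: this is where p->a == 2 && p->b == 4 from the
   comment before this function ends up as a single bit-field load
   spanning both fields, masked with the combined LL_MASK and
   compared against the merged constant.  */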
4982 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4983 constant. */
4985 static tree
4986 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
4988 tree arg0 = op0;
4989 enum tree_code op_code;
4990 tree comp_const = op1;
4991 tree minmax_const;
4992 int consts_equal, consts_lt;
4993 tree inner;
4995 STRIP_SIGN_NOPS (arg0);
4997 op_code = TREE_CODE (arg0);
4998 minmax_const = TREE_OPERAND (arg0, 1);
4999 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5000 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5001 inner = TREE_OPERAND (arg0, 0);
5003 /* If something does not permit us to optimize, return the original tree. */
5004 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5005 || TREE_CODE (comp_const) != INTEGER_CST
5006 || TREE_CONSTANT_OVERFLOW (comp_const)
5007 || TREE_CODE (minmax_const) != INTEGER_CST
5008 || TREE_CONSTANT_OVERFLOW (minmax_const))
5009 return NULL_TREE;
5011 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5012 and GT_EXPR, doing the rest with recursive calls using logical
5013 simplifications. */
5014 switch (code)
5016 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5018 /* FIXME: We should be able to invert code without building a
5019 scratch tree node, but doing so would require us to
5020 duplicate a part of invert_truthvalue here. */
5021 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5022 tem = optimize_minmax_comparison (TREE_CODE (tem),
5023 TREE_TYPE (tem),
5024 TREE_OPERAND (tem, 0),
5025 TREE_OPERAND (tem, 1));
5026 return invert_truthvalue (tem);
5029 case GE_EXPR:
5030 return
5031 fold_build2 (TRUTH_ORIF_EXPR, type,
5032 optimize_minmax_comparison
5033 (EQ_EXPR, type, arg0, comp_const),
5034 optimize_minmax_comparison
5035 (GT_EXPR, type, arg0, comp_const));
5037 case EQ_EXPR:
5038 if (op_code == MAX_EXPR && consts_equal)
5039 /* MAX (X, 0) == 0 -> X <= 0 */
5040 return fold_build2 (LE_EXPR, type, inner, comp_const);
5042 else if (op_code == MAX_EXPR && consts_lt)
5043 /* MAX (X, 0) == 5 -> X == 5 */
5044 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5046 else if (op_code == MAX_EXPR)
5047 /* MAX (X, 0) == -1 -> false */
5048 return omit_one_operand (type, integer_zero_node, inner);
5050 else if (consts_equal)
5051 /* MIN (X, 0) == 0 -> X >= 0 */
5052 return fold_build2 (GE_EXPR, type, inner, comp_const);
5054 else if (consts_lt)
5055 /* MIN (X, 0) == 5 -> false */
5056 return omit_one_operand (type, integer_zero_node, inner);
5058 else
5059 /* MIN (X, 0) == -1 -> X == -1 */
5060 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5062 case GT_EXPR:
5063 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5064 /* MAX (X, 0) > 0 -> X > 0
5065 MAX (X, 0) > 5 -> X > 5 */
5066 return fold_build2 (GT_EXPR, type, inner, comp_const);
5068 else if (op_code == MAX_EXPR)
5069 /* MAX (X, 0) > -1 -> true */
5070 return omit_one_operand (type, integer_one_node, inner);
5072 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5073 /* MIN (X, 0) > 0 -> false
5074 MIN (X, 0) > 5 -> false */
5075 return omit_one_operand (type, integer_zero_node, inner);
5077 else
5078 /* MIN (X, 0) > -1 -> X > -1 */
5079 return fold_build2 (GT_EXPR, type, inner, comp_const);
5081 default:
5082 return NULL_TREE;
5086 /* T is an integer expression that is being multiplied by, divided by,
5087 or taken modulo a constant C (CODE says which, and what kind of
5088 divide or modulus). See if we can eliminate that operation by folding it with
5089 other operations already in T. WIDE_TYPE, if non-null, is a type that
5090 should be used for the computation if wider than our type.
5092 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5093 (X * 2) + (Y * 4). We must, however, be assured that either the original
5094 expression would not overflow or that overflow is undefined for the type
5095 in the language in question.
5097 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5098 the machine has a multiply-accumulate insn or that this is part of an
5099 addressing calculation.
5101 If we return a non-null expression, it is an equivalent form of the
5102 original computation, but need not be in the original type. */
5104 static tree
5105 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5107 /* To avoid exponential search depth, refuse to allow recursion past
5108 three levels. Beyond that (1) it's highly unlikely that we'll find
5109 something interesting and (2) we've probably processed it before
5110 when we built the inner expression. */
5112 static int depth;
5113 tree ret;
5115 if (depth > 3)
5116 return NULL;
5118 depth++;
5119 ret = extract_muldiv_1 (t, c, code, wide_type);
5120 depth--;
5122 return ret;
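/* For illustration: dividing (X * 8) + (Y * 16) by 4, as in the
   comment above, recurses through the PLUS_EXPR case into both
   multiplications and returns (X * 2) + (Y * 4).  */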
5125 static tree
5126 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5128 tree type = TREE_TYPE (t);
5129 enum tree_code tcode = TREE_CODE (t);
5130 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5131 > GET_MODE_SIZE (TYPE_MODE (type)))
5132 ? wide_type : type);
5133 tree t1, t2;
5134 int same_p = tcode == code;
5135 tree op0 = NULL_TREE, op1 = NULL_TREE;
5137 /* Don't deal with constants of zero here; they confuse the code below. */
5138 if (integer_zerop (c))
5139 return NULL_TREE;
5141 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5142 op0 = TREE_OPERAND (t, 0);
5144 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5145 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5147 /* Note that we need not handle conditional operations here since fold
5148 already handles those cases. So just do arithmetic here. */
5149 switch (tcode)
5151 case INTEGER_CST:
5152 /* For a constant, we can always simplify if we are a multiply
5153 or (for divide and modulus) if it is a multiple of our constant. */
5154 if (code == MULT_EXPR
5155 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5156 return const_binop (code, fold_convert (ctype, t),
5157 fold_convert (ctype, c), 0);
5158 break;
5160 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5161 /* If op0 is an expression ... */
5162 if ((COMPARISON_CLASS_P (op0)
5163 || UNARY_CLASS_P (op0)
5164 || BINARY_CLASS_P (op0)
5165 || EXPRESSION_CLASS_P (op0))
5166 /* ... and is unsigned, and its type is smaller than ctype,
5167 then we cannot pass through as widening. */
5168 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5169 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5170 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5171 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5172 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5173 /* ... or this is a truncation (t is narrower than op0),
5174 then we cannot pass through this narrowing. */
5175 || (GET_MODE_SIZE (TYPE_MODE (type))
5176 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5177 /* ... or signedness changes for division or modulus,
5178 then we cannot pass through this conversion. */
5179 || (code != MULT_EXPR
5180 && (TYPE_UNSIGNED (ctype)
5181 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5182 break;
5184 /* Pass the constant down and see if we can make a simplification. If
5185 we can, replace this expression with the inner simplification for
5186 possible later conversion to our or some other type. */
5187 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5188 && TREE_CODE (t2) == INTEGER_CST
5189 && ! TREE_CONSTANT_OVERFLOW (t2)
5190 && (0 != (t1 = extract_muldiv (op0, t2, code,
5191 code == MULT_EXPR
5192 ? ctype : NULL_TREE))))
5193 return t1;
5194 break;
5196 case ABS_EXPR:
5197 /* If widening the type changes it from signed to unsigned, then we
5198 must avoid building ABS_EXPR itself as unsigned. */
5199 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5201 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5202 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5204 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5205 return fold_convert (ctype, t1);
5207 break;
5209 /* FALLTHROUGH */
5210 case NEGATE_EXPR:
5211 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5212 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5213 break;
5215 case MIN_EXPR: case MAX_EXPR:
5216 /* If widening the type changes the signedness, then we can't perform
5217 this optimization as that changes the result. */
5218 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5219 break;
5221 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5222 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5223 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5225 if (tree_int_cst_sgn (c) < 0)
5226 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5228 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5229 fold_convert (ctype, t2));
5231 break;
5233 case LSHIFT_EXPR: case RSHIFT_EXPR:
5234 /* If the second operand is constant, this is a multiplication
5235 or floor division by a power of two, so we can treat it that
5236 way unless the multiplier or divisor overflows. Signed
5237 left-shift overflow is implementation-defined rather than
5238 undefined in C90, so do not convert signed left shift into
5239 multiplication. */
5240 if (TREE_CODE (op1) == INTEGER_CST
5241 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5242 /* const_binop may not detect overflow correctly,
5243 so check for it explicitly here. */
5244 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5245 && TREE_INT_CST_HIGH (op1) == 0
5246 && 0 != (t1 = fold_convert (ctype,
5247 const_binop (LSHIFT_EXPR,
5248 size_one_node,
5249 op1, 0)))
5250 && ! TREE_OVERFLOW (t1))
5251 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5252 ? MULT_EXPR : FLOOR_DIV_EXPR,
5253 ctype, fold_convert (ctype, op0), t1),
5254 c, code, wide_type);
5255 break;
5257 case PLUS_EXPR: case MINUS_EXPR:
5258 /* See if we can eliminate the operation on both sides. If we can, we
5259 can return a new PLUS or MINUS. If we can't, the only remaining
5260 cases where we can do anything are if the second operand is a
5261 constant. */
5262 t1 = extract_muldiv (op0, c, code, wide_type);
5263 t2 = extract_muldiv (op1, c, code, wide_type);
5264 if (t1 != 0 && t2 != 0
5265 && (code == MULT_EXPR
5266 /* If not multiplication, we can only do this if both operands
5267 are divisible by c. */
5268 || (multiple_of_p (ctype, op0, c)
5269 && multiple_of_p (ctype, op1, c))))
5270 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5271 fold_convert (ctype, t2));
5273 /* If this was a subtraction, negate OP1 and set it to be an addition.
5274 This simplifies the logic below. */
5275 if (tcode == MINUS_EXPR)
5276 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5278 if (TREE_CODE (op1) != INTEGER_CST)
5279 break;
5281 /* If either OP1 or C is negative, this optimization is not safe for
5282 some of the division and remainder types while for others we need
5283 to change the code. */
5284 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5286 if (code == CEIL_DIV_EXPR)
5287 code = FLOOR_DIV_EXPR;
5288 else if (code == FLOOR_DIV_EXPR)
5289 code = CEIL_DIV_EXPR;
5290 else if (code != MULT_EXPR
5291 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5292 break;
5295 /* If it's a multiply or a division/modulus operation of a multiple
5296 of our constant, do the operation and verify it doesn't overflow. */
5297 if (code == MULT_EXPR
5298 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5300 op1 = const_binop (code, fold_convert (ctype, op1),
5301 fold_convert (ctype, c), 0);
5302 /* We allow the constant to overflow with wrapping semantics. */
5303 if (op1 == 0
5304 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5305 break;
5307 else
5308 break;
5310 /* If we have an unsigned type that is not a sizetype, we cannot widen
5311 the operation since it will change the result if the original
5312 computation overflowed. */
5313 if (TYPE_UNSIGNED (ctype)
5314 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5315 && ctype != type)
5316 break;
5318 /* If we were able to eliminate our operation from the first side,
5319 apply our operation to the second side and reform the PLUS. */
5320 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5321 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5323 /* The last case is if we are a multiply. In that case, we can
5324 apply the distributive law to commute the multiply and addition
5325 if the multiplication of the constants doesn't overflow. */
5326 if (code == MULT_EXPR)
5327 return fold_build2 (tcode, ctype,
5328 fold_build2 (code, ctype,
5329 fold_convert (ctype, op0),
5330 fold_convert (ctype, c)),
5331 op1);
5333 break;
5335 case MULT_EXPR:
5336 /* We have a special case here if we are doing something like
5337 (C * 8) % 4 since we know that's zero. */
5338 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5339 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5340 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5341 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5342 return omit_one_operand (type, integer_zero_node, op0);
5344 /* ... fall through ... */
5346 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5347 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5348 /* If we can extract our operation from the LHS, do so and return a
5349 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5350 do something only if the second operand is a constant. */
5351 if (same_p
5352 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5353 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5354 fold_convert (ctype, op1));
5355 else if (tcode == MULT_EXPR && code == MULT_EXPR
5356 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5357 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5358 fold_convert (ctype, t1));
5359 else if (TREE_CODE (op1) != INTEGER_CST)
5360 return 0;
5362 /* If these are the same operation types, we can associate them
5363 assuming no overflow. */
5364 if (tcode == code
5365 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5366 fold_convert (ctype, c), 0))
5367 && ! TREE_OVERFLOW (t1))
5368 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5370 /* If these operations "cancel" each other, we have the main
5371 optimizations of this pass, which occur when either constant is a
5372 multiple of the other, in which case we replace this with either an
5373 operation of CODE or TCODE.
5375 If we have an unsigned type that is not a sizetype, we cannot do
5376 this since it will change the result if the original computation
5377 overflowed. */
5378 if ((! TYPE_UNSIGNED (ctype)
5379 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5380 && ! flag_wrapv
5381 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5382 || (tcode == MULT_EXPR
5383 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5384 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5386 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5387 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5388 fold_convert (ctype,
5389 const_binop (TRUNC_DIV_EXPR,
5390 op1, c, 0)));
5391 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5392 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5393 fold_convert (ctype,
5394 const_binop (TRUNC_DIV_EXPR,
5395 c, op1, 0)));
5397 break;
5399 default:
5400 break;
5403 return 0;
5406 /* Return a node which has the indicated constant VALUE (either 0 or
5407 1), and is of the indicated TYPE. */
5409 tree
5410 constant_boolean_node (int value, tree type)
5412 if (type == integer_type_node)
5413 return value ? integer_one_node : integer_zero_node;
5414 else if (type == boolean_type_node)
5415 return value ? boolean_true_node : boolean_false_node;
5416 else
5417 return build_int_cst (type, value);
5421 /* Return true if expr looks like an ARRAY_REF and set base and
5422 offset to the appropriate trees. If there is no offset,
5423 offset is set to NULL_TREE. */
5425 static bool
5426 extract_array_ref (tree expr, tree *base, tree *offset)
5428 /* We have to be careful when stripping nops, since changing
5429 the base type can change the meaning of the offset.
5430 tree inner_expr = expr;
5431 STRIP_NOPS (inner_expr);
5432 /* One canonical form is a PLUS_EXPR with the first
5433 argument being an ADDR_EXPR with a possible NOP_EXPR
5434 attached. */
5435 if (TREE_CODE (expr) == PLUS_EXPR)
5437 tree op0 = TREE_OPERAND (expr, 0);
5438 STRIP_NOPS (op0);
5439 if (TREE_CODE (op0) == ADDR_EXPR)
5441 *base = TREE_OPERAND (expr, 0);
5442 *offset = TREE_OPERAND (expr, 1);
5443 return true;
5446 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5447 which we transform into an ADDR_EXPR with appropriate
5448 offset. For other arguments to the ADDR_EXPR we assume
5449 zero offset and as such do not care about the ADDR_EXPR
5450 type and strip possible nops from it. */
5451 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5453 tree op0 = TREE_OPERAND (inner_expr, 0);
5454 if (TREE_CODE (op0) == ARRAY_REF)
5456 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5457 *offset = TREE_OPERAND (op0, 1);
5459 else
5461 *base = inner_expr;
5462 *offset = NULL_TREE;
5464 return true;
5467 return false;
5471 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5472 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5473 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5474 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5475 COND is the first argument to CODE; otherwise (as in the example
5476 given here), it is the second argument. TYPE is the type of the
5477 original expression. Return NULL_TREE if no simplification is
5478 possible. */
5480 static tree
5481 fold_binary_op_with_conditional_arg (enum tree_code code,
5482 tree type, tree op0, tree op1,
5483 tree cond, tree arg, int cond_first_p)
5485 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5486 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5487 tree test, true_value, false_value;
5488 tree lhs = NULL_TREE;
5489 tree rhs = NULL_TREE;
5491 /* This transformation is only worthwhile if we don't have to wrap
5492 arg in a SAVE_EXPR, and the operation can be simplified on at least
5493 one of the branches once it's pushed inside the COND_EXPR. */
5494 if (!TREE_CONSTANT (arg))
5495 return NULL_TREE;
5497 if (TREE_CODE (cond) == COND_EXPR)
5499 test = TREE_OPERAND (cond, 0);
5500 true_value = TREE_OPERAND (cond, 1);
5501 false_value = TREE_OPERAND (cond, 2);
5502 /* If an arm of the conditional has void type (as a throw
5503 expression does), it does not make sense to try to perform a
5504 logical or arithmetic operation involving it. */
5505 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5506 lhs = true_value;
5507 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5508 rhs = false_value;
5510 else
5512 tree testtype = TREE_TYPE (cond);
5513 test = cond;
5514 true_value = constant_boolean_node (true, testtype);
5515 false_value = constant_boolean_node (false, testtype);
5518 arg = fold_convert (arg_type, arg);
5519 if (lhs == 0)
5521 true_value = fold_convert (cond_type, true_value);
5522 if (cond_first_p)
5523 lhs = fold_build2 (code, type, true_value, arg);
5524 else
5525 lhs = fold_build2 (code, type, arg, true_value);
5527 if (rhs == 0)
5529 false_value = fold_convert (cond_type, false_value);
5530 if (cond_first_p)
5531 rhs = fold_build2 (code, type, false_value, arg);
5532 else
5533 rhs = fold_build2 (code, type, arg, false_value);
5536 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5537 return fold_convert (type, test);
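/* For instance, `(x < y) + 10' becomes `(x < y) ? 11 : 10': the
   comparison is treated as a COND_EXPR with arms 1 and 0, the addition
   is pushed into both arms, and each arm folds to a constant.  */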
5541 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5543 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5544 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5545 ADDEND is the same as X.
5547 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5548 and finite. The problematic cases are when X is zero, and its mode
5549 has signed zeros. In the case of rounding towards -infinity,
5550 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5551 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5553 static bool
5554 fold_real_zero_addition_p (tree type, tree addend, int negate)
5556 if (!real_zerop (addend))
5557 return false;
5559 /* Don't allow the fold with -fsignaling-nans. */
5560 if (HONOR_SNANS (TYPE_MODE (type)))
5561 return false;
5563 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5564 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5565 return true;
5567 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5568 if (TREE_CODE (addend) == REAL_CST
5569 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5570 negate = !negate;
5572 /* The mode has signed zeros, and we have to honor their sign.
5573 In this situation, there is only one case we can return true for.
5574 X - 0 is the same as X unless rounding towards -infinity is
5575 supported. */
5576 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
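/* For example, `x + 0.0' cannot be folded to `x' when signed zeros are
   honored, since (-0.0) + 0.0 is +0.0; `x - 0.0' can be, unless
   rounding towards -infinity must be honored, in which case
   (+0.0) - 0.0 yields -0.0.  */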
5579 /* Subroutine of fold() that checks comparisons of built-in math
5580 functions against real constants.
5582 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5583 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5584 is the type of the result and ARG0 and ARG1 are the operands of the
5585 comparison. ARG1 must be a TREE_REAL_CST.
5587 The function returns the constant folded tree if a simplification
5588 can be made, and NULL_TREE otherwise. */
5590 static tree
5591 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5592 tree type, tree arg0, tree arg1)
5594 REAL_VALUE_TYPE c;
5596 if (BUILTIN_SQRT_P (fcode))
5598 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5599 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5601 c = TREE_REAL_CST (arg1);
5602 if (REAL_VALUE_NEGATIVE (c))
5604 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
5605 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5606 return omit_one_operand (type, integer_zero_node, arg);
5608 /* sqrt(x) > y is always true, if y is negative and we
5609 don't care about NaNs, i.e. negative values of x. */
5610 if (code == NE_EXPR || !HONOR_NANS (mode))
5611 return omit_one_operand (type, integer_one_node, arg);
5613 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5614 return fold_build2 (GE_EXPR, type, arg,
5615 build_real (TREE_TYPE (arg), dconst0));
5617 else if (code == GT_EXPR || code == GE_EXPR)
5619 REAL_VALUE_TYPE c2;
5621 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5622 real_convert (&c2, mode, &c2);
5624 if (REAL_VALUE_ISINF (c2))
5626 /* sqrt(x) > y is x == +Inf, when y is very large. */
5627 if (HONOR_INFINITIES (mode))
5628 return fold_build2 (EQ_EXPR, type, arg,
5629 build_real (TREE_TYPE (arg), c2));
5631 /* sqrt(x) > y is always false, when y is very large
5632 and we don't care about infinities. */
5633 return omit_one_operand (type, integer_zero_node, arg);
5636 /* sqrt(x) > c is the same as x > c*c. */
5637 return fold_build2 (code, type, arg,
5638 build_real (TREE_TYPE (arg), c2));
5640 else if (code == LT_EXPR || code == LE_EXPR)
5642 REAL_VALUE_TYPE c2;
5644 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5645 real_convert (&c2, mode, &c2);
5647 if (REAL_VALUE_ISINF (c2))
5649 /* sqrt(x) < y is always true, when y is a very large
5650 value and we don't care about NaNs or Infinities. */
5651 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5652 return omit_one_operand (type, integer_one_node, arg);
5654 /* sqrt(x) < y is x != +Inf when y is very large and we
5655 don't care about NaNs. */
5656 if (! HONOR_NANS (mode))
5657 return fold_build2 (NE_EXPR, type, arg,
5658 build_real (TREE_TYPE (arg), c2));
5660 /* sqrt(x) < y is x >= 0 when y is very large and we
5661 don't care about Infinities. */
5662 if (! HONOR_INFINITIES (mode))
5663 return fold_build2 (GE_EXPR, type, arg,
5664 build_real (TREE_TYPE (arg), dconst0));
5666 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5667 if (lang_hooks.decls.global_bindings_p () != 0
5668 || CONTAINS_PLACEHOLDER_P (arg))
5669 return NULL_TREE;
5671 arg = save_expr (arg);
5672 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5673 fold_build2 (GE_EXPR, type, arg,
5674 build_real (TREE_TYPE (arg),
5675 dconst0)),
5676 fold_build2 (NE_EXPR, type, arg,
5677 build_real (TREE_TYPE (arg),
5678 c2)));
5681 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5682 if (! HONOR_NANS (mode))
5683 return fold_build2 (code, type, arg,
5684 build_real (TREE_TYPE (arg), c2));
5686 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5687 if (lang_hooks.decls.global_bindings_p () == 0
5688 && ! CONTAINS_PLACEHOLDER_P (arg))
5690 arg = save_expr (arg);
5691 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5692 fold_build2 (GE_EXPR, type, arg,
5693 build_real (TREE_TYPE (arg),
5694 dconst0)),
5695 fold_build2 (code, type, arg,
5696 build_real (TREE_TYPE (arg),
5697 c2)));
5702 return NULL_TREE;
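/* For example, `sqrt(x) > 2.0' becomes `x > 4.0', since both sides of
   the original comparison are nonnegative, and `sqrt(x) > -1.0'
   becomes `x >= 0.0' when NaNs are honored (and constant true when
   they are not).  */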
5705 /* Subroutine of fold() that optimizes comparisons against Infinities,
5706 either +Inf or -Inf.
5708 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5709 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5710 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5712 The function returns the constant folded tree if a simplification
5713 can be made, and NULL_TREE otherwise. */
5715 static tree
5716 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5718 enum machine_mode mode;
5719 REAL_VALUE_TYPE max;
5720 tree temp;
5721 bool neg;
5723 mode = TYPE_MODE (TREE_TYPE (arg0));
5725 /* For negative infinity swap the sense of the comparison. */
5726 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5727 if (neg)
5728 code = swap_tree_comparison (code);
5730 switch (code)
5732 case GT_EXPR:
5733 /* x > +Inf is always false, if we ignore sNaNs. */
5734 if (HONOR_SNANS (mode))
5735 return NULL_TREE;
5736 return omit_one_operand (type, integer_zero_node, arg0);
5738 case LE_EXPR:
5739 /* x <= +Inf is always true, if we don't care about NaNs. */
5740 if (! HONOR_NANS (mode))
5741 return omit_one_operand (type, integer_one_node, arg0);
5743 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5744 if (lang_hooks.decls.global_bindings_p () == 0
5745 && ! CONTAINS_PLACEHOLDER_P (arg0))
5747 arg0 = save_expr (arg0);
5748 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5750 break;
5752 case EQ_EXPR:
5753 case GE_EXPR:
5754 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5755 real_maxval (&max, neg, mode);
5756 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5757 arg0, build_real (TREE_TYPE (arg0), max));
5759 case LT_EXPR:
5760 /* x < +Inf is always equal to x <= DBL_MAX. */
5761 real_maxval (&max, neg, mode);
5762 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5763 arg0, build_real (TREE_TYPE (arg0), max));
5765 case NE_EXPR:
5766 /* x != +Inf is always equal to !(x > DBL_MAX). */
5767 real_maxval (&max, neg, mode);
5768 if (! HONOR_NANS (mode))
5769 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5770 arg0, build_real (TREE_TYPE (arg0), max));
5772 /* The transformation below creates non-gimple code and thus is
5773 not appropriate if we are in gimple form. */
5774 if (in_gimple_form)
5775 return NULL_TREE;
5777 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5778 arg0, build_real (TREE_TYPE (arg0), max));
5779 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5781 default:
5782 break;
5785 return NULL_TREE;
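/* For example, for doubles `x < +Inf' becomes `x <= DBL_MAX',
   `x == +Inf' becomes `x > DBL_MAX', and `x > -Inf' becomes
   `x >= -DBL_MAX' via the swapped comparison.  */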
5788 /* Subroutine of fold() that optimizes comparisons of a division by
5789 a nonzero integer constant against an integer constant, i.e.
5790 X/C1 op C2.
5792 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5793 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5794 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5796 The function returns the constant folded tree if a simplification
5797 can be made, and NULL_TREE otherwise. */
5799 static tree
5800 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5802 tree prod, tmp, hi, lo;
5803 tree arg00 = TREE_OPERAND (arg0, 0);
5804 tree arg01 = TREE_OPERAND (arg0, 1);
5805 unsigned HOST_WIDE_INT lpart;
5806 HOST_WIDE_INT hpart;
5807 int overflow;
5809 /* We have to do this the hard way to detect unsigned overflow.
5810 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5811 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5812 TREE_INT_CST_HIGH (arg01),
5813 TREE_INT_CST_LOW (arg1),
5814 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5815 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5816 prod = force_fit_type (prod, -1, overflow, false);
5818 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5820 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5821 lo = prod;
5823 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5824 overflow = add_double (TREE_INT_CST_LOW (prod),
5825 TREE_INT_CST_HIGH (prod),
5826 TREE_INT_CST_LOW (tmp),
5827 TREE_INT_CST_HIGH (tmp),
5828 &lpart, &hpart);
5829 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5830 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5831 TREE_CONSTANT_OVERFLOW (prod));
5833 else if (tree_int_cst_sgn (arg01) >= 0)
5835 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5836 switch (tree_int_cst_sgn (arg1))
5838 case -1:
5839 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5840 hi = prod;
5841 break;
5843 case 0:
5844 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5845 hi = tmp;
5846 break;
5848 case 1:
5849 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5850 lo = prod;
5851 break;
5853 default:
5854 gcc_unreachable ();
5857 else
5859 /* A negative divisor reverses the relational operators. */
5860 code = swap_tree_comparison (code);
5862 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5863 switch (tree_int_cst_sgn (arg1))
5865 case -1:
5866 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5867 lo = prod;
5868 break;
5870 case 0:
5871 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5872 lo = tmp;
5873 break;
5875 case 1:
5876 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5877 hi = prod;
5878 break;
5880 default:
5881 gcc_unreachable ();
5885 switch (code)
5887 case EQ_EXPR:
5888 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5889 return omit_one_operand (type, integer_zero_node, arg00);
5890 if (TREE_OVERFLOW (hi))
5891 return fold_build2 (GE_EXPR, type, arg00, lo);
5892 if (TREE_OVERFLOW (lo))
5893 return fold_build2 (LE_EXPR, type, arg00, hi);
5894 return build_range_check (type, arg00, 1, lo, hi);
5896 case NE_EXPR:
5897 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5898 return omit_one_operand (type, integer_one_node, arg00);
5899 if (TREE_OVERFLOW (hi))
5900 return fold_build2 (LT_EXPR, type, arg00, lo);
5901 if (TREE_OVERFLOW (lo))
5902 return fold_build2 (GT_EXPR, type, arg00, hi);
5903 return build_range_check (type, arg00, 0, lo, hi);
5905 case LT_EXPR:
5906 if (TREE_OVERFLOW (lo))
5907 return omit_one_operand (type, integer_zero_node, arg00);
5908 return fold_build2 (LT_EXPR, type, arg00, lo);
5910 case LE_EXPR:
5911 if (TREE_OVERFLOW (hi))
5912 return omit_one_operand (type, integer_one_node, arg00);
5913 return fold_build2 (LE_EXPR, type, arg00, hi);
5915 case GT_EXPR:
5916 if (TREE_OVERFLOW (hi))
5917 return omit_one_operand (type, integer_zero_node, arg00);
5918 return fold_build2 (GT_EXPR, type, arg00, hi);
5920 case GE_EXPR:
5921 if (TREE_OVERFLOW (lo))
5922 return omit_one_operand (type, integer_one_node, arg00);
5923 return fold_build2 (GE_EXPR, type, arg00, lo);
5925 default:
5926 break;
5929 return NULL_TREE;
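/* For example, with signed truncating division, `x / 3 == 2' holds
   exactly for x in [6, 8] and so becomes a range check against those
   bounds, while `x / 3 > 2' becomes `x > 8'.  */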
5933 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5934 equality/inequality test, then return a simplified form of
5935 the test using shifts and logical operations. Otherwise return
5936 NULL. TYPE is the desired result type. */
5938 tree
5939 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5940 tree result_type)
5942 /* If this is testing a single bit, we can optimize the test. */
5943 if ((code == NE_EXPR || code == EQ_EXPR)
5944 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5945 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5947 tree inner = TREE_OPERAND (arg0, 0);
5948 tree type = TREE_TYPE (arg0);
5949 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5950 enum machine_mode operand_mode = TYPE_MODE (type);
5951 int ops_unsigned;
5952 tree signed_type, unsigned_type, intermediate_type;
5953 tree arg00;
5955 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5956 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5957 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5958 if (arg00 != NULL_TREE
5959 /* This is only a win if casting to a signed type is cheap,
5960 i.e. when arg00's type is not a partial mode. */
5961 && TYPE_PRECISION (TREE_TYPE (arg00))
5962 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5964 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5965 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5966 result_type, fold_convert (stype, arg00),
5967 fold_convert (stype, integer_zero_node));
5970 /* Otherwise we have (A & C) != 0 where C is a single bit,
5971 convert that into ((A >> C2) & 1), where C2 = log2(C).
5972 Similarly for (A & C) == 0. */
5974 /* If INNER is a right shift of a constant and it plus BITNUM does
5975 not overflow, adjust BITNUM and INNER. */
5976 if (TREE_CODE (inner) == RSHIFT_EXPR
5977 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5978 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5979 && bitnum < TYPE_PRECISION (type)
5980 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5981 TYPE_PRECISION (type) - bitnum))
5983 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5984 inner = TREE_OPERAND (inner, 0);
5987 /* If we are going to be able to omit the AND below, we must do our
5988 operations as unsigned. If we must use the AND, we have a choice.
5989 Normally unsigned is faster, but for some machines signed is. */
5990 #ifdef LOAD_EXTEND_OP
5991 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5992 && !flag_syntax_only) ? 0 : 1;
5993 #else
5994 ops_unsigned = 1;
5995 #endif
5997 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5998 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5999 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6000 inner = fold_convert (intermediate_type, inner);
6002 if (bitnum != 0)
6003 inner = build2 (RSHIFT_EXPR, intermediate_type,
6004 inner, size_int (bitnum));
6006 if (code == EQ_EXPR)
6007 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6008 inner, integer_one_node);
6010 /* Put the AND last so it can combine with more things. */
6011 inner = build2 (BIT_AND_EXPR, intermediate_type,
6012 inner, integer_one_node);
6014 /* Make sure to return the proper type. */
6015 inner = fold_convert (result_type, inner);
6017 return inner;
6019 return NULL_TREE;
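/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and when the
   mask happens to be the sign bit of x's type, `(x & C) != 0' becomes
   simply `x < 0'.  */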
6022 /* Check whether we are allowed to reorder operands arg0 and arg1,
6023 such that the evaluation of arg1 occurs before arg0. */
6025 static bool
6026 reorder_operands_p (tree arg0, tree arg1)
6028 if (! flag_evaluation_order)
6029 return true;
6030 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6031 return true;
6032 return ! TREE_SIDE_EFFECTS (arg0)
6033 && ! TREE_SIDE_EFFECTS (arg1);
6036 /* Test whether it is preferable to swap two operands, ARG0 and
6037 ARG1, for example because ARG0 is an integer constant and ARG1
6038 isn't. If REORDER is true, only recommend swapping if we can
6039 evaluate the operands in reverse order. */
6041 bool
6042 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6044 STRIP_SIGN_NOPS (arg0);
6045 STRIP_SIGN_NOPS (arg1);
6047 if (TREE_CODE (arg1) == INTEGER_CST)
6048 return 0;
6049 if (TREE_CODE (arg0) == INTEGER_CST)
6050 return 1;
6052 if (TREE_CODE (arg1) == REAL_CST)
6053 return 0;
6054 if (TREE_CODE (arg0) == REAL_CST)
6055 return 1;
6057 if (TREE_CODE (arg1) == COMPLEX_CST)
6058 return 0;
6059 if (TREE_CODE (arg0) == COMPLEX_CST)
6060 return 1;
6062 if (TREE_CONSTANT (arg1))
6063 return 0;
6064 if (TREE_CONSTANT (arg0))
6065 return 1;
6067 if (optimize_size)
6068 return 0;
6070 if (reorder && flag_evaluation_order
6071 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6072 return 0;
6074 if (DECL_P (arg1))
6075 return 0;
6076 if (DECL_P (arg0))
6077 return 1;
6079 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6080 for commutative and comparison operators. Ensuring a canonical
6081 form allows the optimizers to find additional redundancies without
6082 having to explicitly check for both orderings. */
6083 if (TREE_CODE (arg0) == SSA_NAME
6084 && TREE_CODE (arg1) == SSA_NAME
6085 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6086 return 1;
6088 return 0;
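/* The net effect of the ordering above is that constants sort last;
   e.g. `5 + x' is canonicalized to `x + 5' by the commutative-operand
   swap in fold_binary.  */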
6091 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6092 ARG0 is extended to a wider type. */
6094 static tree
6095 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6097 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6098 tree arg1_unw;
6099 tree shorter_type, outer_type;
6100 tree min, max;
6101 bool above, below;
6103 if (arg0_unw == arg0)
6104 return NULL_TREE;
6105 shorter_type = TREE_TYPE (arg0_unw);
6107 #ifdef HAVE_canonicalize_funcptr_for_compare
6108 /* Disable this optimization if we're casting a function pointer
6109 type on targets that require function pointer canonicalization. */
6110 if (HAVE_canonicalize_funcptr_for_compare
6111 && TREE_CODE (shorter_type) == POINTER_TYPE
6112 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6113 return NULL_TREE;
6114 #endif
6116 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6117 return NULL_TREE;
6119 arg1_unw = get_unwidened (arg1, shorter_type);
6120 if (!arg1_unw)
6121 return NULL_TREE;
6123 /* If possible, express the comparison in the shorter mode. */
6124 if ((code == EQ_EXPR || code == NE_EXPR
6125 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6126 && (TREE_TYPE (arg1_unw) == shorter_type
6127 || (TREE_CODE (arg1_unw) == INTEGER_CST
6128 && TREE_CODE (shorter_type) == INTEGER_TYPE
6129 && int_fits_type_p (arg1_unw, shorter_type))))
6130 return fold_build2 (code, type, arg0_unw,
6131 fold_convert (shorter_type, arg1_unw));
6133 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6134 return NULL_TREE;
6136 /* If we are comparing with an integer that does not fit into the range
6137 of the shorter type, the result is known. */
6138 outer_type = TREE_TYPE (arg1_unw);
6139 min = lower_bound_in_type (outer_type, shorter_type);
6140 max = upper_bound_in_type (outer_type, shorter_type);
6142 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6143 max, arg1_unw));
6144 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6145 arg1_unw, min));
6147 switch (code)
6149 case EQ_EXPR:
6150 if (above || below)
6151 return omit_one_operand (type, integer_zero_node, arg0);
6152 break;
6154 case NE_EXPR:
6155 if (above || below)
6156 return omit_one_operand (type, integer_one_node, arg0);
6157 break;
6159 case LT_EXPR:
6160 case LE_EXPR:
6161 if (above)
6162 return omit_one_operand (type, integer_one_node, arg0);
6163 else if (below)
6164 return omit_one_operand (type, integer_zero_node, arg0);
break;
6166 case GT_EXPR:
6167 case GE_EXPR:
6168 if (above)
6169 return omit_one_operand (type, integer_zero_node, arg0);
6170 else if (below)
6171 return omit_one_operand (type, integer_one_node, arg0);
break;
6173 default:
6174 break;
6177 return NULL_TREE;
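/* For example, if X has type signed char, `(int) x == 1000' folds to
   constant false because 1000 lies outside [-128, 127], while
   `(int) x == 100' is rewritten in the narrower type as `x == 100'.  */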
6180 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6181 ARG0 just the signedness is changed. */
6183 static tree
6184 fold_sign_changed_comparison (enum tree_code code, tree type,
6185 tree arg0, tree arg1)
6187 tree arg0_inner, tmp;
6188 tree inner_type, outer_type;
6190 if (TREE_CODE (arg0) != NOP_EXPR
6191 && TREE_CODE (arg0) != CONVERT_EXPR)
6192 return NULL_TREE;
6194 outer_type = TREE_TYPE (arg0);
6195 arg0_inner = TREE_OPERAND (arg0, 0);
6196 inner_type = TREE_TYPE (arg0_inner);
6198 #ifdef HAVE_canonicalize_funcptr_for_compare
6199 /* Disable this optimization if we're casting a function pointer
6200 type on targets that require function pointer canonicalization. */
6201 if (HAVE_canonicalize_funcptr_for_compare
6202 && TREE_CODE (inner_type) == POINTER_TYPE
6203 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6204 return NULL_TREE;
6205 #endif
6207 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6208 return NULL_TREE;
6210 if (TREE_CODE (arg1) != INTEGER_CST
6211 && !((TREE_CODE (arg1) == NOP_EXPR
6212 || TREE_CODE (arg1) == CONVERT_EXPR)
6213 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6214 return NULL_TREE;
6216 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6217 && code != NE_EXPR
6218 && code != EQ_EXPR)
6219 return NULL_TREE;
6221 if (TREE_CODE (arg1) == INTEGER_CST)
6223 tmp = build_int_cst_wide (inner_type,
6224 TREE_INT_CST_LOW (arg1),
6225 TREE_INT_CST_HIGH (arg1));
6226 arg1 = force_fit_type (tmp, 0,
6227 TREE_OVERFLOW (arg1),
6228 TREE_CONSTANT_OVERFLOW (arg1));
6230 else
6231 arg1 = fold_convert (inner_type, arg1);
6233 return fold_build2 (code, type, arg0_inner, arg1);
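/* For example, `(unsigned int) i == 5U' with a signed int I folds to
   `i == 5': equality is unaffected by a cast that changes only the
   signedness, so the constant is simply re-expressed in the inner
   type.  */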
6236 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6237 the step of the array. ADDR is the address. MULT is the multiplicative expression.
6238 If the function succeeds, the new address expression is returned. Otherwise
6239 NULL_TREE is returned. */
6241 static tree
6242 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6244 tree s, delta, step;
6245 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6246 tree ref = TREE_OPERAND (addr, 0), pref;
6247 tree ret, pos;
6248 tree itype;
6250 STRIP_NOPS (arg0);
6251 STRIP_NOPS (arg1);
6253 if (TREE_CODE (arg0) == INTEGER_CST)
6255 s = arg0;
6256 delta = arg1;
6258 else if (TREE_CODE (arg1) == INTEGER_CST)
6260 s = arg1;
6261 delta = arg0;
6263 else
6264 return NULL_TREE;
6266 for (;; ref = TREE_OPERAND (ref, 0))
6268 if (TREE_CODE (ref) == ARRAY_REF)
6270 step = array_ref_element_size (ref);
6272 if (TREE_CODE (step) != INTEGER_CST)
6273 continue;
6275 itype = TREE_TYPE (step);
6277 /* If the type sizes do not match, we might run into problems
6278 when one of them would overflow. */
6279 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6280 continue;
6282 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6283 continue;
6285 delta = fold_convert (itype, delta);
6286 break;
6289 if (!handled_component_p (ref))
6290 return NULL_TREE;
6293 /* We found a suitable array reference. So copy everything up to it,
6294 and replace the index. */
6296 pref = TREE_OPERAND (addr, 0);
6297 ret = copy_node (pref);
6298 pos = ret;
6300 while (pref != ref)
6302 pref = TREE_OPERAND (pref, 0);
6303 TREE_OPERAND (pos, 0) = copy_node (pref);
6304 pos = TREE_OPERAND (pos, 0);
6307 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6308 TREE_OPERAND (pos, 1),
6309 delta);
6311 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
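/* For example, for an array of 4-byte ints, `&a[i] + j * 4' becomes
   `&a[i + j]', because the constant factor matches the element size.  */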
6315 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6316 means A >= Y && A != MAX, but in this case we know that
6317 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6319 static tree
6320 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6322 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6324 if (TREE_CODE (bound) == LT_EXPR)
6325 a = TREE_OPERAND (bound, 0);
6326 else if (TREE_CODE (bound) == GT_EXPR)
6327 a = TREE_OPERAND (bound, 1);
6328 else
6329 return NULL_TREE;
6331 typea = TREE_TYPE (a);
6332 if (!INTEGRAL_TYPE_P (typea)
6333 && !POINTER_TYPE_P (typea))
6334 return NULL_TREE;
6336 if (TREE_CODE (ineq) == LT_EXPR)
6338 a1 = TREE_OPERAND (ineq, 1);
6339 y = TREE_OPERAND (ineq, 0);
6341 else if (TREE_CODE (ineq) == GT_EXPR)
6343 a1 = TREE_OPERAND (ineq, 0);
6344 y = TREE_OPERAND (ineq, 1);
6346 else
6347 return NULL_TREE;
6349 if (TREE_TYPE (a1) != typea)
6350 return NULL_TREE;
6352 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6353 if (!integer_onep (diff))
6354 return NULL_TREE;
6356 return fold_build2 (GE_EXPR, type, a, y);
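/* For example, given BOUND `a < x' and INEQ `a + 1 > y', the result is
   `a >= y': the strict inequality may be weakened because the bound
   guarantees that A + 1 does not wrap around.  */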
6359 /* Fold complex addition when both components are accessible by parts.
6360 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6361 or MINUS_EXPR for subtraction. */
6363 static tree
6364 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6366 tree ar, ai, br, bi, rr, ri, inner_type;
6368 if (TREE_CODE (ac) == COMPLEX_EXPR)
6369 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6370 else if (TREE_CODE (ac) == COMPLEX_CST)
6371 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6372 else
6373 return NULL;
6375 if (TREE_CODE (bc) == COMPLEX_EXPR)
6376 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6377 else if (TREE_CODE (bc) == COMPLEX_CST)
6378 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6379 else
6380 return NULL;
6382 inner_type = TREE_TYPE (type);
6384 rr = fold_build2 (code, inner_type, ar, br);
6385 ri = fold_build2 (code, inner_type, ai, bi);
6387 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6390 /* Perform some simplifications of complex multiplication when one or more
6391 of the components are constants or zeros. Return non-null if successful. */
6393 tree
6394 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6396 tree rr, ri, inner_type, zero;
6397 bool ar0, ai0, br0, bi0, bi1;
6399 inner_type = TREE_TYPE (type);
6400 zero = NULL;
6402 if (SCALAR_FLOAT_TYPE_P (inner_type))
6404 ar0 = ai0 = br0 = bi0 = bi1 = false;
6406 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6408 if (TREE_CODE (ar) == REAL_CST
6409 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6410 ar0 = true, zero = ar;
6412 if (TREE_CODE (ai) == REAL_CST
6413 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6414 ai0 = true, zero = ai;
6416 if (TREE_CODE (br) == REAL_CST
6417 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6418 br0 = true, zero = br;
6420 if (TREE_CODE (bi) == REAL_CST)
6422 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6423 bi0 = true, zero = bi;
6424 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6425 bi1 = true;
6428 else
6430 ar0 = integer_zerop (ar);
6431 if (ar0)
6432 zero = ar;
6433 ai0 = integer_zerop (ai);
6434 if (ai0)
6435 zero = ai;
6436 br0 = integer_zerop (br);
6437 if (br0)
6438 zero = br;
6439 bi0 = integer_zerop (bi);
6440 if (bi0)
6442 zero = bi;
6443 bi1 = false;
6445 else
6446 bi1 = integer_onep (bi);
6449 /* We won't optimize anything below unless something is zero. */
6450 if (zero == NULL)
6451 return NULL;
6453 if (ai0 && br0 && bi1)
6455 rr = zero;
6456 ri = ar;
6458 else if (ai0 && bi0)
6460 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6461 ri = zero;
6463 else if (ai0 && br0)
6465 rr = zero;
6466 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6468 else if (ar0 && bi0)
6470 rr = zero;
6471 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6473 else if (ar0 && br0)
6475 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6476 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6477 ri = zero;
6479 else if (bi0)
6481 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6482 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6484 else if (ai0)
6486 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6487 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6489 else if (br0)
6491 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6492 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6493 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6495 else if (ar0)
6497 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6498 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6499 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6501 else
6502 return NULL;
6504 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
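/* For example, multiplying a pure real value (AI == 0) by the
   imaginary unit (BR == 0, BI == 1) yields COMPLEX_EXPR <0, ar>, and
   multiplying two pure imaginary values yields the real value
   -(ai * bi).  */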
6507 static tree
6508 fold_complex_mult (tree type, tree ac, tree bc)
6510 tree ar, ai, br, bi;
6512 if (TREE_CODE (ac) == COMPLEX_EXPR)
6513 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6514 else if (TREE_CODE (ac) == COMPLEX_CST)
6515 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6516 else
6517 return NULL;
6519 if (TREE_CODE (bc) == COMPLEX_EXPR)
6520 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6521 else if (TREE_CODE (bc) == COMPLEX_CST)
6522 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6523 else
6524 return NULL;
6526 return fold_complex_mult_parts (type, ar, ai, br, bi);
6529 /* Perform some simplifications of complex division when one or more of
6530 the components are constants or zeros. Return non-null if successful. */
6532 tree
6533 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6534 enum tree_code code)
6536 tree rr, ri, inner_type, zero;
6537 bool ar0, ai0, br0, bi0, bi1;
6539 inner_type = TREE_TYPE (type);
6540 zero = NULL;
6542 if (SCALAR_FLOAT_TYPE_P (inner_type))
6544 ar0 = ai0 = br0 = bi0 = bi1 = false;
6546 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6548 if (TREE_CODE (ar) == REAL_CST
6549 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6550 ar0 = true, zero = ar;
6552 if (TREE_CODE (ai) == REAL_CST
6553 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6554 ai0 = true, zero = ai;
6556 if (TREE_CODE (br) == REAL_CST
6557 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6558 br0 = true, zero = br;
6560 if (TREE_CODE (bi) == REAL_CST)
6562 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6563 bi0 = true, zero = bi;
6564 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6565 bi1 = true;
6568 else
6570 ar0 = integer_zerop (ar);
6571 if (ar0)
6572 zero = ar;
6573 ai0 = integer_zerop (ai);
6574 if (ai0)
6575 zero = ai;
6576 br0 = integer_zerop (br);
6577 if (br0)
6578 zero = br;
6579 bi0 = integer_zerop (bi);
6580 if (bi0)
6582 zero = bi;
6583 bi1 = false;
6585 else
6586 bi1 = integer_onep (bi);
6589 /* We won't optimize anything below unless something is zero. */
6590 if (zero == NULL)
6591 return NULL;
6593 if (ai0 && bi0)
6595 rr = fold_build2 (code, inner_type, ar, br);
6596 ri = zero;
6598 else if (ai0 && br0)
6600 rr = zero;
6601 ri = fold_build2 (code, inner_type, ar, bi);
6602 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6604 else if (ar0 && bi0)
6606 rr = zero;
6607 ri = fold_build2 (code, inner_type, ai, br);
6609 else if (ar0 && br0)
6611 rr = fold_build2 (code, inner_type, ai, bi);
6612 ri = zero;
6614 else if (bi0)
6616 rr = fold_build2 (code, inner_type, ar, br);
6617 ri = fold_build2 (code, inner_type, ai, br);
6619 else if (br0)
6621 rr = fold_build2 (code, inner_type, ai, bi);
6622 ri = fold_build2 (code, inner_type, ar, bi);
6623 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6625 else
6626 return NULL;
6628 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
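/* For example, dividing a pure imaginary value (AR == 0) by a pure
   real one (BI == 0) yields COMPLEX_EXPR <0, ai / br>.  */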
6631 static tree
6632 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6634 tree ar, ai, br, bi;
6636 if (TREE_CODE (ac) == COMPLEX_EXPR)
6637 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6638 else if (TREE_CODE (ac) == COMPLEX_CST)
6639 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6640 else
6641 return NULL;
6643 if (TREE_CODE (bc) == COMPLEX_EXPR)
6644 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6645 else if (TREE_CODE (bc) == COMPLEX_CST)
6646 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6647 else
6648 return NULL;
6650 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6653 /* Fold a unary expression of code CODE and type TYPE with operand
6654 OP0. Return the folded expression if folding is successful.
6655 Otherwise, return NULL_TREE. */
6657 static tree
6658 fold_unary (enum tree_code code, tree type, tree op0)
6660 tree tem;
6661 tree arg0;
6662 enum tree_code_class kind = TREE_CODE_CLASS (code);
6664 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6665 && TREE_CODE_LENGTH (code) == 1);
6667 arg0 = op0;
6668 if (arg0)
6670 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6672 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6673 STRIP_SIGN_NOPS (arg0);
6675 else
6677 /* Strip any conversions that don't change the mode. This
6678 is safe for every expression, except for a comparison
6679 expression because its signedness is derived from its
6680 operands.
6682 Note that this is done as an internal manipulation within
6683 the constant folder, in order to find the simplest
6684 representation of the arguments so that their form can be
6685 studied. In any case, the appropriate type conversions
6686 should be put back in the tree that will get out of the
6687 constant folder. */
6688 STRIP_NOPS (arg0);
6692 if (TREE_CODE_CLASS (code) == tcc_unary)
6694 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6695 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6696 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6697 else if (TREE_CODE (arg0) == COND_EXPR)
6699 tree arg01 = TREE_OPERAND (arg0, 1);
6700 tree arg02 = TREE_OPERAND (arg0, 2);
6701 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6702 arg01 = fold_build1 (code, type, arg01);
6703 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6704 arg02 = fold_build1 (code, type, arg02);
6705 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6706 arg01, arg02);
6708 /* If this was a conversion, and all we did was to move it
6709 inside the COND_EXPR, bring it back out. But leave it if
6710 it is a conversion from integer to integer and the
6711 result precision is no wider than a word since such a
6712 conversion is cheap and may be optimized away by combine,
6713 while it couldn't if it were outside the COND_EXPR. Then return
6714 so we don't get into an infinite recursion loop taking the
6715 conversion out and then back in. */
6717 if ((code == NOP_EXPR || code == CONVERT_EXPR
6718 || code == NON_LVALUE_EXPR)
6719 && TREE_CODE (tem) == COND_EXPR
6720 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6721 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6722 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
6723 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
6724 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6725 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6726 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6727 && (INTEGRAL_TYPE_P
6728 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6729 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6730 || flag_syntax_only))
6731 tem = build1 (code, type,
6732 build3 (COND_EXPR,
6733 TREE_TYPE (TREE_OPERAND
6734 (TREE_OPERAND (tem, 1), 0)),
6735 TREE_OPERAND (tem, 0),
6736 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6737 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6738 return tem;
6740 else if (COMPARISON_CLASS_P (arg0))
6742 if (TREE_CODE (type) == BOOLEAN_TYPE)
6744 arg0 = copy_node (arg0);
6745 TREE_TYPE (arg0) = type;
6746 return arg0;
6748 else if (TREE_CODE (type) != INTEGER_TYPE)
6749 return fold_build3 (COND_EXPR, type, arg0,
6750 fold_build1 (code, type,
6751 integer_one_node),
6752 fold_build1 (code, type,
6753 integer_zero_node));
6757 switch (code)
6759 case NOP_EXPR:
6760 case FLOAT_EXPR:
6761 case CONVERT_EXPR:
6762 case FIX_TRUNC_EXPR:
6763 case FIX_CEIL_EXPR:
6764 case FIX_FLOOR_EXPR:
6765 case FIX_ROUND_EXPR:
6766 if (TREE_TYPE (op0) == type)
6767 return op0;
6769 /* Handle cases of two conversions in a row. */
6770 if (TREE_CODE (op0) == NOP_EXPR
6771 || TREE_CODE (op0) == CONVERT_EXPR)
6773 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6774 tree inter_type = TREE_TYPE (op0);
6775 int inside_int = INTEGRAL_TYPE_P (inside_type);
6776 int inside_ptr = POINTER_TYPE_P (inside_type);
6777 int inside_float = FLOAT_TYPE_P (inside_type);
6778 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6779 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6780 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6781 int inter_int = INTEGRAL_TYPE_P (inter_type);
6782 int inter_ptr = POINTER_TYPE_P (inter_type);
6783 int inter_float = FLOAT_TYPE_P (inter_type);
6784 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6785 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6786 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6787 int final_int = INTEGRAL_TYPE_P (type);
6788 int final_ptr = POINTER_TYPE_P (type);
6789 int final_float = FLOAT_TYPE_P (type);
6790 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6791 unsigned int final_prec = TYPE_PRECISION (type);
6792 int final_unsignedp = TYPE_UNSIGNED (type);
6794 /* In addition to the cases of two conversions in a row
6795 handled below, if we are converting something to its own
6796 type via an object of identical or wider precision, neither
6797 conversion is needed. */
6798 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6799 && ((inter_int && final_int) || (inter_float && final_float))
6800 && inter_prec >= final_prec)
6801 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6803 /* Likewise, if the intermediate and final types are either both
6804 float or both integer, we don't need the middle conversion if
6805 it is wider than the final type and doesn't change the signedness
6806 (for integers). Avoid this if the final type is a pointer
6807 since then we sometimes need the inner conversion. Likewise if
6808 the outer has a precision not equal to the size of its mode. */
6809 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6810 || (inter_float && inside_float)
6811 || (inter_vec && inside_vec))
6812 && inter_prec >= inside_prec
6813 && (inter_float || inter_vec
6814 || inter_unsignedp == inside_unsignedp)
6815 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6816 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6817 && ! final_ptr
6818 && (! final_vec || inter_prec == inside_prec))
6819 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6821 /* If we have a sign-extension of a zero-extended value, we can
6822 replace that by a single zero-extension. */
6823 if (inside_int && inter_int && final_int
6824 && inside_prec < inter_prec && inter_prec < final_prec
6825 && inside_unsignedp && !inter_unsignedp)
6826 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6828 /* Two conversions in a row are not needed unless:
6829 - some conversion is floating-point (overstrict for now), or
6830 - some conversion is a vector (overstrict for now), or
6831 - the intermediate type is narrower than both initial and
6832 final, or
6833 - the intermediate type and innermost type differ in signedness,
6834 and the outermost type is wider than the intermediate, or
6835 - the initial type is a pointer type and the precisions of the
6836 intermediate and final types differ, or
6837 - the final type is a pointer type and the precisions of the
6838 initial and intermediate types differ. */
6839 if (! inside_float && ! inter_float && ! final_float
6840 && ! inside_vec && ! inter_vec && ! final_vec
6841 && (inter_prec > inside_prec || inter_prec > final_prec)
6842 && ! (inside_int && inter_int
6843 && inter_unsignedp != inside_unsignedp
6844 && inter_prec < final_prec)
6845 && ((inter_unsignedp && inter_prec > inside_prec)
6846 == (final_unsignedp && final_prec > inter_prec))
6847 && ! (inside_ptr && inter_prec != final_prec)
6848 && ! (final_ptr && inside_prec != inter_prec)
6849 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6850 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6851 && ! final_ptr)
6852 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6855 if (TREE_CODE (op0) == MODIFY_EXPR
6856 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6857 /* Detect assigning a bitfield. */
6858 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6859 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6861 /* Don't leave an assignment inside a conversion
6862 unless assigning a bitfield. */
6863 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6864 /* First do the assignment, then return converted constant. */
6865 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6866 TREE_NO_WARNING (tem) = 1;
6867 TREE_USED (tem) = 1;
6868 return tem;
6871 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6872 constant (if x has signed type, the sign bit cannot be set
6873 in c). This folds extension into the BIT_AND_EXPR. */
6874 if (INTEGRAL_TYPE_P (type)
6875 && TREE_CODE (type) != BOOLEAN_TYPE
6876 && TREE_CODE (op0) == BIT_AND_EXPR
6877 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6879 tree and = op0;
6880 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6881 int change = 0;
6883 if (TYPE_UNSIGNED (TREE_TYPE (and))
6884 || (TYPE_PRECISION (type)
6885 <= TYPE_PRECISION (TREE_TYPE (and))))
6886 change = 1;
6887 else if (TYPE_PRECISION (TREE_TYPE (and1))
6888 <= HOST_BITS_PER_WIDE_INT
6889 && host_integerp (and1, 1))
6891 unsigned HOST_WIDE_INT cst;
6893 cst = tree_low_cst (and1, 1);
6894 cst &= (HOST_WIDE_INT) -1
6895 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6896 change = (cst == 0);
6897 #ifdef LOAD_EXTEND_OP
6898 if (change
6899 && !flag_syntax_only
6900 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6901 == ZERO_EXTEND))
6903 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6904 and0 = fold_convert (uns, and0);
6905 and1 = fold_convert (uns, and1);
6907 #endif
6909 if (change)
6911 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6912 TREE_INT_CST_HIGH (and1));
6913 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6914 TREE_CONSTANT_OVERFLOW (and1));
6915 return fold_build2 (BIT_AND_EXPR, type,
6916 fold_convert (type, and0), tem);
6920 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6921 T2 being pointers to types of the same size. */
6922 if (POINTER_TYPE_P (type)
6923 && BINARY_CLASS_P (arg0)
6924 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6925 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6927 tree arg00 = TREE_OPERAND (arg0, 0);
6928 tree t0 = type;
6929 tree t1 = TREE_TYPE (arg00);
6930 tree tt0 = TREE_TYPE (t0);
6931 tree tt1 = TREE_TYPE (t1);
6932 tree s0 = TYPE_SIZE (tt0);
6933 tree s1 = TYPE_SIZE (tt1);
6935 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6936 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6937 TREE_OPERAND (arg0, 1));
6940 tem = fold_convert_const (code, type, arg0);
6941 return tem ? tem : NULL_TREE;
6943 case VIEW_CONVERT_EXPR:
6944 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6945 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6946 return NULL_TREE;
6948 case NEGATE_EXPR:
6949 if (negate_expr_p (arg0))
6950 return fold_convert (type, negate_expr (arg0));
6951 /* Convert - (~A) to A + 1. */
6952 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6953 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6954 build_int_cst (type, 1));
6955 return NULL_TREE;
6957 case ABS_EXPR:
6958 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6959 return fold_abs_const (arg0, type);
6960 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6961 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6962 /* Convert fabs((double)float) into (double)fabsf(float). */
6963 else if (TREE_CODE (arg0) == NOP_EXPR
6964 && TREE_CODE (type) == REAL_TYPE)
6966 tree targ0 = strip_float_extensions (arg0);
6967 if (targ0 != arg0)
6968 return fold_convert (type, fold_build1 (ABS_EXPR,
6969 TREE_TYPE (targ0),
6970 targ0));
6972 else if (tree_expr_nonnegative_p (arg0))
6973 return arg0;
6975 /* Strip sign ops from argument. */
6976 if (TREE_CODE (type) == REAL_TYPE)
6978 tem = fold_strip_sign_ops (arg0);
6979 if (tem)
6980 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6982 return NULL_TREE;
6984 case CONJ_EXPR:
6985 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6986 return fold_convert (type, arg0);
6987 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6988 return build2 (COMPLEX_EXPR, type,
6989 TREE_OPERAND (arg0, 0),
6990 negate_expr (TREE_OPERAND (arg0, 1)));
6991 else if (TREE_CODE (arg0) == COMPLEX_CST)
6992 return build_complex (type, TREE_REALPART (arg0),
6993 negate_expr (TREE_IMAGPART (arg0)));
6994 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6995 return fold_build2 (TREE_CODE (arg0), type,
6996 fold_build1 (CONJ_EXPR, type,
6997 TREE_OPERAND (arg0, 0)),
6998 fold_build1 (CONJ_EXPR, type,
6999 TREE_OPERAND (arg0, 1)));
7000 else if (TREE_CODE (arg0) == CONJ_EXPR)
7001 return TREE_OPERAND (arg0, 0);
7002 return NULL_TREE;
7004 case BIT_NOT_EXPR:
7005 if (TREE_CODE (arg0) == INTEGER_CST)
7006 return fold_not_const (arg0, type);
7007 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7008 return TREE_OPERAND (arg0, 0);
7009 /* Convert ~ (-A) to A - 1. */
7010 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7011 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7012 build_int_cst (type, 1));
7013 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7014 else if (INTEGRAL_TYPE_P (type)
7015 && ((TREE_CODE (arg0) == MINUS_EXPR
7016 && integer_onep (TREE_OPERAND (arg0, 1)))
7017 || (TREE_CODE (arg0) == PLUS_EXPR
7018 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7019 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7020 return NULL_TREE;
7022 case TRUTH_NOT_EXPR:
7023 /* The argument to invert_truthvalue must have Boolean type. */
7024 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7025 arg0 = fold_convert (boolean_type_node, arg0);
7027 /* Note that the operand of this must be an int
7028 and its values must be 0 or 1.
7029 ("true" is a fixed value perhaps depending on the language,
7030 but we don't handle values other than 1 correctly yet.) */
7031 tem = invert_truthvalue (arg0);
7032 /* Avoid infinite recursion. */
7033 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7034 return NULL_TREE;
7035 return fold_convert (type, tem);
7037 case REALPART_EXPR:
7038 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7039 return NULL_TREE;
7040 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7041 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7042 TREE_OPERAND (arg0, 1));
7043 else if (TREE_CODE (arg0) == COMPLEX_CST)
7044 return TREE_REALPART (arg0);
7045 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7046 return fold_build2 (TREE_CODE (arg0), type,
7047 fold_build1 (REALPART_EXPR, type,
7048 TREE_OPERAND (arg0, 0)),
7049 fold_build1 (REALPART_EXPR, type,
7050 TREE_OPERAND (arg0, 1)));
7051 return NULL_TREE;
7053 case IMAGPART_EXPR:
7054 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7055 return fold_convert (type, integer_zero_node);
7056 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7057 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7058 TREE_OPERAND (arg0, 0));
7059 else if (TREE_CODE (arg0) == COMPLEX_CST)
7060 return TREE_IMAGPART (arg0);
7061 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7062 return fold_build2 (TREE_CODE (arg0), type,
7063 fold_build1 (IMAGPART_EXPR, type,
7064 TREE_OPERAND (arg0, 0)),
7065 fold_build1 (IMAGPART_EXPR, type,
7066 TREE_OPERAND (arg0, 1)));
7067 return NULL_TREE;
7069 default:
7070 return NULL_TREE;
7071 } /* switch (code) */
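/* For example, the two-conversions rules above fold `(int) (long) x',
   with X an int, to just `x': the widening intermediate conversion is
   dropped because it cannot change the value.  */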
7074 /* Fold a binary expression of code CODE and type TYPE with operands
7075 OP0 and OP1. Return the folded expression if folding is
7076 successful. Otherwise, return NULL_TREE. */
7078 static tree
7079 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7081 tree t1 = NULL_TREE;
7082 tree tem;
7083 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7084 enum tree_code_class kind = TREE_CODE_CLASS (code);
7086 /* WINS will be nonzero when the switch is done
7087 if all operands are constant. */
7088 int wins = 1;
7090 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7091 && TREE_CODE_LENGTH (code) == 2);
7093 arg0 = op0;
7094 arg1 = op1;
7096 if (arg0)
7098 tree subop;
7100 /* Strip any conversions that don't change the mode. This is
7101 safe for every expression, except for a comparison expression
7102 because its signedness is derived from its operands. So, in
7103 the latter case, only strip conversions that don't change the
7104 signedness.
7106 Note that this is done as an internal manipulation within the
7107 constant folder, in order to find the simplest representation
7108 of the arguments so that their form can be studied. In any
7109 case, the appropriate type conversions should be put back in
7110 the tree that will get out of the constant folder. */
7111 if (kind == tcc_comparison)
7112 STRIP_SIGN_NOPS (arg0);
7113 else
7114 STRIP_NOPS (arg0);
7116 if (TREE_CODE (arg0) == COMPLEX_CST)
7117 subop = TREE_REALPART (arg0);
7118 else
7119 subop = arg0;
7121 if (TREE_CODE (subop) != INTEGER_CST
7122 && TREE_CODE (subop) != REAL_CST)
7123 /* Note that TREE_CONSTANT isn't enough:
7124 static var addresses are constant but we can't
7125 do arithmetic on them. */
7126 wins = 0;
7129 if (arg1)
7131 tree subop;
7133 /* Strip any conversions that don't change the mode. This is
7134 safe for every expression, except for a comparison expression
7135 because its signedness is derived from its operands. So, in
7136 the latter case, only strip conversions that don't change the
7137 signedness.
7139 Note that this is done as an internal manipulation within the
7140 constant folder, in order to find the simplest representation
7141 of the arguments so that their form can be studied. In any
7142 case, the appropriate type conversions should be put back in
7143 the tree that will get out of the constant folder. */
7144 if (kind == tcc_comparison)
7145 STRIP_SIGN_NOPS (arg1);
7146 else
7147 STRIP_NOPS (arg1);
7149 if (TREE_CODE (arg1) == COMPLEX_CST)
7150 subop = TREE_REALPART (arg1);
7151 else
7152 subop = arg1;
7154 if (TREE_CODE (subop) != INTEGER_CST
7155 && TREE_CODE (subop) != REAL_CST)
7156 /* Note that TREE_CONSTANT isn't enough:
7157 static var addresses are constant but we can't
7158 do arithmetic on them. */
7159 wins = 0;
7162 /* If this is a commutative operation, and ARG0 is a constant, move it
7163 to ARG1 to reduce the number of tests below. */
7164 if (commutative_tree_code (code)
7165 && tree_swap_operands_p (arg0, arg1, true))
7166 return fold_build2 (code, type, op1, op0);
7168 /* Now WINS is set as described above,
7169 ARG0 is the first operand of EXPR,
7170 and ARG1 is the second operand (if it has more than one operand).
7172 First check for cases where an arithmetic operation is applied to a
7173 compound, conditional, or comparison operation. Push the arithmetic
7174 operation inside the compound or conditional to see if any folding
7175 can then be done. Convert comparison to conditional for this purpose.
7176 This also optimizes non-constant cases that used to be done in
7177 expand_expr.
7179 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7180 one of the operands is a comparison and the other is a comparison, a
7181 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7182 code below would make the expression more complex. Change it to a
7183 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7184 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7186 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7187 || code == EQ_EXPR || code == NE_EXPR)
7188 && ((truth_value_p (TREE_CODE (arg0))
7189 && (truth_value_p (TREE_CODE (arg1))
7190 || (TREE_CODE (arg1) == BIT_AND_EXPR
7191 && integer_onep (TREE_OPERAND (arg1, 1)))))
7192 || (truth_value_p (TREE_CODE (arg1))
7193 && (truth_value_p (TREE_CODE (arg0))
7194 || (TREE_CODE (arg0) == BIT_AND_EXPR
7195 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7197 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7198 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7199 : TRUTH_XOR_EXPR,
7200 boolean_type_node,
7201 fold_convert (boolean_type_node, arg0),
7202 fold_convert (boolean_type_node, arg1));
7204 if (code == EQ_EXPR)
7205 tem = invert_truthvalue (tem);
7207 return fold_convert (type, tem);
7210 if (TREE_CODE_CLASS (code) == tcc_comparison
7211 && TREE_CODE (arg0) == COMPOUND_EXPR)
7212 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7213 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7214 else if (TREE_CODE_CLASS (code) == tcc_comparison
7215 && TREE_CODE (arg1) == COMPOUND_EXPR)
7216 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7217 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7218 else if (TREE_CODE_CLASS (code) == tcc_binary
7219 || TREE_CODE_CLASS (code) == tcc_comparison)
7221 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7222 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7223 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7224 arg1));
7225 if (TREE_CODE (arg1) == COMPOUND_EXPR
7226 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7227 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7228 fold_build2 (code, type,
7229 arg0, TREE_OPERAND (arg1, 1)));
7231 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7233 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7234 arg0, arg1,
7235 /*cond_first_p=*/1);
7236 if (tem != NULL_TREE)
7237 return tem;
7240 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7242 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7243 arg1, arg0,
7244 /*cond_first_p=*/0);
7245 if (tem != NULL_TREE)
7246 return tem;
7250 switch (code)
7252 case PLUS_EXPR:
7253 /* A + (-B) -> A - B */
7254 if (TREE_CODE (arg1) == NEGATE_EXPR)
7255 return fold_build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7256 /* (-A) + B -> B - A */
7257 if (TREE_CODE (arg0) == NEGATE_EXPR
7258 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7259 return fold_build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0));
7260 /* Convert ~A + 1 to -A. */
7261 if (INTEGRAL_TYPE_P (type)
7262 && TREE_CODE (arg0) == BIT_NOT_EXPR
7263 && integer_onep (arg1))
7264 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7266 if (TREE_CODE (type) == COMPLEX_TYPE)
7268 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7269 if (tem)
7270 return tem;
7273 if (! FLOAT_TYPE_P (type))
7275 if (integer_zerop (arg1))
7276 return non_lvalue (fold_convert (type, arg0));
7278 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7279 with a constant, and the two constants have no bits in common,
7280 we should treat this as a BIT_IOR_EXPR since this may produce more
7281 simplifications. */
7282 if (TREE_CODE (arg0) == BIT_AND_EXPR
7283 && TREE_CODE (arg1) == BIT_AND_EXPR
7284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7285 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7286 && integer_zerop (const_binop (BIT_AND_EXPR,
7287 TREE_OPERAND (arg0, 1),
7288 TREE_OPERAND (arg1, 1), 0)))
7290 code = BIT_IOR_EXPR;
7291 goto bit_ior;
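/* Worked example (illustrative, not from the original): the two mask
constants must share no bits, so the addition can never carry; for
unsigned x, (x & 0x0f) + (x & 0xf0) is handled as
(x & 0x0f) | (x & 0xf0), which the BIT_IOR_EXPR code can then
distribute into x & 0xff. */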
7294 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7295 (plus (plus (mult) (mult)) (foo)) so that we can
7296 take advantage of the factoring cases below. */
7297 if (((TREE_CODE (arg0) == PLUS_EXPR
7298 || TREE_CODE (arg0) == MINUS_EXPR)
7299 && TREE_CODE (arg1) == MULT_EXPR)
7300 || ((TREE_CODE (arg1) == PLUS_EXPR
7301 || TREE_CODE (arg1) == MINUS_EXPR)
7302 && TREE_CODE (arg0) == MULT_EXPR))
7304 tree parg0, parg1, parg, marg;
7305 enum tree_code pcode;
7307 if (TREE_CODE (arg1) == MULT_EXPR)
7308 parg = arg0, marg = arg1;
7309 else
7310 parg = arg1, marg = arg0;
7311 pcode = TREE_CODE (parg);
7312 parg0 = TREE_OPERAND (parg, 0);
7313 parg1 = TREE_OPERAND (parg, 1);
7314 STRIP_NOPS (parg0);
7315 STRIP_NOPS (parg1);
7317 if (TREE_CODE (parg0) == MULT_EXPR
7318 && TREE_CODE (parg1) != MULT_EXPR)
7319 return fold_build2 (pcode, type,
7320 fold_build2 (PLUS_EXPR, type,
7321 fold_convert (type, parg0),
7322 fold_convert (type, marg)),
7323 fold_convert (type, parg1));
7324 if (TREE_CODE (parg0) != MULT_EXPR
7325 && TREE_CODE (parg1) == MULT_EXPR)
7326 return fold_build2 (PLUS_EXPR, type,
7327 fold_convert (type, parg0),
7328 fold_build2 (pcode, type,
7329 fold_convert (type, marg),
7330 fold_convert (type,
7331 parg1)));
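/* Illustrative example (hypothetical operands): this reassociation
turns (a*c + b) + d*c into (a*c + d*c) + b, so the
(A * C) + (B * C) -> (A + B) * C factoring just below can yield
(a + d)*c + b. */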
7334 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7336 tree arg00, arg01, arg10, arg11;
7337 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7339 /* (A * C) + (B * C) -> (A+B) * C.
7340 We are most concerned about the case where C is a constant,
7341 but other combinations show up during loop reduction. Since
7342 it is not difficult, try all four possibilities. */
7344 arg00 = TREE_OPERAND (arg0, 0);
7345 arg01 = TREE_OPERAND (arg0, 1);
7346 arg10 = TREE_OPERAND (arg1, 0);
7347 arg11 = TREE_OPERAND (arg1, 1);
7348 same = NULL_TREE;
7350 if (operand_equal_p (arg01, arg11, 0))
7351 same = arg01, alt0 = arg00, alt1 = arg10;
7352 else if (operand_equal_p (arg00, arg10, 0))
7353 same = arg00, alt0 = arg01, alt1 = arg11;
7354 else if (operand_equal_p (arg00, arg11, 0))
7355 same = arg00, alt0 = arg01, alt1 = arg10;
7356 else if (operand_equal_p (arg01, arg10, 0))
7357 same = arg01, alt0 = arg00, alt1 = arg11;
7359 /* No identical multiplicands; see if we can find a common
7360 power-of-two factor in non-power-of-two multiplies. This
7361 can help in multi-dimensional array access. */
7362 else if (TREE_CODE (arg01) == INTEGER_CST
7363 && TREE_CODE (arg11) == INTEGER_CST
7364 && TREE_INT_CST_HIGH (arg01) == 0
7365 && TREE_INT_CST_HIGH (arg11) == 0)
7367 HOST_WIDE_INT int01, int11, tmp;
7368 int01 = TREE_INT_CST_LOW (arg01);
7369 int11 = TREE_INT_CST_LOW (arg11);
7371 /* Move min of absolute values to int11. */
7372 if ((int01 >= 0 ? int01 : -int01)
7373 < (int11 >= 0 ? int11 : -int11))
7375 tmp = int01, int01 = int11, int11 = tmp;
7376 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7377 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7380 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7382 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7383 build_int_cst (NULL_TREE,
7384 int01 / int11));
7385 alt1 = arg10;
7386 same = arg11;
7390 if (same)
7391 return fold_build2 (MULT_EXPR, type,
7392 fold_build2 (PLUS_EXPR, type,
7393 fold_convert (type, alt0),
7394 fold_convert (type, alt1)),
7395 same);
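/* Illustrative example (hypothetical operands): i*12 + j*4 has no
identical multiplicand, but 4 is a power of two that divides 12, so
the code above rewrites it as (i*3 + j) * 4, the shape produced by
nested array indexing. */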
7398 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7399 of the array. The loop optimizer sometimes produces this type of
7400 expression. */
7401 if (TREE_CODE (arg0) == ADDR_EXPR
7402 && TREE_CODE (arg1) == MULT_EXPR)
7404 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7405 if (tem)
7406 return fold_convert (type, fold (tem));
7408 else if (TREE_CODE (arg1) == ADDR_EXPR
7409 && TREE_CODE (arg0) == MULT_EXPR)
7411 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7412 if (tem)
7413 return fold_convert (type, fold (tem));
7416 else
7418 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7419 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7420 return non_lvalue (fold_convert (type, arg0));
7422 /* Likewise if the operands are reversed. */
7423 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7424 return non_lvalue (fold_convert (type, arg1));
7426 /* Convert X + -C into X - C. */
7427 if (TREE_CODE (arg1) == REAL_CST
7428 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7430 tem = fold_negate_const (arg1, type);
7431 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7432 return fold_build2 (MINUS_EXPR, type,
7433 fold_convert (type, arg0),
7434 fold_convert (type, tem));
7437 /* Convert x+x into x*2.0. */
7438 if (operand_equal_p (arg0, arg1, 0)
7439 && SCALAR_FLOAT_TYPE_P (type))
7440 return fold_build2 (MULT_EXPR, type, arg0,
7441 build_real (type, dconst2));
7443 /* Convert x*c+x into x*(c+1). */
7444 if (flag_unsafe_math_optimizations
7445 && TREE_CODE (arg0) == MULT_EXPR
7446 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7447 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7450 REAL_VALUE_TYPE c;
7452 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7453 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7454 return fold_build2 (MULT_EXPR, type, arg1,
7455 build_real (type, c));
7458 /* Convert x+x*c into x*(c+1). */
7459 if (flag_unsafe_math_optimizations
7460 && TREE_CODE (arg1) == MULT_EXPR
7461 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7462 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7463 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7465 REAL_VALUE_TYPE c;
7467 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7468 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7469 return fold_build2 (MULT_EXPR, type, arg0,
7470 build_real (type, c));
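/* Illustrative example (hypothetical operands): with
-funsafe-math-optimizations, x*3.0 + x and x + x*3.0 both become
x*4.0. The flag is required because x*3.0 + x rounds twice while
x*4.0 rounds at most once, so results can differ for IEEE values. */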
7473 /* Convert x*c1+x*c2 into x*(c1+c2). */
7474 if (flag_unsafe_math_optimizations
7475 && TREE_CODE (arg0) == MULT_EXPR
7476 && TREE_CODE (arg1) == MULT_EXPR
7477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7478 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7479 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7480 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7481 && operand_equal_p (TREE_OPERAND (arg0, 0),
7482 TREE_OPERAND (arg1, 0), 0))
7484 REAL_VALUE_TYPE c1, c2;
7486 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7487 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7488 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7489 return fold_build2 (MULT_EXPR, type,
7490 TREE_OPERAND (arg0, 0),
7491 build_real (type, c1));
7493 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7494 if (flag_unsafe_math_optimizations
7495 && TREE_CODE (arg1) == PLUS_EXPR
7496 && TREE_CODE (arg0) != MULT_EXPR)
7498 tree tree10 = TREE_OPERAND (arg1, 0);
7499 tree tree11 = TREE_OPERAND (arg1, 1);
7500 if (TREE_CODE (tree11) == MULT_EXPR
7501 && TREE_CODE (tree10) == MULT_EXPR)
7503 tree tree0;
7504 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7505 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7508 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7509 if (flag_unsafe_math_optimizations
7510 && TREE_CODE (arg0) == PLUS_EXPR
7511 && TREE_CODE (arg1) != MULT_EXPR)
7513 tree tree00 = TREE_OPERAND (arg0, 0);
7514 tree tree01 = TREE_OPERAND (arg0, 1);
7515 if (TREE_CODE (tree01) == MULT_EXPR
7516 && TREE_CODE (tree00) == MULT_EXPR)
7518 tree tree0;
7519 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7520 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7525 bit_rotate:
7526 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7527 is a rotate of A by C1 bits. */
7528 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7529 is a rotate of A by B bits. */
7531 enum tree_code code0, code1;
7532 code0 = TREE_CODE (arg0);
7533 code1 = TREE_CODE (arg1);
7534 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7535 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7536 && operand_equal_p (TREE_OPERAND (arg0, 0),
7537 TREE_OPERAND (arg1, 0), 0)
7538 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7540 tree tree01, tree11;
7541 enum tree_code code01, code11;
7543 tree01 = TREE_OPERAND (arg0, 1);
7544 tree11 = TREE_OPERAND (arg1, 1);
7545 STRIP_NOPS (tree01);
7546 STRIP_NOPS (tree11);
7547 code01 = TREE_CODE (tree01);
7548 code11 = TREE_CODE (tree11);
7549 if (code01 == INTEGER_CST
7550 && code11 == INTEGER_CST
7551 && TREE_INT_CST_HIGH (tree01) == 0
7552 && TREE_INT_CST_HIGH (tree11) == 0
7553 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7554 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7555 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7556 code0 == LSHIFT_EXPR ? tree01 : tree11);
7557 else if (code11 == MINUS_EXPR)
7559 tree tree110, tree111;
7560 tree110 = TREE_OPERAND (tree11, 0);
7561 tree111 = TREE_OPERAND (tree11, 1);
7562 STRIP_NOPS (tree110);
7563 STRIP_NOPS (tree111);
7564 if (TREE_CODE (tree110) == INTEGER_CST
7565 && 0 == compare_tree_int (tree110,
7566 TYPE_PRECISION
7567 (TREE_TYPE (TREE_OPERAND
7568 (arg0, 0))))
7569 && operand_equal_p (tree01, tree111, 0))
7570 return build2 ((code0 == LSHIFT_EXPR
7571 ? LROTATE_EXPR
7572 : RROTATE_EXPR),
7573 type, TREE_OPERAND (arg0, 0), tree01);
7575 else if (code01 == MINUS_EXPR)
7577 tree tree010, tree011;
7578 tree010 = TREE_OPERAND (tree01, 0);
7579 tree011 = TREE_OPERAND (tree01, 1);
7580 STRIP_NOPS (tree010);
7581 STRIP_NOPS (tree011);
7582 if (TREE_CODE (tree010) == INTEGER_CST
7583 && 0 == compare_tree_int (tree010,
7584 TYPE_PRECISION
7585 (TREE_TYPE (TREE_OPERAND
7586 (arg0, 0))))
7587 && operand_equal_p (tree11, tree011, 0))
7588 return build2 ((code0 != LSHIFT_EXPR
7589 ? LROTATE_EXPR
7590 : RROTATE_EXPR),
7591 type, TREE_OPERAND (arg0, 0), tree11);
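/* Illustrative example (hypothetical operands): for a 32-bit unsigned
x, both (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) match the
patterns above and collapse to a single LROTATE_EXPR of x, which most
targets emit as one rotate instruction. */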
7596 associate:
7597 /* In most languages, we can't associate operations on floats through
7598 parentheses. Rather than remember where the parentheses were, we
7599 don't associate floats at all, unless the user has specified
7600 -funsafe-math-optimizations. */
7602 if (! wins
7603 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7605 tree var0, con0, lit0, minus_lit0;
7606 tree var1, con1, lit1, minus_lit1;
7608 /* Split both trees into variables, constants, and literals. Then
7609 associate each group together, the constants with literals,
7610 then the result with variables. This increases the chances of
7611 literals being recombined later and of generating relocatable
7612 expressions for the sum of a constant and literal. */
7613 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7614 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7615 code == MINUS_EXPR);
7617 /* Only do something if we found more than two objects. Otherwise,
7618 nothing has changed and we risk infinite recursion. */
7619 if (2 < ((var0 != 0) + (var1 != 0)
7620 + (con0 != 0) + (con1 != 0)
7621 + (lit0 != 0) + (lit1 != 0)
7622 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7624 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7625 if (code == MINUS_EXPR)
7626 code = PLUS_EXPR;
7628 var0 = associate_trees (var0, var1, code, type);
7629 con0 = associate_trees (con0, con1, code, type);
7630 lit0 = associate_trees (lit0, lit1, code, type);
7631 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7633 /* Preserve the MINUS_EXPR if the negative part of the literal is
7634 greater than the positive part. Otherwise, the multiplicative
7635 folding code (i.e. extract_muldiv) may be fooled in case
7636 unsigned constants are subtracted, like in the following
7637 example: ((X*2 + 4) - 8U)/2. */
7638 if (minus_lit0 && lit0)
7640 if (TREE_CODE (lit0) == INTEGER_CST
7641 && TREE_CODE (minus_lit0) == INTEGER_CST
7642 && tree_int_cst_lt (lit0, minus_lit0))
7644 minus_lit0 = associate_trees (minus_lit0, lit0,
7645 MINUS_EXPR, type);
7646 lit0 = 0;
7648 else
7650 lit0 = associate_trees (lit0, minus_lit0,
7651 MINUS_EXPR, type);
7652 minus_lit0 = 0;
7655 if (minus_lit0)
7657 if (con0 == 0)
7658 return fold_convert (type,
7659 associate_trees (var0, minus_lit0,
7660 MINUS_EXPR, type));
7661 else
7663 con0 = associate_trees (con0, minus_lit0,
7664 MINUS_EXPR, type);
7665 return fold_convert (type,
7666 associate_trees (var0, con0,
7667 PLUS_EXPR, type));
7671 con0 = associate_trees (con0, lit0, code, type);
7672 return fold_convert (type, associate_trees (var0, con0,
7673 code, type));
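/* Worked example (illustrative, not from the original): splitting and
recombining as above folds (x + 1) + (y + 2) into (x + y) + 3, and
(x - 5) + 8 into x + 3, keeping literals together so they can combine
with constants seen later. */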
7677 binary:
7678 if (wins)
7679 t1 = const_binop (code, arg0, arg1, 0);
7680 if (t1 != NULL_TREE)
7682 /* The return value should always have
7683 the same type as the original expression. */
7684 if (TREE_TYPE (t1) != type)
7685 t1 = fold_convert (type, t1);
7687 return t1;
7689 return NULL_TREE;
7691 case MINUS_EXPR:
7692 /* A - (-B) -> A + B */
7693 if (TREE_CODE (arg1) == NEGATE_EXPR)
7694 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7695 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7696 if (TREE_CODE (arg0) == NEGATE_EXPR
7697 && (FLOAT_TYPE_P (type)
7698 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7699 && negate_expr_p (arg1)
7700 && reorder_operands_p (arg0, arg1))
7701 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7702 TREE_OPERAND (arg0, 0));
7703 /* Convert -A - 1 to ~A. */
7704 if (INTEGRAL_TYPE_P (type)
7705 && TREE_CODE (arg0) == NEGATE_EXPR
7706 && integer_onep (arg1))
7707 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7709 /* Convert -1 - A to ~A. */
7710 if (INTEGRAL_TYPE_P (type)
7711 && integer_all_onesp (arg0))
7712 return fold_build1 (BIT_NOT_EXPR, type, arg1);
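/* Illustrative note (not part of the original source): both folds use
the two's-complement identity ~A == -A - 1, so -a - 1 and -1 - a each
become ~a for integral a. */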
7714 if (TREE_CODE (type) == COMPLEX_TYPE)
7716 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7717 if (tem)
7718 return tem;
7721 if (! FLOAT_TYPE_P (type))
7723 if (! wins && integer_zerop (arg0))
7724 return negate_expr (fold_convert (type, arg1));
7725 if (integer_zerop (arg1))
7726 return non_lvalue (fold_convert (type, arg0));
7728 /* Fold A - (A & B) into ~B & A. */
7729 if (!TREE_SIDE_EFFECTS (arg0)
7730 && TREE_CODE (arg1) == BIT_AND_EXPR)
7732 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7733 return fold_build2 (BIT_AND_EXPR, type,
7734 fold_build1 (BIT_NOT_EXPR, type,
7735 TREE_OPERAND (arg1, 0)),
7736 arg0);
7737 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7738 return fold_build2 (BIT_AND_EXPR, type,
7739 fold_build1 (BIT_NOT_EXPR, type,
7740 TREE_OPERAND (arg1, 1)),
7741 arg0);
7744 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7745 any power of 2 minus 1. */
7746 if (TREE_CODE (arg0) == BIT_AND_EXPR
7747 && TREE_CODE (arg1) == BIT_AND_EXPR
7748 && operand_equal_p (TREE_OPERAND (arg0, 0),
7749 TREE_OPERAND (arg1, 0), 0))
7751 tree mask0 = TREE_OPERAND (arg0, 1);
7752 tree mask1 = TREE_OPERAND (arg1, 1);
7753 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7755 if (operand_equal_p (tem, mask1, 0))
7757 tem = fold_build2 (BIT_XOR_EXPR, type,
7758 TREE_OPERAND (arg0, 0), mask1);
7759 return fold_build2 (MINUS_EXPR, type, tem, mask1);
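/* Worked example (hypothetical operands): with B == 7, a power of two
minus 1, (x & ~7) - (x & 7) becomes (x ^ 7) - 7. Writing x as
8*h + l with l == (x & 7), both expressions equal 8*h - l, so the
fold trades two ANDs for a single XOR. */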
7764 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7765 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7766 return non_lvalue (fold_convert (type, arg0));
7768 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7769 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7770 (-ARG1 + ARG0) reduces to -ARG1. */
7771 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7772 return negate_expr (fold_convert (type, arg1));
7774 /* Fold &x - &x. This can happen from &x.foo - &x.
7775 This is unsafe for certain floats even in non-IEEE formats.
7776 In IEEE, it is unsafe because it does wrong for NaNs.
7777 Also note that operand_equal_p is always false if an operand
7778 is volatile. */
7780 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7781 && operand_equal_p (arg0, arg1, 0))
7782 return fold_convert (type, integer_zero_node);
7784 /* A - B -> A + (-B) if B is easily negatable. */
7785 if (!wins && negate_expr_p (arg1)
7786 && ((FLOAT_TYPE_P (type)
7787 /* Avoid this transformation if B is a positive REAL_CST. */
7788 && (TREE_CODE (arg1) != REAL_CST
7789 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7790 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7791 return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
7793 /* Try folding difference of addresses. */
7795 HOST_WIDE_INT diff;
7797 if ((TREE_CODE (arg0) == ADDR_EXPR
7798 || TREE_CODE (arg1) == ADDR_EXPR)
7799 && ptr_difference_const (arg0, arg1, &diff))
7800 return build_int_cst_type (type, diff);
7803 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7804 of the array. The loop optimizer sometimes produces this type of
7805 expression. */
7806 if (TREE_CODE (arg0) == ADDR_EXPR
7807 && TREE_CODE (arg1) == MULT_EXPR)
7809 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7810 if (tem)
7811 return fold_convert (type, fold (tem));
7814 if (TREE_CODE (arg0) == MULT_EXPR
7815 && TREE_CODE (arg1) == MULT_EXPR
7816 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7818 /* (A * C) - (B * C) -> (A-B) * C. */
7819 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7820 TREE_OPERAND (arg1, 1), 0))
7821 return fold_build2 (MULT_EXPR, type,
7822 fold_build2 (MINUS_EXPR, type,
7823 TREE_OPERAND (arg0, 0),
7824 TREE_OPERAND (arg1, 0)),
7825 TREE_OPERAND (arg0, 1));
7826 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7827 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7828 TREE_OPERAND (arg1, 0), 0))
7829 return fold_build2 (MULT_EXPR, type,
7830 TREE_OPERAND (arg0, 0),
7831 fold_build2 (MINUS_EXPR, type,
7832 TREE_OPERAND (arg0, 1),
7833 TREE_OPERAND (arg1, 1)));
7836 goto associate;
7838 case MULT_EXPR:
7839 /* (-A) * (-B) -> A * B */
7840 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7841 return fold_build2 (MULT_EXPR, type,
7842 TREE_OPERAND (arg0, 0),
7843 negate_expr (arg1));
7844 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7845 return fold_build2 (MULT_EXPR, type,
7846 negate_expr (arg0),
7847 TREE_OPERAND (arg1, 0));
7849 if (TREE_CODE (type) == COMPLEX_TYPE)
7851 tem = fold_complex_mult (type, arg0, arg1);
7852 if (tem)
7853 return tem;
7856 if (! FLOAT_TYPE_P (type))
7858 if (integer_zerop (arg1))
7859 return omit_one_operand (type, arg1, arg0);
7860 if (integer_onep (arg1))
7861 return non_lvalue (fold_convert (type, arg0));
7862 /* Transform x * -1 into -x. */
7863 if (integer_all_onesp (arg1))
7864 return fold_convert (type, negate_expr (arg0));
7866 /* (a * (1 << b)) is (a << b). */
7867 if (TREE_CODE (arg1) == LSHIFT_EXPR
7868 && integer_onep (TREE_OPERAND (arg1, 0)))
7869 return fold_build2 (LSHIFT_EXPR, type, arg0,
7870 TREE_OPERAND (arg1, 1));
7871 if (TREE_CODE (arg0) == LSHIFT_EXPR
7872 && integer_onep (TREE_OPERAND (arg0, 0)))
7873 return fold_build2 (LSHIFT_EXPR, type, arg1,
7874 TREE_OPERAND (arg0, 1));
7876 if (TREE_CODE (arg1) == INTEGER_CST
7877 && 0 != (tem = extract_muldiv (op0,
7878 fold_convert (type, arg1),
7879 code, NULL_TREE)))
7880 return fold_convert (type, tem);
7883 else
7885 /* Maybe fold x * 0 to 0. The expressions aren't the same
7886 when x is NaN, since x * 0 is also NaN. Nor are they the
7887 same in modes with signed zeros, since multiplying a
7888 negative value by 0 gives -0, not +0. */
7889 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7890 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7891 && real_zerop (arg1))
7892 return omit_one_operand (type, arg1, arg0);
7893 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7894 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7895 && real_onep (arg1))
7896 return non_lvalue (fold_convert (type, arg0));
7898 /* Transform x * -1.0 into -x. */
7899 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7900 && real_minus_onep (arg1))
7901 return fold_convert (type, negate_expr (arg0));
7903 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7904 if (flag_unsafe_math_optimizations
7905 && TREE_CODE (arg0) == RDIV_EXPR
7906 && TREE_CODE (arg1) == REAL_CST
7907 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7909 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7910 arg1, 0);
7911 if (tem)
7912 return fold_build2 (RDIV_EXPR, type, tem,
7913 TREE_OPERAND (arg0, 1));
7916 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7917 if (operand_equal_p (arg0, arg1, 0))
7919 tree tem = fold_strip_sign_ops (arg0);
7920 if (tem != NULL_TREE)
7922 tem = fold_convert (type, tem);
7923 return fold_build2 (MULT_EXPR, type, tem, tem);
7927 if (flag_unsafe_math_optimizations)
7929 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7930 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7932 /* Optimizations of root(...)*root(...). */
7933 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7935 tree rootfn, arg, arglist;
7936 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7937 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7939 /* Optimize sqrt(x)*sqrt(x) as x. */
7940 if (BUILTIN_SQRT_P (fcode0)
7941 && operand_equal_p (arg00, arg10, 0)
7942 && ! HONOR_SNANS (TYPE_MODE (type)))
7943 return arg00;
7945 /* Optimize root(x)*root(y) as root(x*y). */
7946 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7947 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7948 arglist = build_tree_list (NULL_TREE, arg);
7949 return build_function_call_expr (rootfn, arglist);
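/* Illustrative example (not from the original): under
-funsafe-math-optimizations, sqrt(x)*sqrt(x) folds directly to x, and
sqrt(x)*sqrt(y) becomes sqrt(x*y). The latter is unsafe in general:
if x and y are both negative, sqrt(x)*sqrt(y) is NaN but sqrt(x*y)
is not. */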
7952 /* Optimize expN(x)*expN(y) as expN(x+y). */
7953 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7955 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7956 tree arg = fold_build2 (PLUS_EXPR, type,
7957 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7958 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7959 tree arglist = build_tree_list (NULL_TREE, arg);
7960 return build_function_call_expr (expfn, arglist);
7963 /* Optimizations of pow(...)*pow(...). */
7964 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7965 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7966 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7968 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7969 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7970 1)));
7971 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7972 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7973 1)));
7975 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7976 if (operand_equal_p (arg01, arg11, 0))
7978 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7979 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7980 tree arglist = tree_cons (NULL_TREE, arg,
7981 build_tree_list (NULL_TREE,
7982 arg01));
7983 return build_function_call_expr (powfn, arglist);
7986 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7987 if (operand_equal_p (arg00, arg10, 0))
7989 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7990 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7991 tree arglist = tree_cons (NULL_TREE, arg00,
7992 build_tree_list (NULL_TREE,
7993 arg));
7994 return build_function_call_expr (powfn, arglist);
7998 /* Optimize tan(x)*cos(x) as sin(x). */
7999 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8000 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8001 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8002 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8003 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8004 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8005 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8006 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8008 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8010 if (sinfn != NULL_TREE)
8011 return build_function_call_expr (sinfn,
8012 TREE_OPERAND (arg0, 1));
8015 /* Optimize x*pow(x,c) as pow(x,c+1). */
8016 if (fcode1 == BUILT_IN_POW
8017 || fcode1 == BUILT_IN_POWF
8018 || fcode1 == BUILT_IN_POWL)
8020 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8021 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8022 1)));
8023 if (TREE_CODE (arg11) == REAL_CST
8024 && ! TREE_CONSTANT_OVERFLOW (arg11)
8025 && operand_equal_p (arg0, arg10, 0))
8027 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8028 REAL_VALUE_TYPE c;
8029 tree arg, arglist;
8031 c = TREE_REAL_CST (arg11);
8032 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8033 arg = build_real (type, c);
8034 arglist = build_tree_list (NULL_TREE, arg);
8035 arglist = tree_cons (NULL_TREE, arg0, arglist);
8036 return build_function_call_expr (powfn, arglist);
8040 /* Optimize pow(x,c)*x as pow(x,c+1). */
8041 if (fcode0 == BUILT_IN_POW
8042 || fcode0 == BUILT_IN_POWF
8043 || fcode0 == BUILT_IN_POWL)
8045 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8046 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8047 1)));
8048 if (TREE_CODE (arg01) == REAL_CST
8049 && ! TREE_CONSTANT_OVERFLOW (arg01)
8050 && operand_equal_p (arg1, arg00, 0))
8052 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8053 REAL_VALUE_TYPE c;
8054 tree arg, arglist;
8056 c = TREE_REAL_CST (arg01);
8057 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8058 arg = build_real (type, c);
8059 arglist = build_tree_list (NULL_TREE, arg);
8060 arglist = tree_cons (NULL_TREE, arg1, arglist);
8061 return build_function_call_expr (powfn, arglist);
8065 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8066 if (! optimize_size
8067 && operand_equal_p (arg0, arg1, 0))
8069 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8071 if (powfn)
8073 tree arg = build_real (type, dconst2);
8074 tree arglist = build_tree_list (NULL_TREE, arg);
8075 arglist = tree_cons (NULL_TREE, arg0, arglist);
8076 return build_function_call_expr (powfn, arglist);
8081 goto associate;
8083 case BIT_IOR_EXPR:
8084 bit_ior:
8085 if (integer_all_onesp (arg1))
8086 return omit_one_operand (type, arg1, arg0);
8087 if (integer_zerop (arg1))
8088 return non_lvalue (fold_convert (type, arg0));
8089 if (operand_equal_p (arg0, arg1, 0))
8090 return non_lvalue (fold_convert (type, arg0));
8092 /* ~X | X is -1. */
8093 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8096 t1 = build_int_cst (type, -1);
8097 t1 = force_fit_type (t1, 0, false, false);
8098 return omit_one_operand (type, t1, arg1);
8101 /* X | ~X is -1. */
8102 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8105 t1 = build_int_cst (type, -1);
8106 t1 = force_fit_type (t1, 0, false, false);
8107 return omit_one_operand (type, t1, arg0);
8110 t1 = distribute_bit_expr (code, type, arg0, arg1);
8111 if (t1 != NULL_TREE)
8112 return t1;
8114 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8116 This results in more efficient code for machines without a NAND
8117 instruction. Combine will canonicalize to the first form
8118 which will allow use of NAND instructions provided by the
8119 backend if they exist. */
8120 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8121 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8123 return fold_build1 (BIT_NOT_EXPR, type,
8124 build2 (BIT_AND_EXPR, type,
8125 TREE_OPERAND (arg0, 0),
8126 TREE_OPERAND (arg1, 0)));
8129 /* See if this can be simplified into a rotate first. If that
8130 is unsuccessful continue in the association code. */
8131 goto bit_rotate;
8133 case BIT_XOR_EXPR:
8134 if (integer_zerop (arg1))
8135 return non_lvalue (fold_convert (type, arg0));
8136 if (integer_all_onesp (arg1))
8137 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8138 if (operand_equal_p (arg0, arg1, 0))
8139 return omit_one_operand (type, integer_zero_node, arg0);
8141 /* ~X ^ X is -1. */
8142 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8143 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8145 t1 = build_int_cst (type, -1);
8146 t1 = force_fit_type (t1, 0, false, false);
8147 return omit_one_operand (type, t1, arg1);
8150 /* X ^ ~X is -1. */
8151 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8152 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8154 t1 = build_int_cst (type, -1);
8155 t1 = force_fit_type (t1, 0, false, false);
8156 return omit_one_operand (type, t1, arg0);
8159 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8160 with a constant, and the two constants have no bits in common,
8161 we should treat this as a BIT_IOR_EXPR since this may produce more
8162 simplifications. */
8163 if (TREE_CODE (arg0) == BIT_AND_EXPR
8164 && TREE_CODE (arg1) == BIT_AND_EXPR
8165 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8166 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8167 && integer_zerop (const_binop (BIT_AND_EXPR,
8168 TREE_OPERAND (arg0, 1),
8169 TREE_OPERAND (arg1, 1), 0)))
8171 code = BIT_IOR_EXPR;
8172 goto bit_ior;
8175 /* See if this can be simplified into a rotate first. If that
8176 is unsuccessful continue in the association code. */
8177 goto bit_rotate;
8179 case BIT_AND_EXPR:
8180 if (integer_all_onesp (arg1))
8181 return non_lvalue (fold_convert (type, arg0));
8182 if (integer_zerop (arg1))
8183 return omit_one_operand (type, arg1, arg0);
8184 if (operand_equal_p (arg0, arg1, 0))
8185 return non_lvalue (fold_convert (type, arg0));
8187 /* ~X & X is always zero. */
8188 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8189 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8190 return omit_one_operand (type, integer_zero_node, arg1);
8192 /* X & ~X is always zero. */
8193 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8194 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8195 return omit_one_operand (type, integer_zero_node, arg0);
8197 t1 = distribute_bit_expr (code, type, arg0, arg1);
8198 if (t1 != NULL_TREE)
8199 return t1;
8200 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8201 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8202 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8204 unsigned int prec
8205 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8207 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8208 && (~TREE_INT_CST_LOW (arg1)
8209 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8210 return fold_convert (type, TREE_OPERAND (arg0, 0));
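/* Illustrative example (hypothetical operands): for unsigned char c,
(int) c & 0377 keeps all eight value bits of c, so the mask is
redundant and the expression folds to (int) c. */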
8213 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8215 This results in more efficient code for machines without a NOR
8216 instruction. Combine will canonicalize to the first form
8217 which will allow use of NOR instructions provided by the
8218 backend if they exist. */
8219 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8220 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8222 return fold_build1 (BIT_NOT_EXPR, type,
8223 build2 (BIT_IOR_EXPR, type,
8224 TREE_OPERAND (arg0, 0),
8225 TREE_OPERAND (arg1, 0)));
8228 goto associate;
8230 case RDIV_EXPR:
8231 /* Don't touch a floating-point divide by zero unless the mode
8232 of the constant can represent infinity. */
8233 if (TREE_CODE (arg1) == REAL_CST
8234 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8235 && real_zerop (arg1))
8236 return NULL_TREE;
8238 /* (-A) / (-B) -> A / B */
8239 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8240 return fold_build2 (RDIV_EXPR, type,
8241 TREE_OPERAND (arg0, 0),
8242 negate_expr (arg1));
8243 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8244 return fold_build2 (RDIV_EXPR, type,
8245 negate_expr (arg0),
8246 TREE_OPERAND (arg1, 0));
8248 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8249 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8250 && real_onep (arg1))
8251 return non_lvalue (fold_convert (type, arg0));
8253 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8254 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8255 && real_minus_onep (arg1))
8256 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8258 /* If ARG1 is a constant, we can convert this to a multiply by the
8259 reciprocal. This does not have the same rounding properties,
8260 so only do this if -funsafe-math-optimizations. We can actually
8261 always safely do it if ARG1 is a power of two, but it's hard to
8262 tell if it is or not in a portable manner. */
8263 if (TREE_CODE (arg1) == REAL_CST)
8265 if (flag_unsafe_math_optimizations
8266 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8267 arg1, 0)))
8268 return fold_build2 (MULT_EXPR, type, arg0, tem);
8269 /* Find the reciprocal if optimizing and the result is exact. */
8270 if (optimize)
8272 REAL_VALUE_TYPE r;
8273 r = TREE_REAL_CST (arg1);
8274 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8276 tem = build_real (type, r);
8277 return fold_build2 (MULT_EXPR, type, arg0, tem);
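/* Illustrative example (hypothetical operands): when optimizing,
x / 4.0 becomes x * 0.25 since 0.25 is exactly representable, while
x / 3.0 becomes x * (1.0/3.0) only under -funsafe-math-optimizations,
because that reciprocal rounds. */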
8281 /* Convert A/B/C to A/(B*C). */
8282 if (flag_unsafe_math_optimizations
8283 && TREE_CODE (arg0) == RDIV_EXPR)
8284 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8285 fold_build2 (MULT_EXPR, type,
8286 TREE_OPERAND (arg0, 1), arg1));
8288 /* Convert A/(B/C) to (A/B)*C. */
8289 if (flag_unsafe_math_optimizations
8290 && TREE_CODE (arg1) == RDIV_EXPR)
8291 return fold_build2 (MULT_EXPR, type,
8292 fold_build2 (RDIV_EXPR, type, arg0,
8293 TREE_OPERAND (arg1, 0)),
8294 TREE_OPERAND (arg1, 1));
8296 /* Convert C1/(X*C2) into (C1/C2)/X. */
8297 if (flag_unsafe_math_optimizations
8298 && TREE_CODE (arg1) == MULT_EXPR
8299 && TREE_CODE (arg0) == REAL_CST
8300 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8302 tree tem = const_binop (RDIV_EXPR, arg0,
8303 TREE_OPERAND (arg1, 1), 0);
8304 if (tem)
8305 return fold_build2 (RDIV_EXPR, type, tem,
8306 TREE_OPERAND (arg1, 0));
8309 if (TREE_CODE (type) == COMPLEX_TYPE)
8311 tem = fold_complex_div (type, arg0, arg1, code);
8312 if (tem)
8313 return tem;
8316 if (flag_unsafe_math_optimizations)
8318 enum built_in_function fcode = builtin_mathfn_code (arg1);
8319 /* Optimize x/expN(y) into x*expN(-y). */
8320 if (BUILTIN_EXPONENT_P (fcode))
8322 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8323 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8324 tree arglist = build_tree_list (NULL_TREE,
8325 fold_convert (type, arg));
8326 arg1 = build_function_call_expr (expfn, arglist);
8327 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8330 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8331 if (fcode == BUILT_IN_POW
8332 || fcode == BUILT_IN_POWF
8333 || fcode == BUILT_IN_POWL)
8335 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8336 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8337 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8338 tree neg11 = fold_convert (type, negate_expr (arg11));
8339 tree arglist = tree_cons(NULL_TREE, arg10,
8340 build_tree_list (NULL_TREE, neg11));
8341 arg1 = build_function_call_expr (powfn, arglist);
8342 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8346 if (flag_unsafe_math_optimizations)
8348 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8349 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8351 /* Optimize sin(x)/cos(x) as tan(x). */
8352 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8353 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8354 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8355 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8356 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8358 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8360 if (tanfn != NULL_TREE)
8361 return build_function_call_expr (tanfn,
8362 TREE_OPERAND (arg0, 1));
8365 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8366 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8367 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8368 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8369 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8370 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8372 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8374 if (tanfn != NULL_TREE)
8376 tree tmp = TREE_OPERAND (arg0, 1);
8377 tmp = build_function_call_expr (tanfn, tmp);
8378 return fold_build2 (RDIV_EXPR, type,
8379 build_real (type, dconst1), tmp);
8383 /* Optimize pow(x,c)/x as pow(x,c-1). */
8384 if (fcode0 == BUILT_IN_POW
8385 || fcode0 == BUILT_IN_POWF
8386 || fcode0 == BUILT_IN_POWL)
8388 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8389 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8390 if (TREE_CODE (arg01) == REAL_CST
8391 && ! TREE_CONSTANT_OVERFLOW (arg01)
8392 && operand_equal_p (arg1, arg00, 0))
8394 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8395 REAL_VALUE_TYPE c;
8396 tree arg, arglist;
8398 c = TREE_REAL_CST (arg01);
8399 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8400 arg = build_real (type, c);
8401 arglist = build_tree_list (NULL_TREE, arg);
8402 arglist = tree_cons (NULL_TREE, arg1, arglist);
8403 return build_function_call_expr (powfn, arglist);
8407 goto binary;
8409 case TRUNC_DIV_EXPR:
8410 case ROUND_DIV_EXPR:
8411 case FLOOR_DIV_EXPR:
8412 case CEIL_DIV_EXPR:
8413 case EXACT_DIV_EXPR:
8414 if (integer_onep (arg1))
8415 return non_lvalue (fold_convert (type, arg0));
8416 if (integer_zerop (arg1))
8417 return NULL_TREE;
8418 /* X / -1 is -X. */
8419 if (!TYPE_UNSIGNED (type)
8420 && TREE_CODE (arg1) == INTEGER_CST
8421 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8422 && TREE_INT_CST_HIGH (arg1) == -1)
8423 return fold_convert (type, negate_expr (arg0));
8425 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8426 operation, EXACT_DIV_EXPR.
8428 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8429 At one time others generated faster code; it's not clear whether they do
8430 after the last round of changes to the DIV code in expmed.c. */
8431 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8432 && multiple_of_p (type, arg0, arg1))
8433 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8435 if (TREE_CODE (arg1) == INTEGER_CST
8436 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8437 return fold_convert (type, tem);
8439 if (TREE_CODE (type) == COMPLEX_TYPE)
8441 tem = fold_complex_div (type, arg0, arg1, code);
8442 if (tem)
8443 return tem;
8445 goto binary;
8447 case CEIL_MOD_EXPR:
8448 case FLOOR_MOD_EXPR:
8449 case ROUND_MOD_EXPR:
8450 case TRUNC_MOD_EXPR:
8451 /* X % 1 is always zero, but be sure to preserve any side
8452 effects in X. */
8453 if (integer_onep (arg1))
8454 return omit_one_operand (type, integer_zero_node, arg0);
8456 /* For X % 0, return X % 0 unchanged so that we can get the
8457 proper warnings and errors. */
8458 if (integer_zerop (arg1))
8459 return NULL_TREE;
8461 /* 0 % X is always zero, but be sure to preserve any side
8462 effects in X. Place this after checking for X == 0. */
8463 if (integer_zerop (arg0))
8464 return omit_one_operand (type, integer_zero_node, arg1);
8466 /* X % -1 is zero. */
8467 if (!TYPE_UNSIGNED (type)
8468 && TREE_CODE (arg1) == INTEGER_CST
8469 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8470 && TREE_INT_CST_HIGH (arg1) == -1)
8471 return omit_one_operand (type, integer_zero_node, arg0);
8473 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8474 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8475 if (code == TRUNC_MOD_EXPR
8476 && TYPE_UNSIGNED (type)
8477 && integer_pow2p (arg1))
8479 unsigned HOST_WIDE_INT high, low;
8480 tree mask;
8481 int l;
8483 l = tree_log2 (arg1);
8484 if (l >= HOST_BITS_PER_WIDE_INT)
8486 high = ((unsigned HOST_WIDE_INT) 1
8487 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8488 low = -1;
8490 else
8492 high = 0;
8493 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8496 mask = build_int_cst_wide (type, low, high);
8497 return fold_build2 (BIT_AND_EXPR, type,
8498 fold_convert (type, arg0), mask);
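/* Illustrative example (hypothetical operands): for unsigned x,
x % 8 is rewritten above as x & 7, replacing the modulo with a single
AND against the mask (1 << 3) - 1 computed from tree_log2. */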
8501 /* X % -C is the same as X % C. */
8502 if (code == TRUNC_MOD_EXPR
8503 && !TYPE_UNSIGNED (type)
8504 && TREE_CODE (arg1) == INTEGER_CST
8505 && TREE_INT_CST_HIGH (arg1) < 0
8506 && !flag_trapv
8507 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8508 && !sign_bit_p (arg1, arg1))
8509 return fold_build2 (code, type, fold_convert (type, arg0),
8510 fold_convert (type, negate_expr (arg1)));
8512 /* X % -Y is the same as X % Y. */
8513 if (code == TRUNC_MOD_EXPR
8514 && !TYPE_UNSIGNED (type)
8515 && TREE_CODE (arg1) == NEGATE_EXPR
8516 && !flag_trapv)
8517 return fold_build2 (code, type, fold_convert (type, arg0),
8518 fold_convert (type, TREE_OPERAND (arg1, 0)));
8520 if (TREE_CODE (arg1) == INTEGER_CST
8521 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8522 return fold_convert (type, tem);
8524 goto binary;
8526 case LROTATE_EXPR:
8527 case RROTATE_EXPR:
8528 if (integer_all_onesp (arg0))
8529 return omit_one_operand (type, arg0, arg1);
8530 goto shift;
8532 case RSHIFT_EXPR:
8533 /* Optimize -1 >> x for arithmetic right shifts. */
8534 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8535 return omit_one_operand (type, arg0, arg1);
8536 /* ... fall through ... */
8538 case LSHIFT_EXPR:
8539 shift:
8540 if (integer_zerop (arg1))
8541 return non_lvalue (fold_convert (type, arg0));
8542 if (integer_zerop (arg0))
8543 return omit_one_operand (type, arg0, arg1);
8545 /* Since a negative shift count is not well-defined,
8546 don't try to compute it in the compiler. */
8547 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8548 return NULL_TREE;
8549 /* Rewrite an LROTATE_EXPR by a constant into an
8550 RROTATE_EXPR by a new constant. */
8551 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8553 tree tem = build_int_cst (NULL_TREE,
8554 GET_MODE_BITSIZE (TYPE_MODE (type)));
8555 tem = fold_convert (TREE_TYPE (arg1), tem);
8556 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8557 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
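/* Illustrative example (hypothetical operands): in a 32-bit mode, a
left rotate by 3 is canonicalized above into a right rotate by
32 - 3 == 29, so later folds only need to handle RROTATE_EXPR with
constant counts. */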
8560 /* If we have a rotate of a bit operation with the rotate count and
8561 the second operand of the bit operation both constant,
8562 permute the two operations. */
8563 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8564 && (TREE_CODE (arg0) == BIT_AND_EXPR
8565 || TREE_CODE (arg0) == BIT_IOR_EXPR
8566 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8567 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8568 return fold_build2 (TREE_CODE (arg0), type,
8569 fold_build2 (code, type,
8570 TREE_OPERAND (arg0, 0), arg1),
8571 fold_build2 (code, type,
8572 TREE_OPERAND (arg0, 1), arg1));
8574 /* Two consecutive rotates adding up to the width of the mode can
8575 be ignored. */
8576 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8577 && TREE_CODE (arg0) == RROTATE_EXPR
8578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8579 && TREE_INT_CST_HIGH (arg1) == 0
8580 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8581 && ((TREE_INT_CST_LOW (arg1)
8582 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8583 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8584 return TREE_OPERAND (arg0, 0);
8586 goto binary;
8588 case MIN_EXPR:
8589 if (operand_equal_p (arg0, arg1, 0))
8590 return omit_one_operand (type, arg0, arg1);
8591 if (INTEGRAL_TYPE_P (type)
8592 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8593 return omit_one_operand (type, arg1, arg0);
8594 goto associate;
8596 case MAX_EXPR:
8597 if (operand_equal_p (arg0, arg1, 0))
8598 return omit_one_operand (type, arg0, arg1);
8599 if (INTEGRAL_TYPE_P (type)
8600 && TYPE_MAX_VALUE (type)
8601 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8602 return omit_one_operand (type, arg1, arg0);
8603 goto associate;
8605 case TRUTH_ANDIF_EXPR:
8606 /* Note that the operands of this must be ints
8607 and their values must be 0 or 1.
8608 ("true" is a fixed value perhaps depending on the language.) */
8609 /* If first arg is constant zero, return it. */
8610 if (integer_zerop (arg0))
8611 return fold_convert (type, arg0);
8612 case TRUTH_AND_EXPR:
8613 /* If either arg is constant true, drop it. */
8614 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8615 return non_lvalue (fold_convert (type, arg1));
8616 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8617 /* Preserve sequence points. */
8618 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8619 return non_lvalue (fold_convert (type, arg0));
8620 /* If second arg is constant zero, result is zero, but first arg
8621 must be evaluated. */
8622 if (integer_zerop (arg1))
8623 return omit_one_operand (type, arg1, arg0);
8624 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8625 case will be handled here. */
8626 if (integer_zerop (arg0))
8627 return omit_one_operand (type, arg0, arg1);
8629 /* !X && X is always false. */
8630 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8632 return omit_one_operand (type, integer_zero_node, arg1);
8633 /* X && !X is always false. */
8634 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8635 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8636 return omit_one_operand (type, integer_zero_node, arg0);
8638 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8639 means A >= Y && A != MAX, but in this case we know that
8640 A < X <= MAX. */
8642 if (!TREE_SIDE_EFFECTS (arg0)
8643 && !TREE_SIDE_EFFECTS (arg1))
8645 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8646 if (tem)
8647 return fold_build2 (code, type, tem, arg1);
8649 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8650 if (tem)
8651 return fold_build2 (code, type, arg0, tem);
8654 truth_andor:
8655 /* We only do these simplifications if we are optimizing. */
8656 if (!optimize)
8657 return NULL_TREE;
8659 /* Check for things like (A || B) && (A || C). We can convert this
8660 to A || (B && C). Note that either operator can be any of the four
8661 truth and/or operations and the transformation will still be
8662 valid. Also note that we only care about order for the
8663 ANDIF and ORIF operators. If B contains side effects, this
8664 might change the truth-value of A. */
8665 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8666 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8667 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8668 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8669 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8670 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8672 tree a00 = TREE_OPERAND (arg0, 0);
8673 tree a01 = TREE_OPERAND (arg0, 1);
8674 tree a10 = TREE_OPERAND (arg1, 0);
8675 tree a11 = TREE_OPERAND (arg1, 1);
8676 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8677 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8678 && (code == TRUTH_AND_EXPR
8679 || code == TRUTH_OR_EXPR));
8681 if (operand_equal_p (a00, a10, 0))
8682 return fold_build2 (TREE_CODE (arg0), type, a00,
8683 fold_build2 (code, type, a01, a11));
8684 else if (commutative && operand_equal_p (a00, a11, 0))
8685 return fold_build2 (TREE_CODE (arg0), type, a00,
8686 fold_build2 (code, type, a01, a10));
8687 else if (commutative && operand_equal_p (a01, a10, 0))
8688 return fold_build2 (TREE_CODE (arg0), type, a01,
8689 fold_build2 (code, type, a00, a11));
8691 /* This case is tricky because we must either have commutative
8692 operators or else A10 must not have side-effects. */
8694 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8695 && operand_equal_p (a01, a11, 0))
8696 return fold_build2 (TREE_CODE (arg0), type,
8697 fold_build2 (code, type, a00, a10),
8698 a01);
8701 /* See if we can build a range comparison. */
8702 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8703 return tem;
8705 /* Check for the possibility of merging component references. If our
8706 lhs is another similar operation, try to merge its rhs with our
8707 rhs. Then try to merge our lhs and rhs. */
8708 if (TREE_CODE (arg0) == code
8709 && 0 != (tem = fold_truthop (code, type,
8710 TREE_OPERAND (arg0, 1), arg1)))
8711 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8713 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8714 return tem;
8716 return NULL_TREE;
8718 case TRUTH_ORIF_EXPR:
8719 /* Note that the operands of this must be ints
8720 and their values must be 0 or true.
8721 ("true" is a fixed value perhaps depending on the language.) */
8722 /* If first arg is constant true, return it. */
8723 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8724 return fold_convert (type, arg0);
8725 case TRUTH_OR_EXPR:
8726 /* If either arg is constant zero, drop it. */
8727 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8728 return non_lvalue (fold_convert (type, arg1));
8729 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8730 /* Preserve sequence points. */
8731 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8732 return non_lvalue (fold_convert (type, arg0));
8733 /* If second arg is constant true, result is true, but we must
8734 evaluate first arg. */
8735 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8736 return omit_one_operand (type, arg1, arg0);
8737 /* Likewise for first arg, but note this only occurs here for
8738 TRUTH_OR_EXPR. */
8739 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8740 return omit_one_operand (type, arg0, arg1);
8742 /* !X || X is always true. */
8743 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8744 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8745 return omit_one_operand (type, integer_one_node, arg1);
8746 /* X || !X is always true. */
8747 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8749 return omit_one_operand (type, integer_one_node, arg0);
8751 goto truth_andor;
8753 case TRUTH_XOR_EXPR:
8754 /* If the second arg is constant zero, drop it. */
8755 if (integer_zerop (arg1))
8756 return non_lvalue (fold_convert (type, arg0));
8757 /* If the second arg is constant true, this is a logical inversion. */
8758 if (integer_onep (arg1))
8760 /* Only call invert_truthvalue if operand is a truth value. */
8761 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8762 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8763 else
8764 tem = invert_truthvalue (arg0);
8765 return non_lvalue (fold_convert (type, tem));
8767 /* Identical arguments cancel to zero. */
8768 if (operand_equal_p (arg0, arg1, 0))
8769 return omit_one_operand (type, integer_zero_node, arg0);
8771 /* !X ^ X is always true. */
8772 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8773 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8774 return omit_one_operand (type, integer_one_node, arg1);
8776 /* X ^ !X is always true. */
8777 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8778 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8779 return omit_one_operand (type, integer_one_node, arg0);
8781 return NULL_TREE;
8783 case EQ_EXPR:
8784 case NE_EXPR:
8785 case LT_EXPR:
8786 case GT_EXPR:
8787 case LE_EXPR:
8788 case GE_EXPR:
8789 /* If one arg is a real or integer constant, put it last. */
8790 if (tree_swap_operands_p (arg0, arg1, true))
8791 return fold_build2 (swap_tree_comparison (code), type, arg1, arg0);
8793 /* If this is an equality comparison of the address of a non-weak
8794 object against zero, then we know the result. */
8795 if ((code == EQ_EXPR || code == NE_EXPR)
8796 && TREE_CODE (arg0) == ADDR_EXPR
8797 && DECL_P (TREE_OPERAND (arg0, 0))
8798 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8799 && integer_zerop (arg1))
8800 return constant_boolean_node (code != EQ_EXPR, type);
8802 /* If this is an equality comparison of the address of two non-weak,
8803 unaliased symbols neither of which are extern (since we do not
8804 have access to attributes for externs), then we know the result. */
8805 if ((code == EQ_EXPR || code == NE_EXPR)
8806 && TREE_CODE (arg0) == ADDR_EXPR
8807 && DECL_P (TREE_OPERAND (arg0, 0))
8808 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8809 && ! lookup_attribute ("alias",
8810 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8811 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8812 && TREE_CODE (arg1) == ADDR_EXPR
8813 && DECL_P (TREE_OPERAND (arg1, 0))
8814 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8815 && ! lookup_attribute ("alias",
8816 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8817 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8818 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8819 ? code == EQ_EXPR : code != EQ_EXPR,
8820 type);
8822 /* If this is a comparison of two exprs that look like an
8823 ARRAY_REF of the same object, then we can fold this to a
8824 comparison of the two offsets. */
8825 if (TREE_CODE_CLASS (code) == tcc_comparison)
8827 tree base0, offset0, base1, offset1;
8829 if (extract_array_ref (arg0, &base0, &offset0)
8830 && extract_array_ref (arg1, &base1, &offset1)
8831 && operand_equal_p (base0, base1, 0))
8833 if (offset0 == NULL_TREE
8834 && offset1 == NULL_TREE)
8836 offset0 = integer_zero_node;
8837 offset1 = integer_zero_node;
8839 else if (offset0 == NULL_TREE)
8840 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8841 else if (offset1 == NULL_TREE)
8842 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8844 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8845 return fold_build2 (code, type, offset0, offset1);
8849 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8851 tree targ0 = strip_float_extensions (arg0);
8852 tree targ1 = strip_float_extensions (arg1);
8853 tree newtype = TREE_TYPE (targ0);
8855 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8856 newtype = TREE_TYPE (targ1);
8858 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8859 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8860 return fold_build2 (code, type, fold_convert (newtype, targ0),
8861 fold_convert (newtype, targ1));
8863 /* (-a) CMP (-b) -> b CMP a */
8864 if (TREE_CODE (arg0) == NEGATE_EXPR
8865 && TREE_CODE (arg1) == NEGATE_EXPR)
8866 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8867 TREE_OPERAND (arg0, 0));
8869 if (TREE_CODE (arg1) == REAL_CST)
8871 REAL_VALUE_TYPE cst;
8872 cst = TREE_REAL_CST (arg1);
8874 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8875 if (TREE_CODE (arg0) == NEGATE_EXPR)
8876 return
8877 fold_build2 (swap_tree_comparison (code), type,
8878 TREE_OPERAND (arg0, 0),
8879 build_real (TREE_TYPE (arg1),
8880 REAL_VALUE_NEGATE (cst)));
8882 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8883 /* a CMP (-0) -> a CMP 0 */
8884 if (REAL_VALUE_MINUS_ZERO (cst))
8885 return fold_build2 (code, type, arg0,
8886 build_real (TREE_TYPE (arg1), dconst0));
8888 /* x != NaN is always true, other ops are always false. */
8889 if (REAL_VALUE_ISNAN (cst)
8890 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8892 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8893 return omit_one_operand (type, tem, arg0);
8896 /* Fold comparisons against infinity. */
8897 if (REAL_VALUE_ISINF (cst))
8899 tem = fold_inf_compare (code, type, arg0, arg1);
8900 if (tem != NULL_TREE)
8901 return tem;
8905 /* If this is a comparison of a real constant with a PLUS_EXPR
8906 or a MINUS_EXPR of a real constant, we can convert it into a
8907 comparison with a revised real constant as long as no overflow
8908 occurs when unsafe_math_optimizations are enabled. */
8909 if (flag_unsafe_math_optimizations
8910 && TREE_CODE (arg1) == REAL_CST
8911 && (TREE_CODE (arg0) == PLUS_EXPR
8912 || TREE_CODE (arg0) == MINUS_EXPR)
8913 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8914 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8915 ? MINUS_EXPR : PLUS_EXPR,
8916 arg1, TREE_OPERAND (arg0, 1), 0))
8917 && ! TREE_CONSTANT_OVERFLOW (tem))
8918 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8920 /* Likewise, we can simplify a comparison of a real constant with
8921 a MINUS_EXPR whose first operand is also a real constant, i.e.
8922 (c1 - x) < c2 becomes x > c1-c2. */
8923 if (flag_unsafe_math_optimizations
8924 && TREE_CODE (arg1) == REAL_CST
8925 && TREE_CODE (arg0) == MINUS_EXPR
8926 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8927 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8928 arg1, 0))
8929 && ! TREE_CONSTANT_OVERFLOW (tem))
8930 return fold_build2 (swap_tree_comparison (code), type,
8931 TREE_OPERAND (arg0, 1), tem);
8933 /* Fold comparisons against built-in math functions. */
8934 if (TREE_CODE (arg1) == REAL_CST
8935 && flag_unsafe_math_optimizations
8936 && ! flag_errno_math)
8938 enum built_in_function fcode = builtin_mathfn_code (arg0);
8940 if (fcode != END_BUILTINS)
8942 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8943 if (tem != NULL_TREE)
8944 return tem;
8949 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
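/* For example, "i++ == 5" becomes "++i == 6": both forms observe the
   incremented value, and the pre-increment form avoids keeping the
   old value of i alive. */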
8950 if (TREE_CONSTANT (arg1)
8951 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8952 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8953 /* This optimization is invalid for ordered comparisons
8954 if CONST+INCR overflows or if foo+incr might overflow.
8955 This optimization is invalid for floating point due to rounding.
8956 For pointer types we assume overflow doesn't happen. */
8957 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8958 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8959 && (code == EQ_EXPR || code == NE_EXPR))))
8961 tree varop, newconst;
8963 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8965 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8966 arg1, TREE_OPERAND (arg0, 1));
8967 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8968 TREE_OPERAND (arg0, 0),
8969 TREE_OPERAND (arg0, 1));
8971 else
8973 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8974 arg1, TREE_OPERAND (arg0, 1));
8975 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8976 TREE_OPERAND (arg0, 0),
8977 TREE_OPERAND (arg0, 1));
8981 /* If VAROP is a reference to a bitfield, we must mask
8982 the constant by the width of the field. */
8983 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8984 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8985 && host_integerp (DECL_SIZE (TREE_OPERAND
8986 (TREE_OPERAND (varop, 0), 1)), 1))
8988 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8989 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8990 tree folded_compare, shift;
8992 /* First check whether the comparison would always
8993 come out the same. Without this check, the masking
8994 below could change its meaning. */
8995 folded_compare = fold_build2 (code, type,
8996 TREE_OPERAND (varop, 0), arg1);
8997 if (integer_zerop (folded_compare)
8998 || integer_onep (folded_compare))
8999 return omit_one_operand (type, folded_compare, varop);
9001 shift = build_int_cst (NULL_TREE,
9002 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9003 shift = fold_convert (TREE_TYPE (varop), shift);
9004 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9005 newconst, shift);
9006 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9007 newconst, shift);
9010 return fold_build2 (code, type, varop, newconst);
9013 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9014 This transformation affects the cases which are handled in later
9015 optimizations involving comparisons with non-negative constants. */
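/* For example, X >= 1 becomes X > 0 and X < 1 becomes X <= 0,
   canonicalizing toward the forms the later optimizations expect. */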
9016 if (TREE_CODE (arg1) == INTEGER_CST
9017 && TREE_CODE (arg0) != INTEGER_CST
9018 && tree_int_cst_sgn (arg1) > 0)
9020 switch (code)
9022 case GE_EXPR:
9023 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9024 return fold_build2 (GT_EXPR, type, arg0, arg1);
9026 case LT_EXPR:
9027 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9028 return fold_build2 (LE_EXPR, type, arg0, arg1);
9030 default:
9031 break;
9035 /* Comparisons with the highest or lowest possible integer of
9036 the specified size will have known values.
9038 This is quite similar to fold_relational_hi_lo; however,
9039 attempts to share the code have been nothing but trouble. */
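/* For example, with an 8-bit unsigned type, X > 255 folds to false,
   X <= 255 folds to true, and X >= 255 becomes X == 255. */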
9041 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9043 if (TREE_CODE (arg1) == INTEGER_CST
9044 && ! TREE_CONSTANT_OVERFLOW (arg1)
9045 && width <= 2 * HOST_BITS_PER_WIDE_INT
9046 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9047 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9049 HOST_WIDE_INT signed_max_hi;
9050 unsigned HOST_WIDE_INT signed_max_lo;
9051 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9053 if (width <= HOST_BITS_PER_WIDE_INT)
9055 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9056 - 1;
9057 signed_max_hi = 0;
9058 max_hi = 0;
9060 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9062 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9063 min_lo = 0;
9064 min_hi = 0;
9066 else
9068 max_lo = signed_max_lo;
9069 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9070 min_hi = -1;
9073 else
9075 width -= HOST_BITS_PER_WIDE_INT;
9076 signed_max_lo = -1;
9077 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9078 - 1;
9079 max_lo = -1;
9080 min_lo = 0;
9082 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9084 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9085 min_hi = 0;
9087 else
9089 max_hi = signed_max_hi;
9090 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9094 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9095 && TREE_INT_CST_LOW (arg1) == max_lo)
9096 switch (code)
9098 case GT_EXPR:
9099 return omit_one_operand (type, integer_zero_node, arg0);
9101 case GE_EXPR:
9102 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9104 case LE_EXPR:
9105 return omit_one_operand (type, integer_one_node, arg0);
9107 case LT_EXPR:
9108 return fold_build2 (NE_EXPR, type, arg0, arg1);
9110 /* The GE_EXPR and LT_EXPR cases above are not normally
9111 reached because of previous transformations. */
9113 default:
9114 break;
9116 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9117 == max_hi
9118 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9119 switch (code)
9121 case GT_EXPR:
9122 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9123 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9124 case LE_EXPR:
9125 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9126 return fold_build2 (NE_EXPR, type, arg0, arg1);
9127 default:
9128 break;
9130 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9131 == min_hi
9132 && TREE_INT_CST_LOW (arg1) == min_lo)
9133 switch (code)
9135 case LT_EXPR:
9136 return omit_one_operand (type, integer_zero_node, arg0);
9138 case LE_EXPR:
9139 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9141 case GE_EXPR:
9142 return omit_one_operand (type, integer_one_node, arg0);
9144 case GT_EXPR:
9145 return fold_build2 (NE_EXPR, type, arg0, arg1);
9147 default:
9148 break;
9150 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9151 == min_hi
9152 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9153 switch (code)
9155 case GE_EXPR:
9156 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9157 return fold_build2 (NE_EXPR, type, arg0, arg1);
9158 case LT_EXPR:
9159 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9160 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9161 default:
9162 break;
9165 else if (!in_gimple_form
9166 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9167 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9168 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9169 /* signed_type does not work on pointer types. */
9170 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9172 /* The following case also applies to X < signed_max+1
9173 and X >= signed_max+1 because of previous transformations. */
9174 if (code == LE_EXPR || code == GT_EXPR)
9176 tree st0, st1;
9177 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9178 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9179 return fold
9180 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9181 type, fold_convert (st0, arg0),
9182 fold_convert (st1, integer_zero_node)));
9188 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9189 a MINUS_EXPR of a constant, we can convert it into a comparison with
9190 a revised constant as long as no overflow occurs. */
9191 if ((code == EQ_EXPR || code == NE_EXPR)
9192 && TREE_CODE (arg1) == INTEGER_CST
9193 && (TREE_CODE (arg0) == PLUS_EXPR
9194 || TREE_CODE (arg0) == MINUS_EXPR)
9195 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9196 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9197 ? MINUS_EXPR : PLUS_EXPR,
9198 arg1, TREE_OPERAND (arg0, 1), 0))
9199 && ! TREE_CONSTANT_OVERFLOW (tem))
9200 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9202 /* Similarly for a NEGATE_EXPR. */
9203 else if ((code == EQ_EXPR || code == NE_EXPR)
9204 && TREE_CODE (arg0) == NEGATE_EXPR
9205 && TREE_CODE (arg1) == INTEGER_CST
9206 && 0 != (tem = negate_expr (arg1))
9207 && TREE_CODE (tem) == INTEGER_CST
9208 && ! TREE_CONSTANT_OVERFLOW (tem))
9209 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9211 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9212 for !=. Don't do this for ordered comparisons due to overflow. */
9213 else if ((code == NE_EXPR || code == EQ_EXPR)
9214 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9215 return fold_build2 (code, type,
9216 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9218 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9219 && (TREE_CODE (arg0) == NOP_EXPR
9220 || TREE_CODE (arg0) == CONVERT_EXPR))
9222 /* If we are widening one operand of an integer comparison,
9223 see if the other operand is similarly being widened. Perhaps we
9224 can do the comparison in the narrower type. */
9225 tem = fold_widened_comparison (code, type, arg0, arg1);
9226 if (tem)
9227 return tem;
9229 /* Or if we are changing signedness. */
9230 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9231 if (tem)
9232 return tem;
9235 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9236 constant, we can simplify it. */
9237 else if (TREE_CODE (arg1) == INTEGER_CST
9238 && (TREE_CODE (arg0) == MIN_EXPR
9239 || TREE_CODE (arg0) == MAX_EXPR)
9240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9242 tem = optimize_minmax_comparison (code, type, op0, op1);
9243 if (tem)
9244 return tem;
9246 return NULL_TREE;
9249 /* If we are comparing an ABS_EXPR with a constant, we can
9250 convert all the cases into explicit comparisons, but they may
9251 well not be faster than doing the ABS and one comparison.
9252 But ABS (X) <= C is a range comparison, which becomes a subtraction
9253 and a comparison, and is probably faster. */
9254 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9255 && TREE_CODE (arg0) == ABS_EXPR
9256 && ! TREE_SIDE_EFFECTS (arg0)
9257 && (0 != (tem = negate_expr (arg1)))
9258 && TREE_CODE (tem) == INTEGER_CST
9259 && ! TREE_CONSTANT_OVERFLOW (tem))
9260 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9261 build2 (GE_EXPR, type,
9262 TREE_OPERAND (arg0, 0), tem),
9263 build2 (LE_EXPR, type,
9264 TREE_OPERAND (arg0, 0), arg1));
9266 /* Convert ABS_EXPR<x> >= 0 to true. */
9267 else if (code == GE_EXPR
9268 && tree_expr_nonnegative_p (arg0)
9269 && (integer_zerop (arg1)
9270 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9271 && real_zerop (arg1))))
9272 return omit_one_operand (type, integer_one_node, arg0);
9274 /* Convert ABS_EXPR<x> < 0 to false. */
9275 else if (code == LT_EXPR
9276 && tree_expr_nonnegative_p (arg0)
9277 && (integer_zerop (arg1) || real_zerop (arg1)))
9278 return omit_one_operand (type, integer_zero_node, arg0);
9280 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9281 else if ((code == EQ_EXPR || code == NE_EXPR)
9282 && TREE_CODE (arg0) == ABS_EXPR
9283 && (integer_zerop (arg1) || real_zerop (arg1)))
9284 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9286 /* If this is an EQ or NE comparison with zero and ARG0 is
9287 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9288 two operations, but the latter can be done in one less insn
9289 on machines that have only two-operand insns or on which a
9290 constant cannot be the first operand. */
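/* For example, ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0,
   replacing a shifted constant by a shifted variable. */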
9291 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9292 && TREE_CODE (arg0) == BIT_AND_EXPR)
9294 tree arg00 = TREE_OPERAND (arg0, 0);
9295 tree arg01 = TREE_OPERAND (arg0, 1);
9296 if (TREE_CODE (arg00) == LSHIFT_EXPR
9297 && integer_onep (TREE_OPERAND (arg00, 0)))
9298 return
9299 fold_build2 (code, type,
9300 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9301 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9302 arg01, TREE_OPERAND (arg00, 1)),
9303 fold_convert (TREE_TYPE (arg0),
9304 integer_one_node)),
9305 arg1);
9306 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9307 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9308 return
9309 fold_build2 (code, type,
9310 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9311 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9312 arg00, TREE_OPERAND (arg01, 1)),
9313 fold_convert (TREE_TYPE (arg0),
9314 integer_one_node)),
9315 arg1);
9318 /* If this is an NE or EQ comparison of zero against the result of a
9319 signed MOD operation whose second operand is a power of 2, make
9320 the MOD operation unsigned since it is simpler and equivalent. */
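/* For example, "x % 4 == 0" for signed x is equivalent to
   "(unsigned) x % 4 == 0": both test whether the low two bits are
   clear, and the unsigned form is simpler to expand. */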
9321 if ((code == NE_EXPR || code == EQ_EXPR)
9322 && integer_zerop (arg1)
9323 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9324 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9325 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9326 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9327 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9328 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9330 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9331 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9332 fold_convert (newtype,
9333 TREE_OPERAND (arg0, 0)),
9334 fold_convert (newtype,
9335 TREE_OPERAND (arg0, 1)));
9337 return fold_build2 (code, type, newmod,
9338 fold_convert (newtype, arg1));
9341 /* If this is an NE comparison of zero with an AND of one, remove the
9342 comparison since the AND will give the correct value. */
9343 if (code == NE_EXPR && integer_zerop (arg1)
9344 && TREE_CODE (arg0) == BIT_AND_EXPR
9345 && integer_onep (TREE_OPERAND (arg0, 1)))
9346 return fold_convert (type, arg0);
9348 /* If we have (A & C) == C where C is a power of 2, convert this into
9349 (A & C) != 0. Similarly for NE_EXPR. */
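/* For example, (x & 8) == 8 becomes (x & 8) != 0: with a single-bit
   mask the masked value can only be 0 or the mask itself. */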
9350 if ((code == EQ_EXPR || code == NE_EXPR)
9351 && TREE_CODE (arg0) == BIT_AND_EXPR
9352 && integer_pow2p (TREE_OPERAND (arg0, 1))
9353 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9354 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9355 arg0, fold_convert (TREE_TYPE (arg0),
9356 integer_zero_node));
9358 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9359 2, then fold the expression into shifts and logical operations. */
9360 tem = fold_single_bit_test (code, arg0, arg1, type);
9361 if (tem)
9362 return tem;
9364 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9365 Similarly for NE_EXPR. */
9366 if ((code == EQ_EXPR || code == NE_EXPR)
9367 && TREE_CODE (arg0) == BIT_AND_EXPR
9368 && TREE_CODE (arg1) == INTEGER_CST
9369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9371 tree notc = fold_build1 (BIT_NOT_EXPR,
9372 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9373 TREE_OPERAND (arg0, 1));
9374 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9375 arg1, notc);
9376 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9377 if (integer_nonzerop (dandnotc))
9378 return omit_one_operand (type, rslt, arg0);
9381 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9382 Similarly for NE_EXPR. */
9383 if ((code == EQ_EXPR || code == NE_EXPR)
9384 && TREE_CODE (arg0) == BIT_IOR_EXPR
9385 && TREE_CODE (arg1) == INTEGER_CST
9386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9388 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9389 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9390 TREE_OPERAND (arg0, 1), notd);
9391 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9392 if (integer_nonzerop (candnotd))
9393 return omit_one_operand (type, rslt, arg0);
9396 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9397 and similarly for >= into !=. */
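/* For example, for unsigned x, "x < (1 << y)" holds exactly when no
   bit at position y or above is set, i.e. when (x >> y) == 0. */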
9398 if ((code == LT_EXPR || code == GE_EXPR)
9399 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9400 && TREE_CODE (arg1) == LSHIFT_EXPR
9401 && integer_onep (TREE_OPERAND (arg1, 0)))
9402 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9403 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9404 TREE_OPERAND (arg1, 1)),
9405 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9407 else if ((code == LT_EXPR || code == GE_EXPR)
9408 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9409 && (TREE_CODE (arg1) == NOP_EXPR
9410 || TREE_CODE (arg1) == CONVERT_EXPR)
9411 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9412 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9413 return
9414 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9415 fold_convert (TREE_TYPE (arg0),
9416 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9417 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9418 1))),
9419 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9421 /* Simplify comparison of something with itself. (For IEEE
9422 floating-point, we can only do some of these simplifications.) */
9423 if (operand_equal_p (arg0, arg1, 0))
9425 switch (code)
9427 case EQ_EXPR:
9428 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9429 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9430 return constant_boolean_node (1, type);
9431 break;
9433 case GE_EXPR:
9434 case LE_EXPR:
9435 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9436 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9437 return constant_boolean_node (1, type);
9438 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9440 case NE_EXPR:
9441 /* For NE, we can only do this simplification if the operands are
9442 integral or we don't honor IEEE floating point NaNs. */
9443 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9444 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9445 break;
9446 /* ... fall through ... */
9447 case GT_EXPR:
9448 case LT_EXPR:
9449 return constant_boolean_node (0, type);
9450 default:
9451 gcc_unreachable ();
9455 /* If we are comparing an expression that just has comparisons
9456 of two integer values, arithmetic expressions of those comparisons,
9457 and constants, we can simplify it. There are only three cases
9458 to check: the two values can either be equal, the first can be
9459 greater, or the second can be greater. Fold the expression for
9460 those three values. Since each value must be 0 or 1, we have
9461 eight possibilities, each of which corresponds to the constant 0
9462 or 1 or one of the six possible comparisons.
9464 This handles common cases like (a > b) == 0 but also handles
9465 expressions like ((x > y) - (y > x)) > 0, which supposedly
9466 occur in macroized code. */
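/* For example, for "(a > b) == 0" the three evaluations below yield
   0, 1 and 1 as a is greater than, equal to, and less than b, giving
   the mask 3 and hence LE_EXPR: the expression folds to a <= b. */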
9468 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9470 tree cval1 = 0, cval2 = 0;
9471 int save_p = 0;
9473 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9474 /* Don't handle degenerate cases here; they should already
9475 have been handled anyway. */
9476 && cval1 != 0 && cval2 != 0
9477 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9478 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9479 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9480 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9481 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9482 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9483 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9485 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9486 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9488 /* We can't just pass T to eval_subst in case cval1 or cval2
9489 was the same as ARG1. */
9491 tree high_result
9492 = fold_build2 (code, type,
9493 eval_subst (arg0, cval1, maxval,
9494 cval2, minval),
9495 arg1);
9496 tree equal_result
9497 = fold_build2 (code, type,
9498 eval_subst (arg0, cval1, maxval,
9499 cval2, maxval),
9500 arg1);
9501 tree low_result
9502 = fold_build2 (code, type,
9503 eval_subst (arg0, cval1, minval,
9504 cval2, maxval),
9505 arg1);
9507 /* All three of these results should be 0 or 1. Confirm they
9508 are. Then use those values to select the proper code
9509 to use. */
9511 if ((integer_zerop (high_result)
9512 || integer_onep (high_result))
9513 && (integer_zerop (equal_result)
9514 || integer_onep (equal_result))
9515 && (integer_zerop (low_result)
9516 || integer_onep (low_result)))
9518 /* Make a 3-bit mask with the high-order bit being the
9519 value for `>', the next for `=', and the low for `<'. */
9520 switch ((integer_onep (high_result) * 4)
9521 + (integer_onep (equal_result) * 2)
9522 + integer_onep (low_result))
9524 case 0:
9525 /* Always false. */
9526 return omit_one_operand (type, integer_zero_node, arg0);
9527 case 1:
9528 code = LT_EXPR;
9529 break;
9530 case 2:
9531 code = EQ_EXPR;
9532 break;
9533 case 3:
9534 code = LE_EXPR;
9535 break;
9536 case 4:
9537 code = GT_EXPR;
9538 break;
9539 case 5:
9540 code = NE_EXPR;
9541 break;
9542 case 6:
9543 code = GE_EXPR;
9544 break;
9545 case 7:
9546 /* Always true. */
9547 return omit_one_operand (type, integer_one_node, arg0);
9550 if (save_p)
9551 return save_expr (build2 (code, type, cval1, cval2));
9552 else
9553 return fold_build2 (code, type, cval1, cval2);
9558 /* If this is a comparison of a field, we may be able to simplify it. */
9559 if (((TREE_CODE (arg0) == COMPONENT_REF
9560 && lang_hooks.can_use_bit_fields_p ())
9561 || TREE_CODE (arg0) == BIT_FIELD_REF)
9562 && (code == EQ_EXPR || code == NE_EXPR)
9563 /* Handle the constant case even without -O
9564 to make sure the warnings are given. */
9565 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9567 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9568 if (t1)
9569 return t1;
9572 /* If this is a comparison of complex values and either or both sides
9573 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9574 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9575 This may prevent needless evaluations. */
9576 if ((code == EQ_EXPR || code == NE_EXPR)
9577 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9578 && (TREE_CODE (arg0) == COMPLEX_EXPR
9579 || TREE_CODE (arg1) == COMPLEX_EXPR
9580 || TREE_CODE (arg0) == COMPLEX_CST
9581 || TREE_CODE (arg1) == COMPLEX_CST))
9583 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9584 tree real0, imag0, real1, imag1;
9586 arg0 = save_expr (arg0);
9587 arg1 = save_expr (arg1);
9588 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9589 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9590 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9591 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9593 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9594 : TRUTH_ORIF_EXPR),
9595 type,
9596 fold_build2 (code, type, real0, real1),
9597 fold_build2 (code, type, imag0, imag1));
9600 /* Optimize comparisons of strlen vs zero to a compare of the
9601 first character of the string vs zero. To wit,
9602 strlen(ptr) == 0 => *ptr == 0
9603 strlen(ptr) != 0 => *ptr != 0
9604 Other cases should reduce to one of these two (or a constant)
9605 due to the return value of strlen being unsigned. */
9606 if ((code == EQ_EXPR || code == NE_EXPR)
9607 && integer_zerop (arg1)
9608 && TREE_CODE (arg0) == CALL_EXPR)
9610 tree fndecl = get_callee_fndecl (arg0);
9611 tree arglist;
9613 if (fndecl
9614 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9615 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9616 && (arglist = TREE_OPERAND (arg0, 1))
9617 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9618 && ! TREE_CHAIN (arglist))
9619 return fold_build2 (code, type,
9620 build1 (INDIRECT_REF, char_type_node,
9621 TREE_VALUE (arglist)),
9622 fold_convert (char_type_node,
9623 integer_zero_node));
9626 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9627 into a single range test. */
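/* For example, with truncating division, "x / 3 == 2" holds exactly
   when 6 <= x && x <= 8, a single range test. */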
9628 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9629 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9630 && TREE_CODE (arg1) == INTEGER_CST
9631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9632 && !integer_zerop (TREE_OPERAND (arg0, 1))
9633 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9634 && !TREE_OVERFLOW (arg1))
9636 t1 = fold_div_compare (code, type, arg0, arg1);
9637 if (t1 != NULL_TREE)
9638 return t1;
9641 if ((code == EQ_EXPR || code == NE_EXPR)
9642 && !TREE_SIDE_EFFECTS (arg0)
9643 && integer_zerop (arg1)
9644 && tree_expr_nonzero_p (arg0))
9645 return constant_boolean_node (code == NE_EXPR, type);
9647 t1 = fold_relational_const (code, type, arg0, arg1);
9648 return t1 == NULL_TREE ? NULL_TREE : t1;
9650 case UNORDERED_EXPR:
9651 case ORDERED_EXPR:
9652 case UNLT_EXPR:
9653 case UNLE_EXPR:
9654 case UNGT_EXPR:
9655 case UNGE_EXPR:
9656 case UNEQ_EXPR:
9657 case LTGT_EXPR:
9658 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9660 t1 = fold_relational_const (code, type, arg0, arg1);
9661 if (t1 != NULL_TREE)
9662 return t1;
9665 /* If the first operand is NaN, the result is constant. */
9666 if (TREE_CODE (arg0) == REAL_CST
9667 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9668 && (code != LTGT_EXPR || ! flag_trapping_math))
9670 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9671 ? integer_zero_node
9672 : integer_one_node;
9673 return omit_one_operand (type, t1, arg1);
9676 /* If the second operand is NaN, the result is constant. */
9677 if (TREE_CODE (arg1) == REAL_CST
9678 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9679 && (code != LTGT_EXPR || ! flag_trapping_math))
9681 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9682 ? integer_zero_node
9683 : integer_one_node;
9684 return omit_one_operand (type, t1, arg0);
9687 /* Simplify unordered comparison of something with itself. */
9688 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9689 && operand_equal_p (arg0, arg1, 0))
9690 return constant_boolean_node (1, type);
9692 if (code == LTGT_EXPR
9693 && !flag_trapping_math
9694 && operand_equal_p (arg0, arg1, 0))
9695 return constant_boolean_node (0, type);
9697 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9699 tree targ0 = strip_float_extensions (arg0);
9700 tree targ1 = strip_float_extensions (arg1);
9701 tree newtype = TREE_TYPE (targ0);
9703 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9704 newtype = TREE_TYPE (targ1);
9706 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9707 return fold_build2 (code, type, fold_convert (newtype, targ0),
9708 fold_convert (newtype, targ1));
9711 return NULL_TREE;
9713 case COMPOUND_EXPR:
9714 /* When pedantic, a compound expression can be neither an lvalue
9715 nor an integer constant expression. */
9716 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9717 return NULL_TREE;
9718 /* Don't let (0, 0) be a null pointer constant. */
9719 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9720 : fold_convert (type, arg1);
9721 return pedantic_non_lvalue (tem);
9723 case COMPLEX_EXPR:
9724 if (wins)
9725 return build_complex (type, arg0, arg1);
9726 return NULL_TREE;
9728 default:
9729 return NULL_TREE;
9730 } /* switch (code) */
9733 /* Fold a ternary expression of code CODE and type TYPE with operands
9734 OP0, OP1, and OP2. Return the folded expression if folding is
9735 successful. Otherwise, return NULL_TREE. */
9737 static tree
9738 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9740 tree tem;
9741 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9742 enum tree_code_class kind = TREE_CODE_CLASS (code);
9744 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9745 && TREE_CODE_LENGTH (code) == 3);
9747 /* Strip any conversions that don't change the mode. This is safe
9748 for every expression, except for a comparison expression because
9749 its signedness is derived from its operands. So, in the latter
9750 case, only strip conversions that don't change the signedness.
9752 Note that this is done as an internal manipulation within the
9753 constant folder, in order to find the simplest representation of
9754 the arguments so that their form can be studied. In any case,
9755 the appropriate type conversions should be put back in the tree
9756 that will get out of the constant folder. */
9757 if (op0)
9759 arg0 = op0;
9760 STRIP_NOPS (arg0);
9763 if (op1)
9765 arg1 = op1;
9766 STRIP_NOPS (arg1);
9769 switch (code)
9771 case COMPONENT_REF:
9772 if (TREE_CODE (arg0) == CONSTRUCTOR
9773 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9775 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9776 if (m)
9777 return TREE_VALUE (m);
9779 return NULL_TREE;
9781 case COND_EXPR:
9782 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9783 so all simple results must be passed through pedantic_non_lvalue. */
9784 if (TREE_CODE (arg0) == INTEGER_CST)
9786 tem = integer_zerop (arg0) ? op2 : op1;
9787 /* Only optimize constant conditions when the selected branch
9788 has the same type as the COND_EXPR. This avoids optimizing
9789 away "c ? x : throw", where the throw has a void type. */
9790 if (! VOID_TYPE_P (TREE_TYPE (tem))
9791 || VOID_TYPE_P (type))
9792 return pedantic_non_lvalue (tem);
9793 return NULL_TREE;
9795 if (operand_equal_p (arg1, op2, 0))
9796 return pedantic_omit_one_operand (type, arg1, arg0);
9798 /* If we have A op B ? A : C, we may be able to convert this to a
9799 simpler expression, depending on the operation and the values
9800 of B and C. Signed zeros prevent all of these transformations,
9801 for reasons given above each one.
9803 Also try swapping the arguments and inverting the conditional. */
9804 if (COMPARISON_CLASS_P (arg0)
9805 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9806 arg1, TREE_OPERAND (arg0, 1))
9807 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9809 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9810 if (tem)
9811 return tem;
9814 if (COMPARISON_CLASS_P (arg0)
9815 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9816 op2,
9817 TREE_OPERAND (arg0, 1))
9818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9820 tem = invert_truthvalue (arg0);
9821 if (COMPARISON_CLASS_P (tem))
9823 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9824 if (tem)
9825 return tem;
9829 /* If the second operand is simpler than the third, swap them
9830 since that produces better jump optimization results. */
9831 if (tree_swap_operands_p (op1, op2, false))
9833 /* See if this can be inverted. If it can't, possibly because
9834 it was a floating-point inequality comparison, don't do
9835 anything. */
9836 tem = invert_truthvalue (arg0);
9838 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9839 return fold_build3 (code, type, tem, op2, op1);
9842 /* Convert A ? 1 : 0 to simply A. */
9843 if (integer_onep (op1)
9844 && integer_zerop (op2)
9845 /* If we try to convert OP0 to our type, the
9846 call to fold will try to move the conversion inside
9847 a COND, which will recurse. In that case, the COND_EXPR
9848 is probably the best choice, so leave it alone. */
9849 && type == TREE_TYPE (arg0))
9850 return pedantic_non_lvalue (arg0);
9852 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9853 over COND_EXPR in cases such as floating point comparisons. */
9854 if (integer_zerop (op1)
9855 && integer_onep (op2)
9856 && truth_value_p (TREE_CODE (arg0)))
9857 return pedantic_non_lvalue (fold_convert (type,
9858 invert_truthvalue (arg0)));
9860 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
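/* For example, with 32-bit ints, "a < 0 ? 0x80000000 : 0" equals
   "a & 0x80000000", since the sign bit is set exactly when a < 0. */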
9861 if (TREE_CODE (arg0) == LT_EXPR
9862 && integer_zerop (TREE_OPERAND (arg0, 1))
9863 && integer_zerop (op2)
9864 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9865 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9866 TREE_TYPE (tem), tem, arg1));
9868 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9869 already handled above. */
9870 if (TREE_CODE (arg0) == BIT_AND_EXPR
9871 && integer_onep (TREE_OPERAND (arg0, 1))
9872 && integer_zerop (op2)
9873 && integer_pow2p (arg1))
9875 tree tem = TREE_OPERAND (arg0, 0);
9876 STRIP_NOPS (tem);
9877 if (TREE_CODE (tem) == RSHIFT_EXPR
9878 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9879 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9880 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9881 return fold_build2 (BIT_AND_EXPR, type,
9882 TREE_OPERAND (tem, 0), arg1);
9885 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9886 is probably obsolete because the first operand should be a
9887 truth value (that's why we have the two cases above), but let's
9888 leave it in until we can confirm this for all front-ends. */
9889 if (integer_zerop (op2)
9890 && TREE_CODE (arg0) == NE_EXPR
9891 && integer_zerop (TREE_OPERAND (arg0, 1))
9892 && integer_pow2p (arg1)
9893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9894 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9895 arg1, OEP_ONLY_CONST))
9896 return pedantic_non_lvalue (fold_convert (type,
9897 TREE_OPERAND (arg0, 0)));
9899 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9900 if (integer_zerop (op2)
9901 && truth_value_p (TREE_CODE (arg0))
9902 && truth_value_p (TREE_CODE (arg1)))
9903 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
9905 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9906 if (integer_onep (op2)
9907 && truth_value_p (TREE_CODE (arg0))
9908 && truth_value_p (TREE_CODE (arg1)))
9910 /* Only perform the transformation if ARG0 is easily inverted. */
9911 tem = invert_truthvalue (arg0);
9912 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9913 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
9916 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9917 if (integer_zerop (arg1)
9918 && truth_value_p (TREE_CODE (arg0))
9919 && truth_value_p (TREE_CODE (op2)))
9921 /* Only perform the transformation if ARG0 is easily inverted. */
9922 tem = invert_truthvalue (arg0);
9923 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9924 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
9927 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9928 if (integer_onep (arg1)
9929 && truth_value_p (TREE_CODE (arg0))
9930 && truth_value_p (TREE_CODE (op2)))
9931 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
9933 return NULL_TREE;
9935 case CALL_EXPR:
9936 /* Check for a built-in function. */
9937 if (TREE_CODE (op0) == ADDR_EXPR
9938 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
9939 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
9941 tree fndecl = TREE_OPERAND (op0, 0);
9942 tree arglist = op1;
9943 tree tmp = fold_builtin (fndecl, arglist, false);
9944 if (tmp)
9945 return tmp;
9947 return NULL_TREE;
9949 default:
9950 return NULL_TREE;
9951 } /* switch (code) */
9954 /* Perform constant folding and related simplification of EXPR.
9955 The related simplifications include x*1 => x, x*0 => 0, etc.,
9956 and application of the associative law.
9957 NOP_EXPR conversions may be removed freely (as long as we
9958 are careful not to change the type of the overall expression).
9959 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
9960 but we can constant-fold them if they have constant operands. */
9962 #ifdef ENABLE_FOLD_CHECKING
9963 # define fold(x) fold_1 (x)
9964 static tree fold_1 (tree);
9965 static
9966 #endif
9967 tree
9968 fold (tree expr)
9970 const tree t = expr;
9971 enum tree_code code = TREE_CODE (t);
9972 enum tree_code_class kind = TREE_CODE_CLASS (code);
9973 tree tem;
9975 /* Return right away if a constant. */
9976 if (kind == tcc_constant)
9977 return t;
9979 if (IS_EXPR_CODE_CLASS (kind))
9981 tree type = TREE_TYPE (t);
9982 tree op0, op1, op2;
9984 switch (TREE_CODE_LENGTH (code))
9986 case 1:
9987 op0 = TREE_OPERAND (t, 0);
9988 tem = fold_unary (code, type, op0);
9989 return tem ? tem : expr;
9990 case 2:
9991 op0 = TREE_OPERAND (t, 0);
9992 op1 = TREE_OPERAND (t, 1);
9993 tem = fold_binary (code, type, op0, op1);
9994 return tem ? tem : expr;
9995 case 3:
9996 op0 = TREE_OPERAND (t, 0);
9997 op1 = TREE_OPERAND (t, 1);
9998 op2 = TREE_OPERAND (t, 2);
9999 tem = fold_ternary (code, type, op0, op1, op2);
10000 return tem ? tem : expr;
10001 default:
10002 break;
10006 switch (code)
10008 case CONST_DECL:
10009 return fold (DECL_INITIAL (t));
10011 case ASSERT_EXPR:
10013 /* Given ASSERT_EXPR <Y, COND>, return Y if COND can be folded
10014 to boolean_true_node. If COND folds to boolean_false_node,
10015 return ASSERT_EXPR <Y, 0>. Otherwise, return the original
10016 expression. */
10017 tree c = fold (ASSERT_EXPR_COND (t));
10018 if (c == boolean_true_node)
10019 return ASSERT_EXPR_VAR (t);
10020 else if (c == boolean_false_node)
10021 return build2 (ASSERT_EXPR, TREE_TYPE (t), ASSERT_EXPR_VAR (t), c);
10022 else
10023 return t;
10026 default:
10027 return t;
10028 } /* switch (code) */
10031 #ifdef ENABLE_FOLD_CHECKING
10032 #undef fold
10034 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10035 static void fold_check_failed (tree, tree);
10036 void print_fold_checksum (tree);
10038 /* When --enable-checking=fold, compute a digest of EXPR before
10039 and after the actual fold call, to verify that fold did not
10040 accidentally change the original expr. */
10042 tree
10043 fold (tree expr)
10045 tree ret;
10046 struct md5_ctx ctx;
10047 unsigned char checksum_before[16], checksum_after[16];
10048 htab_t ht;
10050 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10051 md5_init_ctx (&ctx);
10052 fold_checksum_tree (expr, &ctx, ht);
10053 md5_finish_ctx (&ctx, checksum_before);
10054 htab_empty (ht);
10056 ret = fold_1 (expr);
10058 md5_init_ctx (&ctx);
10059 fold_checksum_tree (expr, &ctx, ht);
10060 md5_finish_ctx (&ctx, checksum_after);
10061 htab_delete (ht);
10063 if (memcmp (checksum_before, checksum_after, 16))
10064 fold_check_failed (expr, ret);
10066 return ret;
10069 void
10070 print_fold_checksum (tree expr)
10072 struct md5_ctx ctx;
10073 unsigned char checksum[16], cnt;
10074 htab_t ht;
10076 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10077 md5_init_ctx (&ctx);
10078 fold_checksum_tree (expr, &ctx, ht);
10079 md5_finish_ctx (&ctx, checksum);
10080 htab_delete (ht);
10081 for (cnt = 0; cnt < 16; ++cnt)
10082 fprintf (stderr, "%02x", checksum[cnt]);
10083 putc ('\n', stderr);
10086 static void
10087 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10089 internal_error ("fold check: original tree changed by fold");
10092 static void
10093 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10095 void **slot;
10096 enum tree_code code;
10097 char buf[sizeof (struct tree_decl)];
10098 int i, len;
10100 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10101 <= sizeof (struct tree_decl))
10102 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10103 if (expr == NULL)
10104 return;
10105 slot = htab_find_slot (ht, expr, INSERT);
10106 if (*slot != NULL)
10107 return;
10108 *slot = expr;
10109 code = TREE_CODE (expr);
10110 if (TREE_CODE_CLASS (code) == tcc_declaration
10111 && DECL_ASSEMBLER_NAME_SET_P (expr))
10113 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10114 memcpy (buf, expr, tree_size (expr));
10115 expr = (tree) buf;
10116 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10118 else if (TREE_CODE_CLASS (code) == tcc_type
10119 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10120 || TYPE_CACHED_VALUES_P (expr)))
10122 /* Allow these fields to be modified. */
10123 memcpy (buf, expr, tree_size (expr));
10124 expr = (tree) buf;
10125 TYPE_POINTER_TO (expr) = NULL;
10126 TYPE_REFERENCE_TO (expr) = NULL;
10127 if (TYPE_CACHED_VALUES_P (expr))
10129 TYPE_CACHED_VALUES_P (expr) = 0;
10130 TYPE_CACHED_VALUES (expr) = NULL;
10133 md5_process_bytes (expr, tree_size (expr), ctx);
10134 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10135 if (TREE_CODE_CLASS (code) != tcc_type
10136 && TREE_CODE_CLASS (code) != tcc_declaration)
10137 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10138 switch (TREE_CODE_CLASS (code))
10140 case tcc_constant:
10141 switch (code)
10143 case STRING_CST:
10144 md5_process_bytes (TREE_STRING_POINTER (expr),
10145 TREE_STRING_LENGTH (expr), ctx);
10146 break;
10147 case COMPLEX_CST:
10148 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10149 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10150 break;
10151 case VECTOR_CST:
10152 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10153 break;
10154 default:
10155 break;
10157 break;
10158 case tcc_exceptional:
10159 switch (code)
10161 case TREE_LIST:
10162 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10163 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10164 break;
10165 case TREE_VEC:
10166 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10167 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10168 break;
10169 default:
10170 break;
10172 break;
10173 case tcc_expression:
10174 case tcc_reference:
10175 case tcc_comparison:
10176 case tcc_unary:
10177 case tcc_binary:
10178 case tcc_statement:
10179 len = TREE_CODE_LENGTH (code);
10180 for (i = 0; i < len; ++i)
10181 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10182 break;
10183 case tcc_declaration:
10184 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10185 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10186 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10187 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10188 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10189 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10190 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10191 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10192 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10193 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10194 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10195 break;
10196 case tcc_type:
10197 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10198 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10199 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10200 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10201 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10202 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10203 if (INTEGRAL_TYPE_P (expr)
10204 || SCALAR_FLOAT_TYPE_P (expr))
10206 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10207 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10209 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10210 if (TREE_CODE (expr) == RECORD_TYPE
10211 || TREE_CODE (expr) == UNION_TYPE
10212 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10213 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10214 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10215 break;
10216 default:
10217 break;
10221 #endif
10223 /* Fold a unary tree expression with code CODE of type TYPE with an
10224 operand OP0. Return a folded expression if successful. Otherwise,
10225 return a tree expression with code CODE of type TYPE with an
10226 operand OP0. */
10228 tree
10229 fold_build1 (enum tree_code code, tree type, tree op0)
10231 tree tem = fold_unary (code, type, op0);
10232 if (tem)
10233 return tem;
10235 return build1 (code, type, op0);
10238 /* Fold a binary tree expression with code CODE of type TYPE with
10239 operands OP0 and OP1. Return a folded expression if successful.
10240 Otherwise, return a tree expression with code CODE of type TYPE
10241 with operands OP0 and OP1. */
10243 tree
10244 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10246 tree tem = fold_binary (code, type, op0, op1);
10247 if (tem)
10248 return tem;
10250 return build2 (code, type, op0, op1);
10253 /* Fold a ternary tree expression with code CODE of type TYPE with
10254 operands OP0, OP1, and OP2. Return a folded expression if
10255 successful. Otherwise, return a tree expression with code CODE of
10256 type TYPE with operands OP0, OP1, and OP2. */
10258 tree
10259 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10261 tree tem = fold_ternary (code, type, op0, op1, op2);
10262 if (tem)
10263 return tem;
10265 return build3 (code, type, op0, op1, op2);
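/* For example, fold_build2 (PLUS_EXPR, type, x, build_int_cst (type, 0))
   hands "x + 0" to fold_binary, which simplifies it to x for integral
   types, so no PLUS_EXPR node needs to be built at all. */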
10268 /* Perform constant folding and related simplification of initializer
10269 expression EXPR. This behaves identically to "fold" but ignores
10270 potential run-time traps and exceptions that fold must preserve. */
10272 tree
10273 fold_initializer (tree expr)
10275 int saved_signaling_nans = flag_signaling_nans;
10276 int saved_trapping_math = flag_trapping_math;
10277 int saved_rounding_math = flag_rounding_math;
10278 int saved_trapv = flag_trapv;
10279 tree result;
10281 flag_signaling_nans = 0;
10282 flag_trapping_math = 0;
10283 flag_rounding_math = 0;
10284 flag_trapv = 0;
10286 result = fold (expr);
10288 flag_signaling_nans = saved_signaling_nans;
10289 flag_trapping_math = saved_trapping_math;
10290 flag_rounding_math = saved_rounding_math;
10291 flag_trapv = saved_trapv;
10293 return result;
10296 /* Determine whether the first argument is a multiple of the second. Return
10297 0 if it is not, or if we cannot easily determine that it is.
10299 An example of the sort of thing we care about (at this point; this routine
10300 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10301 fold cases do now) is discovering that
10303 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10305 is a multiple of
10307 SAVE_EXPR (J * 8)
10309 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10311 This code also handles discovering that
10313 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10315 is a multiple of 8 so we don't have to worry about dealing with a
10316 possible remainder.
10318 Note that we *look* inside a SAVE_EXPR only to determine how it was
10319 calculated; it is not safe for fold to do much of anything else with the
10320 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10321 at run time. For example, the latter example above *cannot* be implemented
10322 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10323 evaluation time of the original SAVE_EXPR is not necessarily the same at
10324 the time the new expression is evaluated. The only optimization of this
10325 sort that would be valid is changing
10327 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10329 divided by 8 to
10331 SAVE_EXPR (I) * SAVE_EXPR (J)
10333 (where the same SAVE_EXPR (J) is used in the original and the
10334 transformed version). */
10336 static int
10337 multiple_of_p (tree type, tree top, tree bottom)
10339 if (operand_equal_p (top, bottom, 0))
10340 return 1;
10342 if (TREE_CODE (type) != INTEGER_TYPE)
10343 return 0;
10345 switch (TREE_CODE (top))
10347 case BIT_AND_EXPR:
10348 /* A bitwise AND preserves multiples of a power of two: if either
10349 operand is a multiple of BOTTOM then TOP is as well. */
10350 if (!integer_pow2p (bottom))
10351 return 0;
10352 /* FALLTHRU */
10354 case MULT_EXPR:
10355 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10356 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10358 case PLUS_EXPR:
10359 case MINUS_EXPR:
10360 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10361 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10363 case LSHIFT_EXPR:
10364 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10366 tree op1, t1;
10368 op1 = TREE_OPERAND (top, 1);
10369 /* const_binop may not detect overflow correctly,
10370 so check for it explicitly here. */
10371 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10372 > TREE_INT_CST_LOW (op1)
10373 && TREE_INT_CST_HIGH (op1) == 0
10374 && 0 != (t1 = fold_convert (type,
10375 const_binop (LSHIFT_EXPR,
10376 size_one_node,
10377 op1, 0)))
10378 && ! TREE_OVERFLOW (t1))
10379 return multiple_of_p (type, t1, bottom);
10381 return 0;
10383 case NOP_EXPR:
10384 /* Can't handle conversions from non-integral or wider integral type. */
10385 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10386 || (TYPE_PRECISION (type)
10387 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10388 return 0;
10390 /* ... fall through ... */
10392 case SAVE_EXPR:
10393 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10395 case INTEGER_CST:
10396 if (TREE_CODE (bottom) != INTEGER_CST
10397 || (TYPE_UNSIGNED (type)
10398 && (tree_int_cst_sgn (top) < 0
10399 || tree_int_cst_sgn (bottom) < 0)))
10400 return 0;
10401 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10402 top, bottom, 0));
10404 default:
10405 return 0;
10409 /* Return true if `t' is known to be non-negative. */
10411 int
10412 tree_expr_nonnegative_p (tree t)
10414 switch (TREE_CODE (t))
10416 case ABS_EXPR:
10417 return 1;
10419 case INTEGER_CST:
10420 return tree_int_cst_sgn (t) >= 0;
10422 case REAL_CST:
10423 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10425 case PLUS_EXPR:
10426 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10427 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10428 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10430 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10431 both unsigned and at least 2 bits shorter than the result. */
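/* For example, two unsigned chars widened to a 32-bit int: the sum
   is at most 255 + 255 = 510, which needs only 9 bits and so can
   never reach the sign bit. */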
10432 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10433 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10434 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10436 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10437 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10438 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10439 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10441 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10442 TYPE_PRECISION (inner2)) + 1;
10443 return prec < TYPE_PRECISION (TREE_TYPE (t));
10446 break;
10448 case MULT_EXPR:
10449 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10451 /* x * x for floating point x is always non-negative. */
10452 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10453 return 1;
10454 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10455 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10458 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10459 both unsigned and their total bit count is less than the result's. */
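/* For example, two unsigned chars widened to a 32-bit int: the
   product is at most 255 * 255 = 65025, which fits in 16 bits and
   leaves the sign bit clear. */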
10460 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10461 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10462 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10464 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10465 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10466 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10467 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10468 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10469 < TYPE_PRECISION (TREE_TYPE (t));
10471 return 0;
10473 case TRUNC_DIV_EXPR:
10474 case CEIL_DIV_EXPR:
10475 case FLOOR_DIV_EXPR:
10476 case ROUND_DIV_EXPR:
10477 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10478 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10480 case TRUNC_MOD_EXPR:
10481 case CEIL_MOD_EXPR:
10482 case FLOOR_MOD_EXPR:
10483 case ROUND_MOD_EXPR:
10484 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10486 case RDIV_EXPR:
10487 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10488 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10490 case BIT_AND_EXPR:
10491 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10492 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10493 case BIT_IOR_EXPR:
10494 case BIT_XOR_EXPR:
10495 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10496 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10498 case NOP_EXPR:
10500 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10501 tree outer_type = TREE_TYPE (t);
10503 if (TREE_CODE (outer_type) == REAL_TYPE)
10505 if (TREE_CODE (inner_type) == REAL_TYPE)
10506 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10507 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10509 if (TYPE_UNSIGNED (inner_type))
10510 return 1;
10511 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10514 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10516 if (TREE_CODE (inner_type) == REAL_TYPE)
10517 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10518 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10519 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10520 && TYPE_UNSIGNED (inner_type);
10523 break;
10525 case COND_EXPR:
10526 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10527 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10528 case COMPOUND_EXPR:
10529 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10530 case MIN_EXPR:
10531 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10532 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10533 case MAX_EXPR:
10534 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10535 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10536 case MODIFY_EXPR:
10537 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10538 case BIND_EXPR:
10539 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10540 case SAVE_EXPR:
10541 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10542 case NON_LVALUE_EXPR:
10543 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10544 case FLOAT_EXPR:
10545 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10547 case TARGET_EXPR:
10549 tree temp = TARGET_EXPR_SLOT (t);
10550 t = TARGET_EXPR_INITIAL (t);
10552 /* If the initializer is non-void, then it's a normal expression
10553 that will be assigned to the slot. */
10554 if (!VOID_TYPE_P (t))
10555 return tree_expr_nonnegative_p (t);
10557 /* Otherwise, the initializer sets the slot in some way. One common
10558 way is an assignment statement at the end of the initializer. */
10559 while (1)
10561 if (TREE_CODE (t) == BIND_EXPR)
10562 t = expr_last (BIND_EXPR_BODY (t));
10563 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10564 || TREE_CODE (t) == TRY_CATCH_EXPR)
10565 t = expr_last (TREE_OPERAND (t, 0));
10566 else if (TREE_CODE (t) == STATEMENT_LIST)
10567 t = expr_last (t);
10568 else
10569 break;
10571 if (TREE_CODE (t) == MODIFY_EXPR
10572 && TREE_OPERAND (t, 0) == temp)
10573 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10575 return 0;
10578 case CALL_EXPR:
10580 tree fndecl = get_callee_fndecl (t);
10581 tree arglist = TREE_OPERAND (t, 1);
10582 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10583 switch (DECL_FUNCTION_CODE (fndecl))
10585 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10586 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10587 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10588 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10590 CASE_BUILTIN_F (BUILT_IN_ACOS)
10591 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10592 CASE_BUILTIN_F (BUILT_IN_CABS)
10593 CASE_BUILTIN_F (BUILT_IN_COSH)
10594 CASE_BUILTIN_F (BUILT_IN_ERFC)
10595 CASE_BUILTIN_F (BUILT_IN_EXP)
10596 CASE_BUILTIN_F (BUILT_IN_EXP10)
10597 CASE_BUILTIN_F (BUILT_IN_EXP2)
10598 CASE_BUILTIN_F (BUILT_IN_FABS)
10599 CASE_BUILTIN_F (BUILT_IN_FDIM)
10600 CASE_BUILTIN_F (BUILT_IN_FREXP)
10601 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10602 CASE_BUILTIN_F (BUILT_IN_POW10)
10603 CASE_BUILTIN_I (BUILT_IN_FFS)
10604 CASE_BUILTIN_I (BUILT_IN_PARITY)
10605 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10606 /* Always true. */
10607 return 1;
10609 CASE_BUILTIN_F (BUILT_IN_SQRT)
10610 /* sqrt(-0.0) is -0.0. */
10611 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10612 return 1;
10613 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10615 CASE_BUILTIN_F (BUILT_IN_ASINH)
10616 CASE_BUILTIN_F (BUILT_IN_ATAN)
10617 CASE_BUILTIN_F (BUILT_IN_ATANH)
10618 CASE_BUILTIN_F (BUILT_IN_CBRT)
10619 CASE_BUILTIN_F (BUILT_IN_CEIL)
10620 CASE_BUILTIN_F (BUILT_IN_ERF)
10621 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10622 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10623 CASE_BUILTIN_F (BUILT_IN_FMOD)
10624 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10625 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10626 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10627 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10628 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10629 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10630 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10631 CASE_BUILTIN_F (BUILT_IN_LRINT)
10632 CASE_BUILTIN_F (BUILT_IN_LROUND)
10633 CASE_BUILTIN_F (BUILT_IN_MODF)
10634 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10635 CASE_BUILTIN_F (BUILT_IN_POW)
10636 CASE_BUILTIN_F (BUILT_IN_RINT)
10637 CASE_BUILTIN_F (BUILT_IN_ROUND)
10638 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10639 CASE_BUILTIN_F (BUILT_IN_SINH)
10640 CASE_BUILTIN_F (BUILT_IN_TANH)
10641 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10642 /* True if the 1st argument is nonnegative. */
10643 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10645 CASE_BUILTIN_F (BUILT_IN_FMAX)
10646 /* True if the 1st OR 2nd arguments are nonnegative. */
10647 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10648 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10650 CASE_BUILTIN_F (BUILT_IN_FMIN)
10651 /* True if the 1st AND 2nd arguments are nonnegative. */
10652 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10653 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10655 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10656 /* True if the 2nd argument is nonnegative. */
10657 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10659 default:
10660 break;
10661 #undef CASE_BUILTIN_F
10662 #undef CASE_BUILTIN_I
10666 /* ... fall through ... */
10668 default:
10669 if (truth_value_p (TREE_CODE (t)))
10670 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10671 return 1;
10674 /* We don't know the sign of `t', so be conservative and return false. */
10675 return 0;
10678 /* Return true when T is an address and is known to be nonzero.
10679 For floating point we further ensure that T is not denormal.
10680    Similar logic is present in nonzero_address in rtlanal.c.  */
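/* A few illustrative cases (a sketch of what this predicate can prove,
   not an exhaustive list; the names are hypothetical):

     &local_var   => nonzero (an ordinary decl has storage)
     &weak_sym    => unknown (a weak symbol may resolve to NULL)
     a * b        => nonzero when both A and B are provably nonzero,
                     for signed types without -fwrapv
     p ? x : y    => nonzero when both X and Y are nonzero  */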
10682 static bool
10683 tree_expr_nonzero_p (tree t)
10685 tree type = TREE_TYPE (t);
10687 /* Doing something useful for floating point would need more work. */
10688 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10689 return false;
10691 switch (TREE_CODE (t))
10693 case ABS_EXPR:
10694 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10695 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10697 case INTEGER_CST:
10698 /* We used to test for !integer_zerop here. This does not work correctly
10699 if TREE_CONSTANT_OVERFLOW (t). */
10700 return (TREE_INT_CST_LOW (t) != 0
10701 || TREE_INT_CST_HIGH (t) != 0);
10703 case PLUS_EXPR:
10704 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10706 	  /* In the presence of negative values it is hard
10707 	     to say anything definite.  */
10708 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10709 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10710 return false;
10711 	  /* One of the operands must be positive and the other non-negative.  */
10712 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10713 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10715 break;
10717 case MULT_EXPR:
10718 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10720 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10721 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10723 break;
10725 case NOP_EXPR:
10727 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10728 tree outer_type = TREE_TYPE (t);
10730 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10731 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10733 break;
10735 case ADDR_EXPR:
10737 tree base = get_base_address (TREE_OPERAND (t, 0));
10739 if (!base)
10740 return false;
10742 /* Weak declarations may link to NULL. */
10743 if (DECL_P (base))
10744 return !DECL_WEAK (base);
10746 /* Constants are never weak. */
10747 if (CONSTANT_CLASS_P (base))
10748 return true;
10750 return false;
10753 case COND_EXPR:
10754 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10755 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10757 case MIN_EXPR:
10758 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10759 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10761 case MAX_EXPR:
10762 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10764 /* When both operands are nonzero, then MAX must be too. */
10765 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10766 return true;
10768 /* MAX where operand 0 is positive is positive. */
10769 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10771 /* MAX where operand 1 is positive is positive. */
10772 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10773 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10774 return true;
10775 break;
10777 case COMPOUND_EXPR:
10778 case MODIFY_EXPR:
10779 case BIND_EXPR:
10780 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10782 case SAVE_EXPR:
10783 case NON_LVALUE_EXPR:
10784 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10786 case BIT_IOR_EXPR:
10787 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10788 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10790 default:
10791 break;
10793 return false;
10796 /* See if we are applying CODE, a relational operator, to the highest or
10797    lowest possible integer of TYPE.  If so, then the result is a
10798    compile-time constant.  */
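/* For example, for a 32-bit signed X (a worked sketch using the usual
   two's complement bounds):

     x >  INT_MAX      => 0 (compile-time false)
     x <= INT_MAX      => 1 (compile-time true)
     x >= INT_MAX      => x == INT_MAX
     x <  INT_MIN      => 0
     x >  INT_MAX - 1  => x == INT_MAX (the constant is bumped by one)  */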
10800 static tree
10801 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10802 tree *op1_p)
10804 tree op0 = *op0_p;
10805 tree op1 = *op1_p;
10806 enum tree_code code = *code_p;
10807 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10809 if (TREE_CODE (op1) == INTEGER_CST
10810 && ! TREE_CONSTANT_OVERFLOW (op1)
10811 && width <= HOST_BITS_PER_WIDE_INT
10812 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10813 || POINTER_TYPE_P (TREE_TYPE (op1))))
10815 unsigned HOST_WIDE_INT signed_max;
10816 unsigned HOST_WIDE_INT max, min;
10818 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10820 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10822 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10823 min = 0;
10825 else
10827 max = signed_max;
10828 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10831 if (TREE_INT_CST_HIGH (op1) == 0
10832 && TREE_INT_CST_LOW (op1) == max)
10833 switch (code)
10835 case GT_EXPR:
10836 return omit_one_operand (type, integer_zero_node, op0);
10838 case GE_EXPR:
10839 *code_p = EQ_EXPR;
10840 break;
10841 case LE_EXPR:
10842 return omit_one_operand (type, integer_one_node, op0);
10844 case LT_EXPR:
10845 *code_p = NE_EXPR;
10846 break;
10848 /* The GE_EXPR and LT_EXPR cases above are not normally
10849 reached because of previous transformations. */
10851 default:
10852 break;
10854 else if (TREE_INT_CST_HIGH (op1) == 0
10855 && TREE_INT_CST_LOW (op1) == max - 1)
10856 switch (code)
10858 case GT_EXPR:
10859 *code_p = EQ_EXPR;
10860 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10861 break;
10862 case LE_EXPR:
10863 *code_p = NE_EXPR;
10864 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10865 break;
10866 default:
10867 break;
10869 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10870 && TREE_INT_CST_LOW (op1) == min)
10871 switch (code)
10873 case LT_EXPR:
10874 return omit_one_operand (type, integer_zero_node, op0);
10876 case LE_EXPR:
10877 *code_p = EQ_EXPR;
10878 break;
10880 case GE_EXPR:
10881 return omit_one_operand (type, integer_one_node, op0);
10883 case GT_EXPR:
10884 *code_p = NE_EXPR;
10885 break;
10887 default:
10888 break;
10890 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10891 && TREE_INT_CST_LOW (op1) == min + 1)
10892 switch (code)
10894 case GE_EXPR:
10895 *code_p = NE_EXPR;
10896 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10897 break;
10898 case LT_EXPR:
10899 *code_p = EQ_EXPR;
10900 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10901 break;
10902 default:
10903 break;
10906 else if (TREE_INT_CST_HIGH (op1) == 0
10907 && TREE_INT_CST_LOW (op1) == signed_max
10908 && TYPE_UNSIGNED (TREE_TYPE (op1))
10909 /* signed_type does not work on pointer types. */
10910 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10912 /* The following case also applies to X < signed_max+1
10913 	 and X >= signed_max+1 because of previous transformations.  */
10914 if (code == LE_EXPR || code == GT_EXPR)
10916 tree st0, st1, exp, retval;
10917 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10918 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10920 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10921 type,
10922 fold_convert (st0, op0),
10923 fold_convert (st1, integer_zero_node));
10925 retval = fold_binary_to_constant (TREE_CODE (exp),
10926 TREE_TYPE (exp),
10927 TREE_OPERAND (exp, 0),
10928 TREE_OPERAND (exp, 1));
10930 /* If we are in gimple form, then returning EXP would create
10931 	     non-gimple expressions.  Clearing it is safe and ensures
10932 we do not allow a non-gimple expression to escape. */
10933 if (in_gimple_form)
10934 exp = NULL;
10936 return (retval ? retval : exp);
10941 return NULL_TREE;
10945 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10946 attempt to fold the expression to a constant without modifying TYPE,
10947 OP0 or OP1.
10949 If the expression could be simplified to a constant, then return
10950    the constant.  If the expression cannot be simplified to a
10951    constant, then return NULL_TREE.
10953 Note this is primarily designed to be called after gimplification
10954 of the tree structures and when at least one operand is a constant.
10955 As a result of those simplifying assumptions this routine is far
10956 simpler than the generic fold routine. */
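/* A sketch of a typical call, with hypothetical operands: folding
   2 + 3 in type int,

     tem = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                    build_int_cst (integer_type_node, 2),
                                    build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5, whereas a NULL_TREE result tells the
   caller that the expression did not fold and the original tree
   should be kept.  */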
10958 tree
10959 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10961 int wins = 1;
10962 tree subop0;
10963 tree subop1;
10964 tree tem;
10966   /* If this is a commutative operation, and OP0 is a constant, move it
10967      to OP1 to reduce the number of tests below.  */
10968 if (commutative_tree_code (code)
10969 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10971 tem = op0;
10972 op0 = op1;
10973 op1 = tem;
10976   /* If either operand is a complex constant, extract its real component.  */
10977 if (TREE_CODE (op0) == COMPLEX_CST)
10978 subop0 = TREE_REALPART (op0);
10979 else
10980 subop0 = op0;
10982 if (TREE_CODE (op1) == COMPLEX_CST)
10983 subop1 = TREE_REALPART (op1);
10984 else
10985 subop1 = op1;
10987 /* Note if either argument is not a real or integer constant.
10988 With a few exceptions, simplification is limited to cases
10989 where both arguments are constants. */
10990 if ((TREE_CODE (subop0) != INTEGER_CST
10991 && TREE_CODE (subop0) != REAL_CST)
10992 || (TREE_CODE (subop1) != INTEGER_CST
10993 && TREE_CODE (subop1) != REAL_CST))
10994 wins = 0;
10996 switch (code)
10998 case PLUS_EXPR:
10999 /* (plus (address) (const_int)) is a constant. */
11000 if (TREE_CODE (op0) == PLUS_EXPR
11001 && TREE_CODE (op1) == INTEGER_CST
11002 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
11003 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
11004 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
11005 == ADDR_EXPR)))
11006 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
11008 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
11009 const_binop (PLUS_EXPR, op1,
11010 TREE_OPERAND (op0, 1), 0));
11012 case BIT_XOR_EXPR:
11014 binary:
11015 if (!wins)
11016 return NULL_TREE;
11018 /* Both arguments are constants. Simplify. */
11019 tem = const_binop (code, op0, op1, 0);
11020 if (tem != NULL_TREE)
11022 /* The return value should always have the same type as
11023 the original expression. */
11024 if (TREE_TYPE (tem) != type)
11025 tem = fold_convert (type, tem);
11027 return tem;
11029 return NULL_TREE;
11031 case MINUS_EXPR:
11032 /* Fold &x - &x. This can happen from &x.foo - &x.
11033 This is unsafe for certain floats even in non-IEEE formats.
11034 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11035 Also note that operand_equal_p is always false if an
11036 operand is volatile. */
11037 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
11038 return fold_convert (type, integer_zero_node);
11040 goto binary;
11042 case MULT_EXPR:
11043 case BIT_AND_EXPR:
11044 /* Special case multiplication or bitwise AND where one argument
11045 is zero. */
11046 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
11047 return omit_one_operand (type, op1, op0);
11048 else
11049 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
11050 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
11051 && real_zerop (op1))
11052 return omit_one_operand (type, op1, op0);
11054 goto binary;
11056 case BIT_IOR_EXPR:
11057 /* Special case when we know the result will be all ones. */
11058 if (integer_all_onesp (op1))
11059 return omit_one_operand (type, op1, op0);
11061 goto binary;
11063 case TRUNC_DIV_EXPR:
11064 case ROUND_DIV_EXPR:
11065 case FLOOR_DIV_EXPR:
11066 case CEIL_DIV_EXPR:
11067 case EXACT_DIV_EXPR:
11068 case TRUNC_MOD_EXPR:
11069 case ROUND_MOD_EXPR:
11070 case FLOOR_MOD_EXPR:
11071 case CEIL_MOD_EXPR:
11072 case RDIV_EXPR:
11073 /* Division by zero is undefined. */
11074 if (integer_zerop (op1))
11075 return NULL_TREE;
11077 if (TREE_CODE (op1) == REAL_CST
11078 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
11079 && real_zerop (op1))
11080 return NULL_TREE;
11082 goto binary;
11084 case MIN_EXPR:
11085 if (INTEGRAL_TYPE_P (type)
11086 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11087 return omit_one_operand (type, op1, op0);
11089 goto binary;
11091 case MAX_EXPR:
11092 if (INTEGRAL_TYPE_P (type)
11093 && TYPE_MAX_VALUE (type)
11094 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11095 return omit_one_operand (type, op1, op0);
11097 goto binary;
11099 case RSHIFT_EXPR:
11100 /* Optimize -1 >> x for arithmetic right shifts. */
11101 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
11102 return omit_one_operand (type, op0, op1);
11103 /* ... fall through ... */
11105 case LSHIFT_EXPR:
11106 if (integer_zerop (op0))
11107 return omit_one_operand (type, op0, op1);
11109       /* Since a negative shift count is not well-defined, don't
11110 try to compute it in the compiler. */
11111 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
11112 return NULL_TREE;
11114 goto binary;
11116 case LROTATE_EXPR:
11117 case RROTATE_EXPR:
11118 /* -1 rotated either direction by any amount is still -1. */
11119 if (integer_all_onesp (op0))
11120 return omit_one_operand (type, op0, op1);
11122 /* 0 rotated either direction by any amount is still zero. */
11123 if (integer_zerop (op0))
11124 return omit_one_operand (type, op0, op1);
11126 goto binary;
11128 case COMPLEX_EXPR:
11129 if (wins)
11130 return build_complex (type, op0, op1);
11131 return NULL_TREE;
11133 case LT_EXPR:
11134 case LE_EXPR:
11135 case GT_EXPR:
11136 case GE_EXPR:
11137 case EQ_EXPR:
11138 case NE_EXPR:
11139 /* If one arg is a real or integer constant, put it last. */
11140 if ((TREE_CODE (op0) == INTEGER_CST
11141 && TREE_CODE (op1) != INTEGER_CST)
11142 || (TREE_CODE (op0) == REAL_CST
11143 	      && TREE_CODE (op1) != REAL_CST))
11145 tree temp;
11147 temp = op0;
11148 op0 = op1;
11149 op1 = temp;
11150 code = swap_tree_comparison (code);
11153 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11154 This transformation affects the cases which are handled in later
11155 optimizations involving comparisons with non-negative constants. */
11156 if (TREE_CODE (op1) == INTEGER_CST
11157 && TREE_CODE (op0) != INTEGER_CST
11158 && tree_int_cst_sgn (op1) > 0)
11160 switch (code)
11162 case GE_EXPR:
11163 code = GT_EXPR;
11164 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11165 break;
11167 case LT_EXPR:
11168 code = LE_EXPR;
11169 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11170 break;
11172 default:
11173 break;
11177 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
11178 if (tem)
11179 return tem;
11181 /* Fall through. */
11183 case ORDERED_EXPR:
11184 case UNORDERED_EXPR:
11185 case UNLT_EXPR:
11186 case UNLE_EXPR:
11187 case UNGT_EXPR:
11188 case UNGE_EXPR:
11189 case UNEQ_EXPR:
11190 case LTGT_EXPR:
11191 if (!wins)
11192 return NULL_TREE;
11194 return fold_relational_const (code, type, op0, op1);
11196 case RANGE_EXPR:
11197 /* This could probably be handled. */
11198 return NULL_TREE;
11200 case TRUTH_AND_EXPR:
11201 /* If second arg is constant zero, result is zero, but first arg
11202 must be evaluated. */
11203 if (integer_zerop (op1))
11204 return omit_one_operand (type, op1, op0);
11205 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11206 case will be handled here. */
11207 if (integer_zerop (op0))
11208 return omit_one_operand (type, op0, op1);
11209 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11210 return constant_boolean_node (true, type);
11211 return NULL_TREE;
11213 case TRUTH_OR_EXPR:
11214 /* If second arg is constant true, result is true, but we must
11215 evaluate first arg. */
11216 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
11217 return omit_one_operand (type, op1, op0);
11218 /* Likewise for first arg, but note this only occurs here for
11219 TRUTH_OR_EXPR. */
11220 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
11221 return omit_one_operand (type, op0, op1);
11222 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11223 return constant_boolean_node (false, type);
11224 return NULL_TREE;
11226 case TRUTH_XOR_EXPR:
11227 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11229 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
11230 return constant_boolean_node (x, type);
11232 return NULL_TREE;
11234 default:
11235 return NULL_TREE;
11239 /* Given the components of a unary expression CODE, TYPE and OP0,
11240 attempt to fold the expression to a constant without modifying
11241 TYPE or OP0.
11243 If the expression could be simplified to a constant, then return
11244    the constant.  If the expression cannot be simplified to a
11245    constant, then return NULL_TREE.
11247 Note this is primarily designed to be called after gimplification
11248 of the tree structures and when op0 is a constant. As a result
11249 of those simplifying assumptions this routine is far simpler than
11250 the generic fold routine. */
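/* Sketched examples (hypothetical constant operands):

     NEGATE_EXPR applied to the INTEGER_CST 7    => -7
     BIT_NOT_EXPR applied to the INTEGER_CST 0   => -1 (all ones)
     ABS_EXPR applied to a non-constant operand  => NULL_TREE  */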
11252 tree
11253 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11255 /* Make sure we have a suitable constant argument. */
11256 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
11258 tree subop;
11260 if (TREE_CODE (op0) == COMPLEX_CST)
11261 subop = TREE_REALPART (op0);
11262 else
11263 subop = op0;
11265 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
11266 return NULL_TREE;
11269 switch (code)
11271 case NOP_EXPR:
11272 case FLOAT_EXPR:
11273 case CONVERT_EXPR:
11274 case FIX_TRUNC_EXPR:
11275 case FIX_FLOOR_EXPR:
11276 case FIX_CEIL_EXPR:
11277 case FIX_ROUND_EXPR:
11278 return fold_convert_const (code, type, op0);
11280 case NEGATE_EXPR:
11281 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11282 return fold_negate_const (op0, type);
11283 else
11284 return NULL_TREE;
11286 case ABS_EXPR:
11287 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11288 return fold_abs_const (op0, type);
11289 else
11290 return NULL_TREE;
11292 case BIT_NOT_EXPR:
11293 if (TREE_CODE (op0) == INTEGER_CST)
11294 return fold_not_const (op0, type);
11295 else
11296 return NULL_TREE;
11298 case REALPART_EXPR:
11299 if (TREE_CODE (op0) == COMPLEX_CST)
11300 return TREE_REALPART (op0);
11301 else
11302 return NULL_TREE;
11304 case IMAGPART_EXPR:
11305 if (TREE_CODE (op0) == COMPLEX_CST)
11306 return TREE_IMAGPART (op0);
11307 else
11308 return NULL_TREE;
11310 case CONJ_EXPR:
11311 if (TREE_CODE (op0) == COMPLEX_CST
11312 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11313 return build_complex (type, TREE_REALPART (op0),
11314 negate_expr (TREE_IMAGPART (op0)));
11315 return NULL_TREE;
11317 default:
11318 return NULL_TREE;
11322 /* If EXP represents referencing an element in a constant string
11323 (either via pointer arithmetic or array indexing), return the
11324 tree representing the value accessed, otherwise return NULL. */
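/* For example (a sketch; I is a hypothetical non-constant index):

     "abcdef"[2]        => 'c'
     *("abcdef" + 3)    => 'd'
     "abcdef"[i]        => NULL  */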
11326 tree
11327 fold_read_from_constant_string (tree exp)
11329 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11331 tree exp1 = TREE_OPERAND (exp, 0);
11332 tree index;
11333 tree string;
11335 if (TREE_CODE (exp) == INDIRECT_REF)
11336 string = string_constant (exp1, &index);
11337 else
11339 tree low_bound = array_ref_low_bound (exp);
11340 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11342 	  /* Optimize the special case of a zero lower bound.
11344 	     We convert the low_bound to sizetype to avoid some problems
11345 	     with constant folding.  (E.g. suppose the lower bound is 1,
11346 	     and its mode is QI.  Without the conversion, (ARRAY
11347 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11348 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
11349 if (! integer_zerop (low_bound))
11350 index = size_diffop (index, fold_convert (sizetype, low_bound));
11352 string = exp1;
11355 if (string
11356 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11357 && TREE_CODE (string) == STRING_CST
11358 && TREE_CODE (index) == INTEGER_CST
11359 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11360 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11361 == MODE_INT)
11362 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11363 return fold_convert (TREE_TYPE (exp),
11364 build_int_cst (NULL_TREE,
11365 (TREE_STRING_POINTER (string)
11366 [TREE_INT_CST_LOW (index)])));
11368 return NULL;
11371 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11372 an integer constant or real constant.
11374 TYPE is the type of the result. */
11376 static tree
11377 fold_negate_const (tree arg0, tree type)
11379 tree t = NULL_TREE;
11381 switch (TREE_CODE (arg0))
11383 case INTEGER_CST:
11385 unsigned HOST_WIDE_INT low;
11386 HOST_WIDE_INT high;
11387 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11388 TREE_INT_CST_HIGH (arg0),
11389 &low, &high);
11390 t = build_int_cst_wide (type, low, high);
11391 t = force_fit_type (t, 1,
11392 (overflow | TREE_OVERFLOW (arg0))
11393 && !TYPE_UNSIGNED (type),
11394 TREE_CONSTANT_OVERFLOW (arg0));
11395 break;
11398 case REAL_CST:
11399 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11400 break;
11402 default:
11403 gcc_unreachable ();
11406 return t;
11409 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11410 an integer constant or real constant.
11412 TYPE is the type of the result. */
11414 tree
11415 fold_abs_const (tree arg0, tree type)
11417 tree t = NULL_TREE;
11419 switch (TREE_CODE (arg0))
11421 case INTEGER_CST:
11422 /* If the value is unsigned, then the absolute value is
11423 the same as the ordinary value. */
11424 if (TYPE_UNSIGNED (type))
11425 t = arg0;
11426 /* Similarly, if the value is non-negative. */
11427 else if (INT_CST_LT (integer_minus_one_node, arg0))
11428 t = arg0;
11429 /* If the value is negative, then the absolute value is
11430 its negation. */
11431 else
11433 unsigned HOST_WIDE_INT low;
11434 HOST_WIDE_INT high;
11435 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11436 TREE_INT_CST_HIGH (arg0),
11437 &low, &high);
11438 t = build_int_cst_wide (type, low, high);
11439 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11440 TREE_CONSTANT_OVERFLOW (arg0));
11442 break;
11444 case REAL_CST:
11445 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11446 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11447 else
11448 t = arg0;
11449 break;
11451 default:
11452 gcc_unreachable ();
11455 return t;
11458 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11459 constant. TYPE is the type of the result. */
11461 static tree
11462 fold_not_const (tree arg0, tree type)
11464 tree t = NULL_TREE;
11466 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11468 t = build_int_cst_wide (type,
11469 ~ TREE_INT_CST_LOW (arg0),
11470 ~ TREE_INT_CST_HIGH (arg0));
11471 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11472 TREE_CONSTANT_OVERFLOW (arg0));
11474 return t;
11477 /* Given CODE, a relational operator, the target type, TYPE and two
11478 constant operands OP0 and OP1, return the result of the
11479 relational operation. If the result is not a compile time
11480 constant, then return NULL_TREE. */
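/* For example (a sketch):

     2.0 < 3.0    => true
     NaN == NaN   => false
     NaN != 1.0   => true
     NaN <  1.0   => false when trapping math is disabled; otherwise
                     NULL_TREE, so the (possibly trapping) runtime
                     comparison is preserved  */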
11482 static tree
11483 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11485 int result, invert;
11487 /* From here on, the only cases we handle are when the result is
11488 known to be a constant. */
11490 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11492 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11493 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11495 /* Handle the cases where either operand is a NaN. */
11496 if (real_isnan (c0) || real_isnan (c1))
11498 switch (code)
11500 case EQ_EXPR:
11501 case ORDERED_EXPR:
11502 result = 0;
11503 break;
11505 case NE_EXPR:
11506 case UNORDERED_EXPR:
11507 case UNLT_EXPR:
11508 case UNLE_EXPR:
11509 case UNGT_EXPR:
11510 case UNGE_EXPR:
11511 case UNEQ_EXPR:
11512 result = 1;
11513 break;
11515 case LT_EXPR:
11516 case LE_EXPR:
11517 case GT_EXPR:
11518 case GE_EXPR:
11519 case LTGT_EXPR:
11520 if (flag_trapping_math)
11521 return NULL_TREE;
11522 result = 0;
11523 break;
11525 default:
11526 gcc_unreachable ();
11529 return constant_boolean_node (result, type);
11532 return constant_boolean_node (real_compare (code, c0, c1), type);
11535 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11537 To compute GT, swap the arguments and do LT.
11538 To compute GE, do LT and invert the result.
11539 To compute LE, swap the arguments, do LT and invert the result.
11540 To compute NE, do EQ and invert the result.
11542 Therefore, the code below must handle only EQ and LT. */
11544 if (code == LE_EXPR || code == GT_EXPR)
11546 tree tem = op0;
11547 op0 = op1;
11548 op1 = tem;
11549 code = swap_tree_comparison (code);
11552 /* Note that it is safe to invert for real values here because we
11553      have already handled the one case where it matters.  */
11555 invert = 0;
11556 if (code == NE_EXPR || code == GE_EXPR)
11558 invert = 1;
11559 code = invert_tree_comparison (code, false);
11562   /* Compute a result for LT or EQ if the arguments permit;
11563      otherwise return NULL_TREE.  */
11564 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11566 if (code == EQ_EXPR)
11567 result = tree_int_cst_equal (op0, op1);
11568 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11569 result = INT_CST_LT_UNSIGNED (op0, op1);
11570 else
11571 result = INT_CST_LT (op0, op1);
11573 else
11574 return NULL_TREE;
11576 if (invert)
11577 result ^= 1;
11578 return constant_boolean_node (result, type);
11581 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11582    Don't build a cleanup point expression for EXPR if it doesn't have side
11583 effects. */
11585 tree
11586 fold_build_cleanup_point_expr (tree type, tree expr)
11588 /* If the expression does not have side effects then we don't have to wrap
11589 it with a cleanup point expression. */
11590 if (!TREE_SIDE_EFFECTS (expr))
11591 return expr;
11593   /* If the expression is a return, check whether the expression inside the
11594      return, or the right-hand side of the MODIFY_EXPR inside the return,
11595      has side effects.  If either of them doesn't, we don't need to
11596      wrap the expression in a cleanup point expression.  Note we don't check
11597      the left-hand side of the MODIFY_EXPR because it should always be the
      return decl.  */
11598 if (TREE_CODE (expr) == RETURN_EXPR)
11600 tree op = TREE_OPERAND (expr, 0);
11601 if (!op || !TREE_SIDE_EFFECTS (op))
11602 return expr;
11603 op = TREE_OPERAND (op, 1);
11604 if (!TREE_SIDE_EFFECTS (op))
11605 return expr;
11608 return build1 (CLEANUP_POINT_EXPR, type, expr);
11611 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11612 avoid confusing the gimplify process. */
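/* For example (a sketch):

     &*p    => p (the INDIRECT_REF is folded away)
     &var   => ADDR_EXPR <var>, and VAR is marked TREE_ADDRESSABLE  */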
11614 tree
11615 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11617 /* The size of the object is not relevant when talking about its address. */
11618 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11619 t = TREE_OPERAND (t, 0);
11621 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11622 if (TREE_CODE (t) == INDIRECT_REF
11623 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11625 t = TREE_OPERAND (t, 0);
11626 if (TREE_TYPE (t) != ptrtype)
11627 t = build1 (NOP_EXPR, ptrtype, t);
11629 else
11631 tree base = t;
11633 while (handled_component_p (base))
11634 base = TREE_OPERAND (base, 0);
11635 if (DECL_P (base))
11636 TREE_ADDRESSABLE (base) = 1;
11638 t = build1 (ADDR_EXPR, ptrtype, t);
11641 return t;
11644 tree
11645 build_fold_addr_expr (tree t)
11647 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11650 /* Given a pointer value T, return a simplified version of an indirection
11651 through T, or NULL_TREE if no simplification is possible. */
11653 static tree
11654 fold_indirect_ref_1 (tree t)
11656 tree type = TREE_TYPE (TREE_TYPE (t));
11657 tree sub = t;
11658 tree subtype;
11660 STRIP_NOPS (sub);
11661 subtype = TREE_TYPE (sub);
11662 if (!POINTER_TYPE_P (subtype))
11663 return NULL_TREE;
11665 if (TREE_CODE (sub) == ADDR_EXPR)
11667 tree op = TREE_OPERAND (sub, 0);
11668 tree optype = TREE_TYPE (op);
11669 /* *&p => p */
11670 if (lang_hooks.types_compatible_p (type, optype))
11671 return op;
11672 /* *(foo *)&fooarray => fooarray[0] */
11673 else if (TREE_CODE (optype) == ARRAY_TYPE
11674 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11676 tree type_domain = TYPE_DOMAIN (optype);
11677 tree min_val = size_zero_node;
11678 if (type_domain && TYPE_MIN_VALUE (type_domain))
11679 min_val = TYPE_MIN_VALUE (type_domain);
11680 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11684 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11685 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11686 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11688 tree type_domain;
11689 tree min_val = size_zero_node;
11690 sub = build_fold_indirect_ref (sub);
11691 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11692 if (type_domain && TYPE_MIN_VALUE (type_domain))
11693 min_val = TYPE_MIN_VALUE (type_domain);
11694 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11697 return NULL_TREE;
11700 /* Builds an expression for an indirection through T, simplifying some
11701 cases. */
11703 tree
11704 build_fold_indirect_ref (tree t)
11706 tree sub = fold_indirect_ref_1 (t);
11708 if (sub)
11709 return sub;
11710 else
11711 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11714 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11716 tree
11717 fold_indirect_ref (tree t)
11719 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11721 if (sub)
11722 return sub;
11723 else
11724 return t;
11727 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11728 whose result is ignored. The type of the returned tree need not be
11729    the same as that of the original expression.  */
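/* For example (a sketch; F has side effects, X does not):

     x + f ()    => f () (only the side-effecting operand is kept)
     -f ()       => f ()
     x           => 0 (nothing needs to be evaluated at all)  */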
11731 tree
11732 fold_ignored_result (tree t)
11734 if (!TREE_SIDE_EFFECTS (t))
11735 return integer_zero_node;
11737 for (;;)
11738 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11740 case tcc_unary:
11741 t = TREE_OPERAND (t, 0);
11742 break;
11744 case tcc_binary:
11745 case tcc_comparison:
11746 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11747 t = TREE_OPERAND (t, 0);
11748 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11749 t = TREE_OPERAND (t, 1);
11750 else
11751 return t;
11752 break;
11754 case tcc_expression:
11755 switch (TREE_CODE (t))
11757 case COMPOUND_EXPR:
11758 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11759 return t;
11760 t = TREE_OPERAND (t, 0);
11761 break;
11763 case COND_EXPR:
11764 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11765 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11766 return t;
11767 t = TREE_OPERAND (t, 0);
11768 break;
11770 default:
11771 return t;
11773 break;
11775 default:
11776 return t;
11780 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11781 This can only be applied to objects of a sizetype. */
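/* For example, when DIVISOR is a power of two the computation below
   reduces to bit manipulation (a sketch in plain arithmetic):

     round_up (value, 8)  ==  (value + 7) & -8

   so round_up (13, 8) yields 16 and round_up (16, 8) stays 16.  */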
11783 tree
11784 round_up (tree value, int divisor)
11786 tree div = NULL_TREE;
11788 gcc_assert (divisor > 0);
11789 if (divisor == 1)
11790 return value;
11792   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11793      have to do anything.  Only do this check when VALUE is not a constant,
11794      because for a constant the check is more expensive than simply
11795      performing the rounding.  */
11796 if (TREE_CODE (value) != INTEGER_CST)
11798 div = build_int_cst (TREE_TYPE (value), divisor);
11800 if (multiple_of_p (TREE_TYPE (value), value, div))
11801 return value;
11804 /* If divisor is a power of two, simplify this to bit manipulation. */
11805 if (divisor == (divisor & -divisor))
11807 tree t;
11809 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11810 value = size_binop (PLUS_EXPR, value, t);
11811 t = build_int_cst (TREE_TYPE (value), -divisor);
11812 value = size_binop (BIT_AND_EXPR, value, t);
11814 else
11816 if (!div)
11817 div = build_int_cst (TREE_TYPE (value), divisor);
11818 value = size_binop (CEIL_DIV_EXPR, value, div);
11819 value = size_binop (MULT_EXPR, value, div);
11822 return value;
11825 /* Likewise, but round down. */
11827 tree
11828 round_down (tree value, int divisor)
11830 tree div = NULL_TREE;
11832 gcc_assert (divisor > 0);
11833 if (divisor == 1)
11834 return value;
11836   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11837      have to do anything.  Only do this check when VALUE is not a constant,
11838      because for a constant the check is more expensive than simply
11839      performing the rounding.  */
11840 if (TREE_CODE (value) != INTEGER_CST)
11842 div = build_int_cst (TREE_TYPE (value), divisor);
11844 if (multiple_of_p (TREE_TYPE (value), value, div))
11845 return value;
11848 /* If divisor is a power of two, simplify this to bit manipulation. */
11849 if (divisor == (divisor & -divisor))
11851 tree t;
11853 t = build_int_cst (TREE_TYPE (value), -divisor);
11854 value = size_binop (BIT_AND_EXPR, value, t);
11856 else
11858 if (!div)
11859 div = build_int_cst (TREE_TYPE (value), divisor);
11860 value = size_binop (FLOOR_DIV_EXPR, value, div);
11861 value = size_binop (MULT_EXPR, value, div);
11864 return value;
11867 /* Returns the pointer to the base of the object addressed by EXP and
11868    extracts the information about the offset of the access, storing it
11869    in *PBITPOS and *POFFSET.  */
11871 static tree
11872 split_address_to_core_and_offset (tree exp,
11873 HOST_WIDE_INT *pbitpos, tree *poffset)
11875 tree core;
11876 enum machine_mode mode;
11877 int unsignedp, volatilep;
11878 HOST_WIDE_INT bitsize;
11880 if (TREE_CODE (exp) == ADDR_EXPR)
11882 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11883 poffset, &mode, &unsignedp, &volatilep,
11884 false);
11886 if (TREE_CODE (core) == INDIRECT_REF)
11887 core = TREE_OPERAND (core, 0);
11889 else
11891 core = exp;
11892 *pbitpos = 0;
11893 *poffset = NULL_TREE;
11896 return core;
11899 /* Returns true if addresses of E1 and E2 differ by a constant, false
11900 otherwise. If they do, E1 - E2 is stored in *DIFF. */
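/* For example (a sketch; A is an array of 4-byte ints, I is not
   constant):

     ptr_difference_const (&a[3], &a[1], &diff)  => true, *diff == 8
     ptr_difference_const (&a[i], &a[0], &diff)  => false  */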
11902 bool
11903 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11905 tree core1, core2;
11906 HOST_WIDE_INT bitpos1, bitpos2;
11907 tree toffset1, toffset2, tdiff, type;
11909 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11910 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11912 if (bitpos1 % BITS_PER_UNIT != 0
11913 || bitpos2 % BITS_PER_UNIT != 0
11914 || !operand_equal_p (core1, core2, 0))
11915 return false;
11917 if (toffset1 && toffset2)
11919 type = TREE_TYPE (toffset1);
11920 if (type != TREE_TYPE (toffset2))
11921 toffset2 = fold_convert (type, toffset2);
11923 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11924 if (!host_integerp (tdiff, 0))
11925 return false;
11927 *diff = tree_low_cst (tdiff, 0);
11929 else if (toffset1 || toffset2)
11931 /* If only one of the offsets is non-constant, the difference cannot
11932 be a constant. */
11933 return false;
11935 else
11936 *diff = 0;
11938 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11939 return true;
11942 /* Simplify the floating point expression EXP when the sign of the
11943 result is not significant. Return NULL_TREE if no simplification
11944 is possible. */
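/* For example (a sketch):

     -x          => x
     fabs (x)    => x
     (-x) * y    => x * y, unless sign-dependent rounding must be
                    honored; NULL_TREE when nothing can be stripped  */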
11946 tree
11947 fold_strip_sign_ops (tree exp)
11949 tree arg0, arg1;
11951 switch (TREE_CODE (exp))
11953 case ABS_EXPR:
11954 case NEGATE_EXPR:
11955 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11956 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11958 case MULT_EXPR:
11959 case RDIV_EXPR:
11960 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11961 return NULL_TREE;
11962 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11963 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11964 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11965 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11966 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11967 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11968 break;
11970 default:
11971 break;
11973 return NULL_TREE;