1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
62 /* The following constants represent a bit-based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
66 COMPCODE_FALSE = 0,
67 COMPCODE_LT = 1,
68 COMPCODE_EQ = 2,
69 COMPCODE_LE = 3,
70 COMPCODE_GT = 4,
71 COMPCODE_LTGT = 5,
72 COMPCODE_GE = 6,
73 COMPCODE_ORD = 7,
74 COMPCODE_UNORD = 8,
75 COMPCODE_UNLT = 9,
76 COMPCODE_UNEQ = 10,
77 COMPCODE_UNLE = 11,
78 COMPCODE_UNGT = 12,
79 COMPCODE_NE = 13,
80 COMPCODE_UNGE = 14,
81 COMPCODE_TRUE = 15
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum tree_code invert_tree_comparison (enum tree_code, bool);
93 static enum comparison_code comparison_to_compcode (enum tree_code);
94 static enum tree_code compcode_to_comparison (enum comparison_code);
95 static tree combine_comparisons (enum tree_code, enum tree_code,
96 enum tree_code, tree, tree, tree);
97 static int truth_value_p (enum tree_code);
98 static int operand_equal_for_comparison_p (tree, tree, tree);
99 static int twoval_comparison_p (tree, tree *, tree *, int *);
100 static tree eval_subst (tree, tree, tree, tree, tree);
101 static tree pedantic_omit_one_operand (tree, tree, tree);
102 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
103 static tree make_bit_field_ref (tree, tree, int, int, int);
104 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
105 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
106 enum machine_mode *, int *, int *,
107 tree *, tree *);
108 static int all_ones_mask_p (tree, int);
109 static tree sign_bit_p (tree, tree);
110 static int simple_operand_p (tree);
111 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
112 static tree make_range (tree, int *, tree *, tree *);
113 static tree build_range_check (tree, tree, int, tree, tree);
114 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 tree);
116 static tree fold_range_test (enum tree_code, tree, tree, tree);
117 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
118 static tree unextend (tree, int, int, tree);
119 static tree fold_truthop (enum tree_code, tree, tree, tree);
120 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
121 static tree extract_muldiv (tree, tree, enum tree_code, tree);
122 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
123 static int multiple_of_p (tree, tree, tree);
124 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
125 tree, tree,
126 tree, tree, int);
127 static bool fold_real_zero_addition_p (tree, tree, int);
128 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 tree, tree, tree);
130 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
131 static tree fold_div_compare (enum tree_code, tree, tree, tree);
132 static bool reorder_operands_p (tree, tree);
133 static tree fold_negate_const (tree, tree);
134 static tree fold_not_const (tree, tree);
135 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 static tree fold_relational_hi_lo (enum tree_code *, const tree,
137 tree *, tree *);
138 static bool tree_expr_nonzero_p (tree);
140 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
141 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
142 and SUM1. Then this yields nonzero if overflow occurred during the
143 addition.
145 Overflow occurs if A and B have the same sign, but A and SUM differ in
146 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
147 sign. */
148 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
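/* Editorial sketch (not part of the original source): how OVERFLOW_SUM_SIGN
   detects signed overflow.  Using 8-bit two's complement values for
   readability:

     a = 100 (0x64), b = 100 (0x64), sum = a + b = -56 (0xc8)
     ~(a ^ b)  = 0xff    (a and b have the same sign)
     (a ^ sum) = 0xac    (a and sum differ in the sign bit)
     AND       = 0xac, which is negative, so the macro yields nonzero.

   When a and b differ in sign, ~(a ^ b) has a clear sign bit, the AND is
   non-negative, and the macro yields zero; such an addition cannot
   overflow.  */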
150 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
151 We do that by representing the two-word integer in 4 words, with only
152 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
153 number. The value of the word is LOWPART + HIGHPART * BASE. */
155 #define LOWPART(x) \
156 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
157 #define HIGHPART(x) \
158 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
159 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
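/* Editorial sketch (assumes HOST_BITS_PER_WIDE_INT == 64, so BASE is
   1 << 32): every x satisfies x == LOWPART (x) + HIGHPART (x) * BASE.
   For example:

     x            == 0x123456789abcdef0
     LOWPART (x)  == 0x9abcdef0
     HIGHPART (x) == 0x12345678  */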
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs. */
165 static void
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
178 static void
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
180 HOST_WIDE_INT *hi)
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
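/* Editorial sketch (assumes HOST_BITS_PER_WIDE_INT == 64): encode and
   decode are inverses, so this round trip restores LOW and HI exactly,
   including a negative HI:

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT low = 0xdeadbeefcafef00d;
     HOST_WIDE_INT hi = -1;
     encode (w, low, hi);     -- w[] now holds the four 32-bit half-words
     decode (w, &low, &hi);   -- low and hi are unchanged  */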
186 /* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
187 in overflow of the value; when it is >0 we are only interested in signed
188 overflow, when it is <0 we are interested in any overflow.  OVERFLOWED
189 indicates whether overflow has already occurred.  CONST_OVERFLOWED
190 indicates whether constant overflow has already occurred.  We force
191 T's value to be within range of T's type (by setting to 0 or 1 all
192 the bits outside the type's range).  We set TREE_OVERFLOW if
193 OVERFLOWED is nonzero,
194 or OVERFLOWABLE is >0 and signed overflow occurs,
195 or OVERFLOWABLE is <0 and any overflow occurs.
196 We set TREE_CONSTANT_OVERFLOW if
197 CONST_OVERFLOWED is nonzero
198 or we set TREE_OVERFLOW.
199 We return either the original T, or a copy.  */
201 tree
202 force_fit_type (tree t, int overflowable,
203 bool overflowed, bool overflowed_const)
205 unsigned HOST_WIDE_INT low;
206 HOST_WIDE_INT high;
207 unsigned int prec;
208 int sign_extended_type;
210 gcc_assert (TREE_CODE (t) == INTEGER_CST);
212 low = TREE_INT_CST_LOW (t);
213 high = TREE_INT_CST_HIGH (t);
215 if (POINTER_TYPE_P (TREE_TYPE (t))
216 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
217 prec = POINTER_SIZE;
218 else
219 prec = TYPE_PRECISION (TREE_TYPE (t));
220 /* Size types *are* sign extended. */
221 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
222 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
223 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
225 /* First clear all bits that are beyond the type's precision. */
227 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
229 else if (prec > HOST_BITS_PER_WIDE_INT)
230 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
231 else
233 high = 0;
234 if (prec < HOST_BITS_PER_WIDE_INT)
235 low &= ~((HOST_WIDE_INT) (-1) << prec);
238 if (!sign_extended_type)
239 /* No sign extension */;
240 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
241 /* Correct width already. */;
242 else if (prec > HOST_BITS_PER_WIDE_INT)
244 /* Sign extend top half? */
245 if (high & ((unsigned HOST_WIDE_INT)1
246 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
247 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
249 else if (prec == HOST_BITS_PER_WIDE_INT)
251 if ((HOST_WIDE_INT)low < 0)
252 high = -1;
254 else
256 /* Sign extend bottom half? */
257 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
259 high = -1;
260 low |= (HOST_WIDE_INT)(-1) << prec;
264 /* If the value changed, return a new node. */
265 if (overflowed || overflowed_const
266 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
268 t = build_int_cst_wide (TREE_TYPE (t), low, high);
270 if (overflowed
271 || overflowable < 0
272 || (overflowable > 0 && sign_extended_type))
274 t = copy_node (t);
275 TREE_OVERFLOW (t) = 1;
276 TREE_CONSTANT_OVERFLOW (t) = 1;
278 else if (overflowed_const)
280 t = copy_node (t);
281 TREE_CONSTANT_OVERFLOW (t) = 1;
285 return t;
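/* Editorial sketch (hypothetical values, not from the original source):
   for a signed 8-bit type, an INTEGER_CST holding 0x1ff is first reduced
   to the type's precision (0xff) and then sign extended, so the returned
   constant is -1.  Because the value changed and the type is signed, a
   call with OVERFLOWABLE > 0 also sets TREE_OVERFLOW and
   TREE_CONSTANT_OVERFLOW on the returned copy.  */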
288 /* Add two doubleword integers with doubleword result.
289 Each argument is given as two `HOST_WIDE_INT' pieces.
290 One argument is L1 and H1; the other, L2 and H2.
291 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
294 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
295 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
296 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
298 unsigned HOST_WIDE_INT l;
299 HOST_WIDE_INT h;
301 l = l1 + l2;
302 h = h1 + h2 + (l < l1);
304 *lv = l;
305 *hv = h;
306 return OVERFLOW_SUM_SIGN (h1, h2, h);
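/* Editorial sketch (not part of the original source): the carry out of the
   low half is supplied by (l < l1).  For example

     add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lv, &hv);

   yields lv == 0, hv == 1 and returns 0: both operands and the sum are
   non-negative as doublewords, so OVERFLOW_SUM_SIGN sees no signed
   overflow.  */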
309 /* Negate a doubleword integer with doubleword result.
310 Return nonzero if the operation overflows, assuming it's signed.
311 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
312 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
315 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
316 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
318 if (l1 == 0)
320 *lv = 0;
321 *hv = - h1;
322 return (*hv & h1) < 0;
324 else
326 *lv = -l1;
327 *hv = ~h1;
328 return 0;
332 /* Multiply two doubleword integers with doubleword result.
333 Return nonzero if the operation overflows, assuming it's signed.
334 Each argument is given as two `HOST_WIDE_INT' pieces.
335 One argument is L1 and H1; the other, L2 and H2.
336 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
339 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
340 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
341 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
343 HOST_WIDE_INT arg1[4];
344 HOST_WIDE_INT arg2[4];
345 HOST_WIDE_INT prod[4 * 2];
346 unsigned HOST_WIDE_INT carry;
347 int i, j, k;
348 unsigned HOST_WIDE_INT toplow, neglow;
349 HOST_WIDE_INT tophigh, neghigh;
351 encode (arg1, l1, h1);
352 encode (arg2, l2, h2);
354 memset (prod, 0, sizeof prod);
356 for (i = 0; i < 4; i++)
358 carry = 0;
359 for (j = 0; j < 4; j++)
361 k = i + j;
362 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
363 carry += arg1[i] * arg2[j];
364 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
365 carry += prod[k];
366 prod[k] = LOWPART (carry);
367 carry = HIGHPART (carry);
369 prod[i + 4] = carry;
372 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
374 /* Check for overflow by calculating the top half of the answer in full;
375 it should agree with the low half's sign bit. */
376 decode (prod + 4, &toplow, &tophigh);
377 if (h1 < 0)
379 neg_double (l2, h2, &neglow, &neghigh);
380 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
382 if (h2 < 0)
384 neg_double (l1, h1, &neglow, &neghigh);
385 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
387 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
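/* Editorial sketch (not from the original source): the check above computes
   the true upper doubleword of the 4-word product (adjusting for negative
   inputs, since the digit multiply is unsigned) and then requires it to be
   a pure sign extension of *hv: all zero bits when *hv >= 0, all one bits
   when *hv < 0.  Squaring l1 == 0, h1 == 1 (the value
   2**HOST_BITS_PER_WIDE_INT) leaves a nonzero upper half, so mul_double
   returns nonzero to report signed overflow.  */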
390 /* Shift the doubleword integer in L1, H1 left by COUNT places
391 keeping only PREC bits of result.
392 Shift right if COUNT is negative.
393 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
394 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
396 void
397 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
398 HOST_WIDE_INT count, unsigned int prec,
399 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
401 unsigned HOST_WIDE_INT signmask;
403 if (count < 0)
405 rshift_double (l1, h1, -count, prec, lv, hv, arith);
406 return;
409 if (SHIFT_COUNT_TRUNCATED)
410 count %= prec;
412 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
414 /* Shifting by the host word size is undefined according to the
415 ANSI standard, so we must handle this as a special case. */
416 *hv = 0;
417 *lv = 0;
419 else if (count >= HOST_BITS_PER_WIDE_INT)
421 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
422 *lv = 0;
424 else
426 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
427 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
428 *lv = l1 << count;
431 /* Sign extend all bits that are beyond the precision. */
433 signmask = -((prec > HOST_BITS_PER_WIDE_INT
434 ? ((unsigned HOST_WIDE_INT) *hv
435 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
436 : (*lv >> (prec - 1))) & 1);
438 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
440 else if (prec >= HOST_BITS_PER_WIDE_INT)
442 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
443 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
445 else
447 *hv = signmask;
448 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
449 *lv |= signmask << prec;
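/* Editorial sketch (assumes HOST_BITS_PER_WIDE_INT == 64): with prec == 32,
   shifting 0x80000000 left by one moves its only set bit past the
   precision, and the extension step then folds the result back into the
   32-bit range, so

     lshift_double (0x80000000, 0, 1, 32, &lv, &hv, 0);

   leaves lv == 0 and hv == 0.  */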
453 /* Shift the doubleword integer in L1, H1 right by COUNT places
454 keeping only PREC bits of result. COUNT must be positive.
455 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
456 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
458 void
459 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
460 HOST_WIDE_INT count, unsigned int prec,
461 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
462 int arith)
464 unsigned HOST_WIDE_INT signmask;
466 signmask = (arith
467 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
468 : 0);
470 if (SHIFT_COUNT_TRUNCATED)
471 count %= prec;
473 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
475 /* Shifting by the host word size is undefined according to the
476 ANSI standard, so we must handle this as a special case. */
477 *hv = 0;
478 *lv = 0;
480 else if (count >= HOST_BITS_PER_WIDE_INT)
482 *hv = 0;
483 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
485 else
487 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
488 *lv = ((l1 >> count)
489 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
492 /* Zero / sign extend all bits that are beyond the precision. */
494 if (count >= (HOST_WIDE_INT)prec)
496 *hv = signmask;
497 *lv = signmask;
499 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
501 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
503 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
504 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
506 else
508 *hv = signmask;
509 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
510 *lv |= signmask << (prec - count);
514 /* Rotate the doubleword integer in L1, H1 left by COUNT places
515 keeping only PREC bits of result.
516 Rotate right if COUNT is negative.
517 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
519 void
520 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
521 HOST_WIDE_INT count, unsigned int prec,
522 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
524 unsigned HOST_WIDE_INT s1l, s2l;
525 HOST_WIDE_INT s1h, s2h;
527 count %= prec;
528 if (count < 0)
529 count += prec;
531 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
532 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 *lv = s1l | s2l;
534 *hv = s1h | s2h;
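/* Editorial sketch (not part of the original source): a left rotation by
   COUNT within PREC bits is the OR of a logical left shift by COUNT and a
   logical right shift by PREC - COUNT, which is exactly how the body above
   composes lshift_double and rshift_double.  E.g. rotating the 8-bit value
   0xb1 left by 4: (0xb1 << 4 | 0xb1 >> 4) & 0xff == 0x1b.  */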
537 /* Rotate the doubleword integer in L1, H1 right by COUNT places
538 keeping only PREC bits of result. COUNT must be positive.
539 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
541 void
542 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
543 HOST_WIDE_INT count, unsigned int prec,
544 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
546 unsigned HOST_WIDE_INT s1l, s2l;
547 HOST_WIDE_INT s1h, s2h;
549 count %= prec;
550 if (count < 0)
551 count += prec;
553 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
554 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 *lv = s1l | s2l;
556 *hv = s1h | s2h;
559 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
560 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
561 CODE is a tree code for a kind of division, one of
562 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
563 or EXACT_DIV_EXPR
564 It controls how the quotient is rounded to an integer.
565 Return nonzero if the operation overflows.
566 UNS nonzero says do unsigned division. */
569 div_and_round_double (enum tree_code code, int uns,
570 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
571 HOST_WIDE_INT hnum_orig,
572 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
573 HOST_WIDE_INT hden_orig,
574 unsigned HOST_WIDE_INT *lquo,
575 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
576 HOST_WIDE_INT *hrem)
578 int quo_neg = 0;
579 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
580 HOST_WIDE_INT den[4], quo[4];
581 int i, j;
582 unsigned HOST_WIDE_INT work;
583 unsigned HOST_WIDE_INT carry = 0;
584 unsigned HOST_WIDE_INT lnum = lnum_orig;
585 HOST_WIDE_INT hnum = hnum_orig;
586 unsigned HOST_WIDE_INT lden = lden_orig;
587 HOST_WIDE_INT hden = hden_orig;
588 int overflow = 0;
590 if (hden == 0 && lden == 0)
591 overflow = 1, lden = 1;
593 /* Calculate quotient sign and convert operands to unsigned. */
594 if (!uns)
596 if (hnum < 0)
598 quo_neg = ~ quo_neg;
599 /* (minimum integer) / (-1) is the only overflow case. */
600 if (neg_double (lnum, hnum, &lnum, &hnum)
601 && ((HOST_WIDE_INT) lden & hden) == -1)
602 overflow = 1;
604 if (hden < 0)
606 quo_neg = ~ quo_neg;
607 neg_double (lden, hden, &lden, &hden);
611 if (hnum == 0 && hden == 0)
612 { /* single precision */
613 *hquo = *hrem = 0;
614 /* This unsigned division rounds toward zero. */
615 *lquo = lnum / lden;
616 goto finish_up;
619 if (hnum == 0)
620 { /* trivial case: dividend < divisor */
621 /* hden != 0 already checked. */
622 *hquo = *lquo = 0;
623 *hrem = hnum;
624 *lrem = lnum;
625 goto finish_up;
628 memset (quo, 0, sizeof quo);
630 memset (num, 0, sizeof num); /* to zero 9th element */
631 memset (den, 0, sizeof den);
633 encode (num, lnum, hnum);
634 encode (den, lden, hden);
636 /* Special code for when the divisor < BASE. */
637 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
639 /* hnum != 0 already checked. */
640 for (i = 4 - 1; i >= 0; i--)
642 work = num[i] + carry * BASE;
643 quo[i] = work / lden;
644 carry = work % lden;
647 else
649 /* Full double precision division,
650 with thanks to Don Knuth's "Seminumerical Algorithms". */
651 int num_hi_sig, den_hi_sig;
652 unsigned HOST_WIDE_INT quo_est, scale;
654 /* Find the highest nonzero divisor digit. */
655 for (i = 4 - 1;; i--)
656 if (den[i] != 0)
658 den_hi_sig = i;
659 break;
662 /* Ensure that the first digit of the divisor is at least BASE/2.
663 This is required by the quotient digit estimation algorithm. */
665 scale = BASE / (den[den_hi_sig] + 1);
666 if (scale > 1)
667 { /* scale divisor and dividend */
668 carry = 0;
669 for (i = 0; i <= 4 - 1; i++)
671 work = (num[i] * scale) + carry;
672 num[i] = LOWPART (work);
673 carry = HIGHPART (work);
676 num[4] = carry;
677 carry = 0;
678 for (i = 0; i <= 4 - 1; i++)
680 work = (den[i] * scale) + carry;
681 den[i] = LOWPART (work);
682 carry = HIGHPART (work);
683 if (den[i] != 0) den_hi_sig = i;
687 num_hi_sig = 4;
689 /* Main loop */
690 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
692 /* Guess the next quotient digit, quo_est, by dividing the first
693 two remaining dividend digits by the high order quotient digit.
694 quo_est is never low and is at most 2 high. */
695 unsigned HOST_WIDE_INT tmp;
697 num_hi_sig = i + den_hi_sig + 1;
698 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
699 if (num[num_hi_sig] != den[den_hi_sig])
700 quo_est = work / den[den_hi_sig];
701 else
702 quo_est = BASE - 1;
704 /* Refine quo_est so it's usually correct, and at most one high. */
705 tmp = work - quo_est * den[den_hi_sig];
706 if (tmp < BASE
707 && (den[den_hi_sig - 1] * quo_est
708 > (tmp * BASE + num[num_hi_sig - 2])))
709 quo_est--;
711 /* Try QUO_EST as the quotient digit, by multiplying the
712 divisor by QUO_EST and subtracting from the remaining dividend.
713 Keep in mind that QUO_EST is the I - 1st digit. */
715 carry = 0;
716 for (j = 0; j <= den_hi_sig; j++)
718 work = quo_est * den[j] + carry;
719 carry = HIGHPART (work);
720 work = num[i + j] - LOWPART (work);
721 num[i + j] = LOWPART (work);
722 carry += HIGHPART (work) != 0;
725 /* If quo_est was high by one, then num[i] went negative and
726 we need to correct things. */
727 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
729 quo_est--;
730 carry = 0; /* add divisor back in */
731 for (j = 0; j <= den_hi_sig; j++)
733 work = num[i + j] + den[j] + carry;
734 carry = HIGHPART (work);
735 num[i + j] = LOWPART (work);
738 num [num_hi_sig] += carry;
741 /* Store the quotient digit. */
742 quo[i] = quo_est;
746 decode (quo, lquo, hquo);
748 finish_up:
749 /* If result is negative, make it so. */
750 if (quo_neg)
751 neg_double (*lquo, *hquo, lquo, hquo);
753 /* Compute trial remainder: rem = num - (quo * den) */
754 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
755 neg_double (*lrem, *hrem, lrem, hrem);
756 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
758 switch (code)
760 case TRUNC_DIV_EXPR:
761 case TRUNC_MOD_EXPR: /* round toward zero */
762 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 return overflow;
765 case FLOOR_DIV_EXPR:
766 case FLOOR_MOD_EXPR: /* round toward negative infinity */
767 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
769 /* quo = quo - 1; */
770 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
771 lquo, hquo);
773 else
774 return overflow;
775 break;
777 case CEIL_DIV_EXPR:
778 case CEIL_MOD_EXPR: /* round toward positive infinity */
779 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
781 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
782 lquo, hquo);
784 else
785 return overflow;
786 break;
788 case ROUND_DIV_EXPR:
789 case ROUND_MOD_EXPR: /* round to closest integer */
791 unsigned HOST_WIDE_INT labs_rem = *lrem;
792 HOST_WIDE_INT habs_rem = *hrem;
793 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
794 HOST_WIDE_INT habs_den = hden, htwice;
796 /* Get absolute values. */
797 if (*hrem < 0)
798 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
799 if (hden < 0)
800 neg_double (lden, hden, &labs_den, &habs_den);
802 /* If (2 * abs (lrem) >= abs (lden)) */
803 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
804 labs_rem, habs_rem, &ltwice, &htwice);
806 if (((unsigned HOST_WIDE_INT) habs_den
807 < (unsigned HOST_WIDE_INT) htwice)
808 || (((unsigned HOST_WIDE_INT) habs_den
809 == (unsigned HOST_WIDE_INT) htwice)
810 && (labs_den < ltwice)))
812 if (*hquo < 0)
813 /* quo = quo - 1; */
814 add_double (*lquo, *hquo,
815 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
816 else
817 /* quo = quo + 1; */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
819 lquo, hquo);
821 else
822 return overflow;
824 break;
826 default:
827 gcc_unreachable ();
830 /* Compute true remainder: rem = num - (quo * den) */
831 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
832 neg_double (*lrem, *hrem, lrem, hrem);
833 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 return overflow;
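/* Editorial sketch (hypothetical values, not from the original source):
   the rounding codes differ only in how the trial (truncated) quotient is
   adjusted.  Dividing -8 by 3 first yields quo == -2, rem == -2, then:

     TRUNC_DIV_EXPR / EXACT_DIV_EXPR  ->  -2  (keep the trial quotient)
     FLOOR_DIV_EXPR                   ->  -3  (negative quotient, rem != 0)
     CEIL_DIV_EXPR                    ->  -2  (only positive quotients bump)
     ROUND_DIV_EXPR                   ->  -3  (2 * |rem| == 4 > |den| == 3)

   The *_MOD_EXPR codes apply the same adjustment and then recompute the
   remainder as num - quo * den.  */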
837 /* Return true if the built-in mathematical function specified by CODE
838 preserves the sign of its argument, i.e. -f(x) == f(-x). */
840 static bool
841 negate_mathfn_p (enum built_in_function code)
843 switch (code)
845 case BUILT_IN_ASIN:
846 case BUILT_IN_ASINF:
847 case BUILT_IN_ASINL:
848 case BUILT_IN_ATAN:
849 case BUILT_IN_ATANF:
850 case BUILT_IN_ATANL:
851 case BUILT_IN_SIN:
852 case BUILT_IN_SINF:
853 case BUILT_IN_SINL:
854 case BUILT_IN_TAN:
855 case BUILT_IN_TANF:
856 case BUILT_IN_TANL:
857 return true;
859 default:
860 break;
862 return false;
865 /* Check whether we may negate an integer constant T without causing
866 overflow. */
868 bool
869 may_negate_without_overflow_p (tree t)
871 unsigned HOST_WIDE_INT val;
872 unsigned int prec;
873 tree type;
875 gcc_assert (TREE_CODE (t) == INTEGER_CST);
877 type = TREE_TYPE (t);
878 if (TYPE_UNSIGNED (type))
879 return false;
881 prec = TYPE_PRECISION (type);
882 if (prec > HOST_BITS_PER_WIDE_INT)
884 if (TREE_INT_CST_LOW (t) != 0)
885 return true;
886 prec -= HOST_BITS_PER_WIDE_INT;
887 val = TREE_INT_CST_HIGH (t);
889 else
890 val = TREE_INT_CST_LOW (t);
891 if (prec < HOST_BITS_PER_WIDE_INT)
892 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
893 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
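/* Editorial sketch (not part of the original source): the only signed
   two's complement value whose negation overflows is the most negative
   one.  For a signed 16-bit type the function therefore returns false
   exactly when the masked low word equals 1 << 15, i.e. for -32768, and
   true for every other constant of that type.  */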
896 /* Determine whether an expression T can be cheaply negated using
897 the function negate_expr. */
899 static bool
900 negate_expr_p (tree t)
902 tree type;
904 if (t == 0)
905 return false;
907 type = TREE_TYPE (t);
909 STRIP_SIGN_NOPS (t);
910 switch (TREE_CODE (t))
912 case INTEGER_CST:
913 if (TYPE_UNSIGNED (type) || ! flag_trapv)
914 return true;
916 /* Check that -CST will not overflow type. */
917 return may_negate_without_overflow_p (t);
919 case REAL_CST:
920 case NEGATE_EXPR:
921 return true;
923 case COMPLEX_CST:
924 return negate_expr_p (TREE_REALPART (t))
925 && negate_expr_p (TREE_IMAGPART (t));
927 case PLUS_EXPR:
928 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
929 return false;
930 /* -(A + B) -> (-B) - A. */
931 if (negate_expr_p (TREE_OPERAND (t, 1))
932 && reorder_operands_p (TREE_OPERAND (t, 0),
933 TREE_OPERAND (t, 1)))
934 return true;
935 /* -(A + B) -> (-A) - B. */
936 return negate_expr_p (TREE_OPERAND (t, 0));
938 case MINUS_EXPR:
939 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
940 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
941 && reorder_operands_p (TREE_OPERAND (t, 0),
942 TREE_OPERAND (t, 1));
944 case MULT_EXPR:
945 if (TYPE_UNSIGNED (TREE_TYPE (t)))
946 break;
948 /* Fall through. */
950 case RDIV_EXPR:
951 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
952 return negate_expr_p (TREE_OPERAND (t, 1))
953 || negate_expr_p (TREE_OPERAND (t, 0));
954 break;
956 case NOP_EXPR:
957 /* Negate -((double)float) as (double)(-float). */
958 if (TREE_CODE (type) == REAL_TYPE)
960 tree tem = strip_float_extensions (t);
961 if (tem != t)
962 return negate_expr_p (tem);
964 break;
966 case CALL_EXPR:
967 /* Negate -f(x) as f(-x). */
968 if (negate_mathfn_p (builtin_mathfn_code (t)))
969 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
970 break;
972 case RSHIFT_EXPR:
973 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
974 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
976 tree op1 = TREE_OPERAND (t, 1);
977 if (TREE_INT_CST_HIGH (op1) == 0
978 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
979 == TREE_INT_CST_LOW (op1))
980 return true;
982 break;
984 default:
985 break;
987 return false;
990 /* Given T, an expression, return the negation of T. Allow for T to be
991 null, in which case return null. */
993 static tree
994 negate_expr (tree t)
996 tree type;
997 tree tem;
999 if (t == 0)
1000 return 0;
1002 type = TREE_TYPE (t);
1003 STRIP_SIGN_NOPS (t);
1005 switch (TREE_CODE (t))
1007 case INTEGER_CST:
1008 tem = fold_negate_const (t, type);
1009 if (! TREE_OVERFLOW (tem)
1010 || TYPE_UNSIGNED (type)
1011 || ! flag_trapv)
1012 return tem;
1013 break;
1015 case REAL_CST:
1016 tem = fold_negate_const (t, type);
1017 /* Two's complement FP formats, such as c4x, may overflow. */
1018 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1019 return fold_convert (type, tem);
1020 break;
1022 case COMPLEX_CST:
1024 tree rpart = negate_expr (TREE_REALPART (t));
1025 tree ipart = negate_expr (TREE_IMAGPART (t));
1027 if ((TREE_CODE (rpart) == REAL_CST
1028 && TREE_CODE (ipart) == REAL_CST)
1029 || (TREE_CODE (rpart) == INTEGER_CST
1030 && TREE_CODE (ipart) == INTEGER_CST))
1031 return build_complex (type, rpart, ipart);
1033 break;
1035 case NEGATE_EXPR:
1036 return fold_convert (type, TREE_OPERAND (t, 0));
1038 case PLUS_EXPR:
1039 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1041 /* -(A + B) -> (-B) - A. */
1042 if (negate_expr_p (TREE_OPERAND (t, 1))
1043 && reorder_operands_p (TREE_OPERAND (t, 0),
1044 TREE_OPERAND (t, 1)))
1046 tem = negate_expr (TREE_OPERAND (t, 1));
1047 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1048 tem, TREE_OPERAND (t, 0));
1049 return fold_convert (type, tem);
1052 /* -(A + B) -> (-A) - B. */
1053 if (negate_expr_p (TREE_OPERAND (t, 0)))
1055 tem = negate_expr (TREE_OPERAND (t, 0));
1056 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1057 tem, TREE_OPERAND (t, 1));
1058 return fold_convert (type, tem);
1061 break;
1063 case MINUS_EXPR:
1064 /* - (A - B) -> B - A */
1065 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1066 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1067 return fold_convert (type,
1068 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1069 TREE_OPERAND (t, 1),
1070 TREE_OPERAND (t, 0)));
1071 break;
1073 case MULT_EXPR:
1074 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1075 break;
1077 /* Fall through. */
1079 case RDIV_EXPR:
1080 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1082 tem = TREE_OPERAND (t, 1);
1083 if (negate_expr_p (tem))
1084 return fold_convert (type,
1085 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1086 TREE_OPERAND (t, 0),
1087 negate_expr (tem)));
1088 tem = TREE_OPERAND (t, 0);
1089 if (negate_expr_p (tem))
1090 return fold_convert (type,
1091 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1092 negate_expr (tem),
1093 TREE_OPERAND (t, 1)));
1095 break;
1097 case NOP_EXPR:
1098 /* Convert -((double)float) into (double)(-float). */
1099 if (TREE_CODE (type) == REAL_TYPE)
1101 tem = strip_float_extensions (t);
1102 if (tem != t && negate_expr_p (tem))
1103 return fold_convert (type, negate_expr (tem));
1105 break;
1107 case CALL_EXPR:
1108 /* Negate -f(x) as f(-x). */
1109 if (negate_mathfn_p (builtin_mathfn_code (t))
1110 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1112 tree fndecl, arg, arglist;
1114 fndecl = get_callee_fndecl (t);
1115 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1116 arglist = build_tree_list (NULL_TREE, arg);
1117 return build_function_call_expr (fndecl, arglist);
1119 break;
1121 case RSHIFT_EXPR:
1122 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1123 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1125 tree op1 = TREE_OPERAND (t, 1);
1126 if (TREE_INT_CST_HIGH (op1) == 0
1127 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1128 == TREE_INT_CST_LOW (op1))
1130 tree ntype = TYPE_UNSIGNED (type)
1131 ? lang_hooks.types.signed_type (type)
1132 : lang_hooks.types.unsigned_type (type);
1133 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1134 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1135 return fold_convert (type, temp);
1138 break;
1140 default:
1141 break;
1144 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1145 return fold_convert (type, tem);
1148 /* Split a tree IN into constant, literal and variable parts that could be
1149 combined with CODE to make IN. "constant" means an expression with
1150 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1151 commutative arithmetic operation. Store the constant part into *CONP,
1152 the literal in *LITP and return the variable part. If a part isn't
1153 present, set it to null. If the tree does not decompose in this way,
1154 return the entire tree as the variable part and the other parts as null.
1156 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1157 case, we negate an operand that was subtracted. Except if it is a
1158 literal for which we use *MINUS_LITP instead.
1160 If NEGATE_P is true, we are negating all of IN, again except a literal
1161 for which we use *MINUS_LITP instead.
1163 If IN is itself a literal or constant, return it as appropriate.
1165 Note that we do not guarantee that any of the three values will be the
1166 same type as IN, but they will have the same signedness and mode. */
1168 static tree
1169 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1170 tree *minus_litp, int negate_p)
1172 tree var = 0;
1174 *conp = 0;
1175 *litp = 0;
1176 *minus_litp = 0;
1178 /* Strip any conversions that don't change the machine mode or signedness. */
1179 STRIP_SIGN_NOPS (in);
1181 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1182 *litp = in;
1183 else if (TREE_CODE (in) == code
1184 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1185 /* We can associate addition and subtraction together (even
1186 though the C standard doesn't say so) for integers because
1187 the value is not affected. For reals, the value might be
1188 affected, so we can't. */
1189 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1190 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1192 tree op0 = TREE_OPERAND (in, 0);
1193 tree op1 = TREE_OPERAND (in, 1);
1194 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1195 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1197 /* First see if either of the operands is a literal, then a constant. */
1198 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1199 *litp = op0, op0 = 0;
1200 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1201 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1203 if (op0 != 0 && TREE_CONSTANT (op0))
1204 *conp = op0, op0 = 0;
1205 else if (op1 != 0 && TREE_CONSTANT (op1))
1206 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1208 /* If we haven't dealt with either operand, this is not a case we can
1209 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1210 if (op0 != 0 && op1 != 0)
1211 var = in;
1212 else if (op0 != 0)
1213 var = op0;
1214 else
1215 var = op1, neg_var_p = neg1_p;
1217 /* Now do any needed negations. */
1218 if (neg_litp_p)
1219 *minus_litp = *litp, *litp = 0;
1220 if (neg_conp_p)
1221 *conp = negate_expr (*conp);
1222 if (neg_var_p)
1223 var = negate_expr (var);
1225 else if (TREE_CONSTANT (in))
1226 *conp = in;
1227 else
1228 var = in;
1230 if (negate_p)
1232 if (*litp)
1233 *minus_litp = *litp, *litp = 0;
1234 else if (*minus_litp)
1235 *litp = *minus_litp, *minus_litp = 0;
1236 *conp = negate_expr (*conp);
1237 var = negate_expr (var);
1240 return var;
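/* Editorial sketch (hypothetical expression, not from the original
   source): splitting IN == x - 4 with CODE == PLUS_EXPR and NEGATE_P == 0,

     var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);

   returns var == x with lit == 0, con == 0 and minus_lit == 4: the
   subtracted INTEGER_CST is reported through *MINUS_LITP rather than
   being rebuilt as a negated literal.  */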
1243 /* Re-associate trees split by the above function. T1 and T2 are either
1244 expressions to associate or null. Return the new expression, if any. If
1245 we build an operation, do it in TYPE and with CODE. */
1247 static tree
1248 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1250 if (t1 == 0)
1251 return t2;
1252 else if (t2 == 0)
1253 return t1;
1255 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1256 try to fold this since we will have infinite recursion. But do
1257 deal with any NEGATE_EXPRs. */
1258 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1259 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1261 if (code == PLUS_EXPR)
1263 if (TREE_CODE (t1) == NEGATE_EXPR)
1264 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1265 fold_convert (type, TREE_OPERAND (t1, 0)));
1266 else if (TREE_CODE (t2) == NEGATE_EXPR)
1267 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1268 fold_convert (type, TREE_OPERAND (t2, 0)));
1269 else if (integer_zerop (t2))
1270 return fold_convert (type, t1);
1272 else if (code == MINUS_EXPR)
1274 if (integer_zerop (t2))
1275 return fold_convert (type, t1);
1278 return build2 (code, type, fold_convert (type, t1),
1279 fold_convert (type, t2));
1282 return fold_build2 (code, type, fold_convert (type, t1),
1283 fold_convert (type, t2));
1286 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1287 to produce a new constant.
1289 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1291 tree
1292 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1294 unsigned HOST_WIDE_INT int1l, int2l;
1295 HOST_WIDE_INT int1h, int2h;
1296 unsigned HOST_WIDE_INT low;
1297 HOST_WIDE_INT hi;
1298 unsigned HOST_WIDE_INT garbagel;
1299 HOST_WIDE_INT garbageh;
1300 tree t;
1301 tree type = TREE_TYPE (arg1);
1302 int uns = TYPE_UNSIGNED (type);
1303 int is_sizetype
1304 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1305 int overflow = 0;
1307 int1l = TREE_INT_CST_LOW (arg1);
1308 int1h = TREE_INT_CST_HIGH (arg1);
1309 int2l = TREE_INT_CST_LOW (arg2);
1310 int2h = TREE_INT_CST_HIGH (arg2);
1312 switch (code)
1314 case BIT_IOR_EXPR:
1315 low = int1l | int2l, hi = int1h | int2h;
1316 break;
1318 case BIT_XOR_EXPR:
1319 low = int1l ^ int2l, hi = int1h ^ int2h;
1320 break;
1322 case BIT_AND_EXPR:
1323 low = int1l & int2l, hi = int1h & int2h;
1324 break;
1326 case RSHIFT_EXPR:
1327 int2l = -int2l;
1328 case LSHIFT_EXPR:
1329 /* It's unclear from the C standard whether shifts can overflow.
1330 The following code ignores overflow; perhaps a C standard
1331 interpretation ruling is needed. */
1332 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1333 &low, &hi, !uns);
1334 break;
1336 case RROTATE_EXPR:
1337 int2l = - int2l;
1338 case LROTATE_EXPR:
1339 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1340 &low, &hi);
1341 break;
1343 case PLUS_EXPR:
1344 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1345 break;
1347 case MINUS_EXPR:
1348 neg_double (int2l, int2h, &low, &hi);
1349 add_double (int1l, int1h, low, hi, &low, &hi);
1350 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1351 break;
1353 case MULT_EXPR:
1354 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1355 break;
1357 case TRUNC_DIV_EXPR:
1358 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1359 case EXACT_DIV_EXPR:
1360 /* This is a shortcut for a common special case. */
1361 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1362 && ! TREE_CONSTANT_OVERFLOW (arg1)
1363 && ! TREE_CONSTANT_OVERFLOW (arg2)
1364 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1366 if (code == CEIL_DIV_EXPR)
1367 int1l += int2l - 1;
1369 low = int1l / int2l, hi = 0;
1370 break;
1373 /* ... fall through ... */
1375 case ROUND_DIV_EXPR:
1376 if (int2h == 0 && int2l == 1)
1378 low = int1l, hi = int1h;
1379 break;
1381 if (int1l == int2l && int1h == int2h
1382 && ! (int1l == 0 && int1h == 0))
1384 low = 1, hi = 0;
1385 break;
1387 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1388 &low, &hi, &garbagel, &garbageh);
1389 break;
1391 case TRUNC_MOD_EXPR:
1392 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1393 /* This is a shortcut for a common special case. */
1394 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1395 && ! TREE_CONSTANT_OVERFLOW (arg1)
1396 && ! TREE_CONSTANT_OVERFLOW (arg2)
1397 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1399 if (code == CEIL_MOD_EXPR)
1400 int1l += int2l - 1;
1401 low = int1l % int2l, hi = 0;
1402 break;
1405 /* ... fall through ... */
1407 case ROUND_MOD_EXPR:
1408 overflow = div_and_round_double (code, uns,
1409 int1l, int1h, int2l, int2h,
1410 &garbagel, &garbageh, &low, &hi);
1411 break;
1413 case MIN_EXPR:
1414 case MAX_EXPR:
1415 if (uns)
1416 low = (((unsigned HOST_WIDE_INT) int1h
1417 < (unsigned HOST_WIDE_INT) int2h)
1418 || (((unsigned HOST_WIDE_INT) int1h
1419 == (unsigned HOST_WIDE_INT) int2h)
1420 && int1l < int2l));
1421 else
1422 low = (int1h < int2h
1423 || (int1h == int2h && int1l < int2l));
1425 if (low == (code == MIN_EXPR))
1426 low = int1l, hi = int1h;
1427 else
1428 low = int2l, hi = int2h;
1429 break;
1431 default:
1432 gcc_unreachable ();
1435 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1437 if (notrunc)
1439 /* Propagate overflow flags ourselves. */
1440 if (((!uns || is_sizetype) && overflow)
1441 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1443 t = copy_node (t);
1444 TREE_OVERFLOW (t) = 1;
1445 TREE_CONSTANT_OVERFLOW (t) = 1;
1447 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1449 t = copy_node (t);
1450 TREE_CONSTANT_OVERFLOW (t) = 1;
1453 else
1454 t = force_fit_type (t, 1,
1455 ((!uns || is_sizetype) && overflow)
1456 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1457 TREE_CONSTANT_OVERFLOW (arg1)
1458 | TREE_CONSTANT_OVERFLOW (arg2));
1460 return t;
1463 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1464 constant. We assume ARG1 and ARG2 have the same data type, or at least
1465 are the same kind of constant and the same machine mode.
1467 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1469 static tree
1470 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1472 STRIP_NOPS (arg1);
1473 STRIP_NOPS (arg2);
1475 if (TREE_CODE (arg1) == INTEGER_CST)
1476 return int_const_binop (code, arg1, arg2, notrunc);
1478 if (TREE_CODE (arg1) == REAL_CST)
1480 enum machine_mode mode;
1481 REAL_VALUE_TYPE d1;
1482 REAL_VALUE_TYPE d2;
1483 REAL_VALUE_TYPE value;
1484 REAL_VALUE_TYPE result;
1485 bool inexact;
1486 tree t, type;
1488 d1 = TREE_REAL_CST (arg1);
1489 d2 = TREE_REAL_CST (arg2);
1491 type = TREE_TYPE (arg1);
1492 mode = TYPE_MODE (type);
1494 /* Don't perform operation if we honor signaling NaNs and
1495 either operand is a NaN. */
1496 if (HONOR_SNANS (mode)
1497 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1498 return NULL_TREE;
1500 /* Don't perform operation if it would raise a division
1501 by zero exception. */
1502 if (code == RDIV_EXPR
1503 && REAL_VALUES_EQUAL (d2, dconst0)
1504 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1505 return NULL_TREE;
1507 /* If either operand is a NaN, just return it. Otherwise, set up
1508 for floating-point trap; we return an overflow. */
1509 if (REAL_VALUE_ISNAN (d1))
1510 return arg1;
1511 else if (REAL_VALUE_ISNAN (d2))
1512 return arg2;
1514 inexact = real_arithmetic (&value, code, &d1, &d2);
1515 real_convert (&result, mode, &value);
1517 /* Don't constant fold this floating point operation if the
1518 result may depend upon the run-time rounding mode and
1519 flag_rounding_math is set, or if GCC's software emulation
1520 is unable to accurately represent the result. */
1522 if ((flag_rounding_math
1523 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1524 && !flag_unsafe_math_optimizations))
1525 && (inexact || !real_identical (&result, &value)))
1526 return NULL_TREE;
1528 t = build_real (type, result);
1530 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1531 TREE_CONSTANT_OVERFLOW (t)
1532 = TREE_OVERFLOW (t)
1533 | TREE_CONSTANT_OVERFLOW (arg1)
1534 | TREE_CONSTANT_OVERFLOW (arg2);
1535 return t;
1537 if (TREE_CODE (arg1) == COMPLEX_CST)
1539 tree type = TREE_TYPE (arg1);
1540 tree r1 = TREE_REALPART (arg1);
1541 tree i1 = TREE_IMAGPART (arg1);
1542 tree r2 = TREE_REALPART (arg2);
1543 tree i2 = TREE_IMAGPART (arg2);
1544 tree t;
1546 switch (code)
1548 case PLUS_EXPR:
1549 t = build_complex (type,
1550 const_binop (PLUS_EXPR, r1, r2, notrunc),
1551 const_binop (PLUS_EXPR, i1, i2, notrunc));
1552 break;
1554 case MINUS_EXPR:
1555 t = build_complex (type,
1556 const_binop (MINUS_EXPR, r1, r2, notrunc),
1557 const_binop (MINUS_EXPR, i1, i2, notrunc));
1558 break;
1560 case MULT_EXPR:
1561 t = build_complex (type,
1562 const_binop (MINUS_EXPR,
1563 const_binop (MULT_EXPR,
1564 r1, r2, notrunc),
1565 const_binop (MULT_EXPR,
1566 i1, i2, notrunc),
1567 notrunc),
1568 const_binop (PLUS_EXPR,
1569 const_binop (MULT_EXPR,
1570 r1, i2, notrunc),
1571 const_binop (MULT_EXPR,
1572 i1, r2, notrunc),
1573 notrunc));
1574 break;
1576 case RDIV_EXPR:
1578 tree magsquared
1579 = const_binop (PLUS_EXPR,
1580 const_binop (MULT_EXPR, r2, r2, notrunc),
1581 const_binop (MULT_EXPR, i2, i2, notrunc),
1582 notrunc);
1584 t = build_complex (type,
1585 const_binop
1586 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1587 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1588 const_binop (PLUS_EXPR,
1589 const_binop (MULT_EXPR, r1, r2,
1590 notrunc),
1591 const_binop (MULT_EXPR, i1, i2,
1592 notrunc),
1593 notrunc),
1594 magsquared, notrunc),
1595 const_binop
1596 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1597 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1598 const_binop (MINUS_EXPR,
1599 const_binop (MULT_EXPR, i1, r2,
1600 notrunc),
1601 const_binop (MULT_EXPR, r1, i2,
1602 notrunc),
1603 notrunc),
1604 magsquared, notrunc));
1606 break;
1608 default:
1609 gcc_unreachable ();
1611 return t;
1613 return 0;
1616 /* Create a size type INTEGER_CST node with NUMBER sign extended. KIND
1617 indicates which particular sizetype to create. */
1619 tree
1620 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1622 return build_int_cst (sizetype_tab[(int) kind], number);
1625 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1626 is a tree code. The type of the result is taken from the operands.
1627 Both must be the same integer type and it must be a size type.
1628 If the operands are constant, so is the result. */
1630 tree
1631 size_binop (enum tree_code code, tree arg0, tree arg1)
1633 tree type = TREE_TYPE (arg0);
1635 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1636 && type == TREE_TYPE (arg1));
1638 /* Handle the special case of two integer constants faster. */
1639 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1641 /* And some specific cases even faster than that. */
1642 if (code == PLUS_EXPR && integer_zerop (arg0))
1643 return arg1;
1644 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1645 && integer_zerop (arg1))
1646 return arg0;
1647 else if (code == MULT_EXPR && integer_onep (arg0))
1648 return arg1;
1650 /* Handle general case of two integer constants. */
1651 return int_const_binop (code, arg0, arg1, 0);
1654 if (arg0 == error_mark_node || arg1 == error_mark_node)
1655 return error_mark_node;
1657 return fold_build2 (code, type, arg0, arg1);
1660 /* Given two values, either both of sizetype or both of bitsizetype,
1661 compute the difference between the two values. Return the value
1662 in the signed type corresponding to the type of the operands. */
1664 tree
1665 size_diffop (tree arg0, tree arg1)
1667 tree type = TREE_TYPE (arg0);
1668 tree ctype;
1670 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1671 && type == TREE_TYPE (arg1));
1673 /* If the type is already signed, just do the simple thing. */
1674 if (!TYPE_UNSIGNED (type))
1675 return size_binop (MINUS_EXPR, arg0, arg1);
1677 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1679 /* If either operand is not a constant, do the conversions to the signed
1680 type and subtract. The hardware will do the right thing with any
1681 overflow in the subtraction. */
1682 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1683 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1684 fold_convert (ctype, arg1));
1686 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1687 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1688 overflow) and negate (which can't either). Special-case a result
1689 of zero while we're here. */
1690 if (tree_int_cst_equal (arg0, arg1))
1691 return fold_convert (ctype, integer_zero_node);
1692 else if (tree_int_cst_lt (arg1, arg0))
1693 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1694 else
1695 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1696 fold_convert (ctype, size_binop (MINUS_EXPR,
1697 arg1, arg0)));
1700 /* A subroutine of fold_convert_const handling conversions of an
1701 INTEGER_CST to another integer type. */
1703 static tree
1704 fold_convert_const_int_from_int (tree type, tree arg1)
1706 tree t;
1708 /* Given an integer constant, make new constant with new type,
1709 appropriately sign-extended or truncated. */
1710 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1711 TREE_INT_CST_HIGH (arg1));
1713 t = force_fit_type (t,
1714 /* Don't set the overflow when
1715 converting a pointer */
1716 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1717 (TREE_INT_CST_HIGH (arg1) < 0
1718 && (TYPE_UNSIGNED (type)
1719 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1720 | TREE_OVERFLOW (arg1),
1721 TREE_CONSTANT_OVERFLOW (arg1));
1723 return t;
1726 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1727 to an integer type. */
1729 static tree
1730 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1732 int overflow = 0;
1733 tree t;
1735 /* The following code implements the floating point to integer
1736 conversion rules required by the Java Language Specification,
1737 that IEEE NaNs are mapped to zero and values that overflow
1738 the target precision saturate, i.e. values greater than
1739 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1740 are mapped to INT_MIN. These semantics are allowed by the
1741 C and C++ standards that simply state that the behavior of
1742 FP-to-integer conversion is unspecified upon overflow. */
1744 HOST_WIDE_INT high, low;
1745 REAL_VALUE_TYPE r;
1746 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1748 switch (code)
1750 case FIX_TRUNC_EXPR:
1751 real_trunc (&r, VOIDmode, &x);
1752 break;
1754 case FIX_CEIL_EXPR:
1755 real_ceil (&r, VOIDmode, &x);
1756 break;
1758 case FIX_FLOOR_EXPR:
1759 real_floor (&r, VOIDmode, &x);
1760 break;
1762 case FIX_ROUND_EXPR:
1763 real_round (&r, VOIDmode, &x);
1764 break;
1766 default:
1767 gcc_unreachable ();
1770 /* If R is NaN, return zero and show we have an overflow. */
1771 if (REAL_VALUE_ISNAN (r))
1773 overflow = 1;
1774 high = 0;
1775 low = 0;
1778 /* See if R is less than the lower bound or greater than the
1779 upper bound. */
1781 if (! overflow)
1783 tree lt = TYPE_MIN_VALUE (type);
1784 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1785 if (REAL_VALUES_LESS (r, l))
1787 overflow = 1;
1788 high = TREE_INT_CST_HIGH (lt);
1789 low = TREE_INT_CST_LOW (lt);
1793 if (! overflow)
1795 tree ut = TYPE_MAX_VALUE (type);
1796 if (ut)
1798 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1799 if (REAL_VALUES_LESS (u, r))
1801 overflow = 1;
1802 high = TREE_INT_CST_HIGH (ut);
1803 low = TREE_INT_CST_LOW (ut);
1808 if (! overflow)
1809 REAL_VALUE_TO_INT (&low, &high, r);
1811 t = build_int_cst_wide (type, low, high);
1813 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1814 TREE_CONSTANT_OVERFLOW (arg1));
1815 return t;
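/* Editorial sketch (hypothetical values, not from the original source):
   under these saturating semantics, converting the REAL_CST 1e30 to a
   signed 32-bit integer type yields INT_MAX (2147483647) with the
   overflow flags set, converting -1e30 yields INT_MIN (-2147483648), and
   converting a NaN yields 0, likewise flagged as an overflow.  */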
1818 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1819 to another floating point type. */
1821 static tree
1822 fold_convert_const_real_from_real (tree type, tree arg1)
1824 REAL_VALUE_TYPE value;
1825 tree t;
1827 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1828 t = build_real (type, value);
1830 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1831 TREE_CONSTANT_OVERFLOW (t)
1832 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1833 return t;
1836 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1837 type TYPE. If no simplification can be done return NULL_TREE. */
1839 static tree
1840 fold_convert_const (enum tree_code code, tree type, tree arg1)
1842 if (TREE_TYPE (arg1) == type)
1843 return arg1;
1845 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return fold_convert_const_int_from_int (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_int_from_real (code, type, arg1);
1852 else if (TREE_CODE (type) == REAL_TYPE)
1854 if (TREE_CODE (arg1) == INTEGER_CST)
1855 return build_real_from_int_cst (type, arg1);
1856 if (TREE_CODE (arg1) == REAL_CST)
1857 return fold_convert_const_real_from_real (type, arg1);
1859 return NULL_TREE;
1862 /* Construct a vector of zero elements of vector type TYPE. */
1864 static tree
1865 build_zero_vector (tree type)
1867 tree elem, list;
1868 int i, units;
1870 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1871 units = TYPE_VECTOR_SUBPARTS (type);
1873 list = NULL_TREE;
1874 for (i = 0; i < units; i++)
1875 list = tree_cons (NULL_TREE, elem, list);
1876 return build_vector (type, list);
1879 /* Convert expression ARG to type TYPE. Used by the middle-end for
1880 simple conversions in preference to calling the front-end's convert. */
1882 tree
1883 fold_convert (tree type, tree arg)
1885 tree orig = TREE_TYPE (arg);
1886 tree tem;
1888 if (type == orig)
1889 return arg;
1891 if (TREE_CODE (arg) == ERROR_MARK
1892 || TREE_CODE (type) == ERROR_MARK
1893 || TREE_CODE (orig) == ERROR_MARK)
1894 return error_mark_node;
1896 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1897 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1898 TYPE_MAIN_VARIANT (orig)))
1899 return fold_build1 (NOP_EXPR, type, arg);
1901 switch (TREE_CODE (type))
1903 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 if (TREE_CODE (arg) == INTEGER_CST)
1908 tem = fold_convert_const (NOP_EXPR, type, arg);
1909 if (tem != NULL_TREE)
1910 return tem;
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1913 || TREE_CODE (orig) == OFFSET_TYPE)
1914 return fold_build1 (NOP_EXPR, type, arg);
1915 if (TREE_CODE (orig) == COMPLEX_TYPE)
1917 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1918 return fold_convert (type, tem);
1920 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922 return fold_build1 (NOP_EXPR, type, arg);
1924 case REAL_TYPE:
1925 if (TREE_CODE (arg) == INTEGER_CST)
1927 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1931 else if (TREE_CODE (arg) == REAL_CST)
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1938 switch (TREE_CODE (orig))
1940 case INTEGER_TYPE: case CHAR_TYPE:
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold_build1 (FLOAT_EXPR, type, arg);
1945 case REAL_TYPE:
1946 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1947 type, arg);
1949 case COMPLEX_TYPE:
1950 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1951 return fold_convert (type, tem);
1953 default:
1954 gcc_unreachable ();
1957 case COMPLEX_TYPE:
1958 switch (TREE_CODE (orig))
1960 case INTEGER_TYPE: case CHAR_TYPE:
1961 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1962 case POINTER_TYPE: case REFERENCE_TYPE:
1963 case REAL_TYPE:
1964 return build2 (COMPLEX_EXPR, type,
1965 fold_convert (TREE_TYPE (type), arg),
1966 fold_convert (TREE_TYPE (type), integer_zero_node));
1967 case COMPLEX_TYPE:
1969 tree rpart, ipart;
1971 if (TREE_CODE (arg) == COMPLEX_EXPR)
1973 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1974 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1975 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1978 arg = save_expr (arg);
1979 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1980 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
1981 rpart = fold_convert (TREE_TYPE (type), rpart);
1982 ipart = fold_convert (TREE_TYPE (type), ipart);
1983 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1986 default:
1987 gcc_unreachable ();
1990 case VECTOR_TYPE:
1991 if (integer_zerop (arg))
1992 return build_zero_vector (type);
1993 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1994 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1995 || TREE_CODE (orig) == VECTOR_TYPE);
1996 return fold_build1 (NOP_EXPR, type, arg);
1998 case VOID_TYPE:
1999 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2001 default:
2002 gcc_unreachable ();
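/* Editorial sketch, not part of fold-const.c: the constant cases above
   mirror ordinary C conversion semantics.  A minimal stand-alone analogue
   (C99, hosted environment assumed): */

#include <complex.h>
#include <stdio.h>

int
main (void)
{
  int i = 5;
  double d = (double) i;            /* INTEGER_TYPE -> REAL_TYPE: FLOAT_EXPR */
  double complex z = 3.0 + 4.0 * I;
  double r = creal (z);             /* COMPLEX_TYPE -> scalar: REALPART_EXPR */
  double complex w = d;             /* REAL_TYPE -> COMPLEX_TYPE: imag = 0 */
  printf ("%g %g %g %g\n", d, r, creal (w), cimag (w));
  return 0;
}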
2006 /* Return false if expr can be assumed not to be an lvalue, true
2007 otherwise. */
2010 static bool
2011 maybe_lvalue_p (tree x)
2013 /* We only need to wrap lvalue tree codes. */
2014 switch (TREE_CODE (x))
2016 case VAR_DECL:
2017 case PARM_DECL:
2018 case RESULT_DECL:
2019 case LABEL_DECL:
2020 case FUNCTION_DECL:
2021 case SSA_NAME:
2023 case COMPONENT_REF:
2024 case INDIRECT_REF:
2025 case ALIGN_INDIRECT_REF:
2026 case MISALIGNED_INDIRECT_REF:
2027 case ARRAY_REF:
2028 case ARRAY_RANGE_REF:
2029 case BIT_FIELD_REF:
2030 case OBJ_TYPE_REF:
2032 case REALPART_EXPR:
2033 case IMAGPART_EXPR:
2034 case PREINCREMENT_EXPR:
2035 case PREDECREMENT_EXPR:
2036 case SAVE_EXPR:
2037 case TRY_CATCH_EXPR:
2038 case WITH_CLEANUP_EXPR:
2039 case COMPOUND_EXPR:
2040 case MODIFY_EXPR:
2041 case TARGET_EXPR:
2042 case COND_EXPR:
2043 case BIND_EXPR:
2044 case MIN_EXPR:
2045 case MAX_EXPR:
2046 break;
2048 default:
2049 /* Assume the worst for front-end tree codes. */
2050 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2051 break;
2052 return false;
2055 return true;
2058 /* Return an expr equal to X but certainly not valid as an lvalue. */
2060 tree
2061 non_lvalue (tree x)
2063 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2064 us. */
2065 if (in_gimple_form)
2066 return x;
2068 if (! maybe_lvalue_p (x))
2069 return x;
2070 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
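/* Editorial sketch, not part of fold-const.c: what the NON_LVALUE_EXPR
   wrapper guards against at the source level.  Hypothetical fragment: */

int x;

void
sketch (void)
{
  x = x + 0;   /* fold rewrites (x + 0) to plain x ...                   */
  /* ... but the folded tree is then the assignable 'x' itself, so
     without a NON_LVALUE_EXPR wrapper an invalid use such as
         (x + 0) = 5;    -- not an lvalue in C --
     could slip past the front end's lvalue checks.  */
}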
2073 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2074 Zero means allow extended lvalues. */
2076 int pedantic_lvalues;
2078 /* When pedantic, return an expr equal to X but certainly not valid as a
2079 pedantic lvalue. Otherwise, return X. */
2081 static tree
2082 pedantic_non_lvalue (tree x)
2084 if (pedantic_lvalues)
2085 return non_lvalue (x);
2086 else
2087 return x;
2090 /* Given a tree comparison code, return the code that is the logical inverse
2091 of the given code. It is not safe to do this for floating-point
2092 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2093 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2095 static enum tree_code
2096 invert_tree_comparison (enum tree_code code, bool honor_nans)
2098 if (honor_nans && flag_trapping_math)
2099 return ERROR_MARK;
2101 switch (code)
2103 case EQ_EXPR:
2104 return NE_EXPR;
2105 case NE_EXPR:
2106 return EQ_EXPR;
2107 case GT_EXPR:
2108 return honor_nans ? UNLE_EXPR : LE_EXPR;
2109 case GE_EXPR:
2110 return honor_nans ? UNLT_EXPR : LT_EXPR;
2111 case LT_EXPR:
2112 return honor_nans ? UNGE_EXPR : GE_EXPR;
2113 case LE_EXPR:
2114 return honor_nans ? UNGT_EXPR : GT_EXPR;
2115 case LTGT_EXPR:
2116 return UNEQ_EXPR;
2117 case UNEQ_EXPR:
2118 return LTGT_EXPR;
2119 case UNGT_EXPR:
2120 return LE_EXPR;
2121 case UNGE_EXPR:
2122 return LT_EXPR;
2123 case UNLT_EXPR:
2124 return GE_EXPR;
2125 case UNLE_EXPR:
2126 return GT_EXPR;
2127 case ORDERED_EXPR:
2128 return UNORDERED_EXPR;
2129 case UNORDERED_EXPR:
2130 return ORDERED_EXPR;
2131 default:
2132 gcc_unreachable ();
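/* Editorial sketch, not part of fold-const.c: why floating-point inversion
   needs the UN* codes.  With a NaN operand (IEEE semantics assumed),
   !(a < b) is not the same test as a >= b: */

#include <math.h>
#include <stdio.h>

int
main (void)
{
  double a = NAN, b = 1.0;
  printf ("a <  b  : %d\n", a < b);      /* 0: NaN compares false */
  printf ("a >= b  : %d\n", a >= b);     /* 0: also false */
  printf ("!(a < b): %d\n", !(a < b));   /* 1: matches UNGE, not GE */
  return 0;
}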
2136 /* Similar, but return the comparison that results if the operands are
2137 swapped. This is safe for floating-point. */
2139 enum tree_code
2140 swap_tree_comparison (enum tree_code code)
2142 switch (code)
2144 case EQ_EXPR:
2145 case NE_EXPR:
2146 return code;
2147 case GT_EXPR:
2148 return LT_EXPR;
2149 case GE_EXPR:
2150 return LE_EXPR;
2151 case LT_EXPR:
2152 return GT_EXPR;
2153 case LE_EXPR:
2154 return GE_EXPR;
2155 default:
2156 gcc_unreachable ();
2161 /* Convert a comparison tree code from an enum tree_code representation
2162 into a compcode bit-based encoding. This function is the inverse of
2163 compcode_to_comparison. */
2165 static enum comparison_code
2166 comparison_to_compcode (enum tree_code code)
2168 switch (code)
2170 case LT_EXPR:
2171 return COMPCODE_LT;
2172 case EQ_EXPR:
2173 return COMPCODE_EQ;
2174 case LE_EXPR:
2175 return COMPCODE_LE;
2176 case GT_EXPR:
2177 return COMPCODE_GT;
2178 case NE_EXPR:
2179 return COMPCODE_NE;
2180 case GE_EXPR:
2181 return COMPCODE_GE;
2182 case ORDERED_EXPR:
2183 return COMPCODE_ORD;
2184 case UNORDERED_EXPR:
2185 return COMPCODE_UNORD;
2186 case UNLT_EXPR:
2187 return COMPCODE_UNLT;
2188 case UNEQ_EXPR:
2189 return COMPCODE_UNEQ;
2190 case UNLE_EXPR:
2191 return COMPCODE_UNLE;
2192 case UNGT_EXPR:
2193 return COMPCODE_UNGT;
2194 case LTGT_EXPR:
2195 return COMPCODE_LTGT;
2196 case UNGE_EXPR:
2197 return COMPCODE_UNGE;
2198 default:
2199 gcc_unreachable ();
2203 /* Convert a compcode bit-based encoding of a comparison operator back
2204 to GCC's enum tree_code representation. This function is the
2205 inverse of comparison_to_compcode. */
2207 static enum tree_code
2208 compcode_to_comparison (enum comparison_code code)
2210 switch (code)
2212 case COMPCODE_LT:
2213 return LT_EXPR;
2214 case COMPCODE_EQ:
2215 return EQ_EXPR;
2216 case COMPCODE_LE:
2217 return LE_EXPR;
2218 case COMPCODE_GT:
2219 return GT_EXPR;
2220 case COMPCODE_NE:
2221 return NE_EXPR;
2222 case COMPCODE_GE:
2223 return GE_EXPR;
2224 case COMPCODE_ORD:
2225 return ORDERED_EXPR;
2226 case COMPCODE_UNORD:
2227 return UNORDERED_EXPR;
2228 case COMPCODE_UNLT:
2229 return UNLT_EXPR;
2230 case COMPCODE_UNEQ:
2231 return UNEQ_EXPR;
2232 case COMPCODE_UNLE:
2233 return UNLE_EXPR;
2234 case COMPCODE_UNGT:
2235 return UNGT_EXPR;
2236 case COMPCODE_LTGT:
2237 return LTGT_EXPR;
2238 case COMPCODE_UNGE:
2239 return UNGE_EXPR;
2240 default:
2241 gcc_unreachable ();
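/* Editorial sketch, not part of fold-const.c: the compcode encoding packs a
   comparison into four bits -- LT, EQ, GT, UNORD -- so that ANDing or ORing
   two comparisons of the same operands is plain bitwise arithmetic: */

#include <stdio.h>

enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };  /* as in enum comparison_code */

int
main (void)
{
  printf ("LT|EQ       = %d (LE)\n", LT | EQ);               /* 3 */
  printf ("LE&GE       = %d (EQ)\n", (LT | EQ) & (GT | EQ)); /* 2 */
  printf ("LT|GT|UNORD = %d (NE)\n", LT | GT | UNORD);       /* 13 */
  return 0;
}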
2245 /* Return a tree for the comparison which is the combination of
2246 doing the AND or OR (depending on CODE) of the two operations LCODE
2247 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2248 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2249 if this makes the transformation invalid. */
2251 tree
2252 combine_comparisons (enum tree_code code, enum tree_code lcode,
2253 enum tree_code rcode, tree truth_type,
2254 tree ll_arg, tree lr_arg)
2256 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2257 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2258 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2259 enum comparison_code compcode;
2261 switch (code)
2263 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2264 compcode = lcompcode & rcompcode;
2265 break;
2267 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2268 compcode = lcompcode | rcompcode;
2269 break;
2271 default:
2272 return NULL_TREE;
2275 if (!honor_nans)
2277 /* Eliminate unordered comparisons, as well as LTGT and ORD
2278 which are not used unless the mode has NaNs. */
2279 compcode &= ~COMPCODE_UNORD;
2280 if (compcode == COMPCODE_LTGT)
2281 compcode = COMPCODE_NE;
2282 else if (compcode == COMPCODE_ORD)
2283 compcode = COMPCODE_TRUE;
2285 else if (flag_trapping_math)
2287 /* Check that the original operation and the optimized ones will trap
2288 under the same condition. */
2289 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2290 && (lcompcode != COMPCODE_EQ)
2291 && (lcompcode != COMPCODE_ORD);
2292 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2293 && (rcompcode != COMPCODE_EQ)
2294 && (rcompcode != COMPCODE_ORD);
2295 bool trap = (compcode & COMPCODE_UNORD) == 0
2296 && (compcode != COMPCODE_EQ)
2297 && (compcode != COMPCODE_ORD);
2299 /* In a short-circuited boolean expression the LHS might be
2300 such that the RHS, if evaluated, will never trap. For
2301 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2302 if neither x nor y is NaN. (This is a mixed blessing: for
2303 example, the expression above will never trap, hence
2304 optimizing it to x < y would be invalid). */
2305 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2306 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2307 rtrap = false;
2309 /* If the comparison was short-circuited, and only the RHS
2310 trapped, we may now generate a spurious trap. */
2311 if (rtrap && !ltrap
2312 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2313 return NULL_TREE;
2315 /* If we changed the conditions that cause a trap, we lose. */
2316 if ((ltrap || rtrap) != trap)
2317 return NULL_TREE;
2320 if (compcode == COMPCODE_TRUE)
2321 return constant_boolean_node (true, truth_type);
2322 else if (compcode == COMPCODE_FALSE)
2323 return constant_boolean_node (false, truth_type);
2324 else
2325 return fold_build2 (compcode_to_comparison (compcode),
2326 truth_type, ll_arg, lr_arg);
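/* Editorial sketch, not part of fold-const.c: the source-level effect of
   combine_comparisons on integer operands, where no NaN or trap issues
   arise.  A brute-force check that the folded forms agree: */

#include <stdio.h>

int
main (void)
{
  int x, y, ok = 1;
  for (x = -3; x <= 3; x++)
    for (y = -3; y <= 3; y++)
      {
        ok &= (x < y || x == y) == (x <= y);   /* LT|EQ -> LE */
        ok &= (x < y && x > y) == 0;           /* LT&GT -> FALSE */
      }
  puts (ok ? "folded forms agree" : "mismatch");
  return !ok;
}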
2329 /* Return nonzero if CODE is a tree code that represents a truth value. */
2331 static int
2332 truth_value_p (enum tree_code code)
2334 return (TREE_CODE_CLASS (code) == tcc_comparison
2335 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2336 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2337 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2340 /* Return nonzero if two operands (typically of the same tree node)
2341 are necessarily equal. If either argument has side-effects this
2342 function returns zero. FLAGS modifies behavior as follows:
2344 If OEP_ONLY_CONST is set, only return nonzero for constants.
2345 This function tests whether the operands are indistinguishable;
2346 it does not test whether they are equal using C's == operation.
2347 The distinction is important for IEEE floating point, because
2348 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2349 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2351 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2352 even though it may hold multiple values during a function.
2353 This is because a GCC tree node guarantees that nothing else is
2354 executed between the evaluation of its "operands" (which may often
2355 be evaluated in arbitrary order). Hence if the operands themselves
2356 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2357 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2358 unset means assuming isochronic (or instantaneous) tree equivalence.
2359 Unless comparing arbitrary expression trees, such as from different
2360 statements, this flag can usually be left unset.
2362 If OEP_PURE_SAME is set, then pure functions with identical arguments
2363 are considered the same. It is used when the caller has other ways
2364 to ensure that global memory is unchanged in between. */
2366 int
2367 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2369 /* If either is ERROR_MARK, they aren't equal. */
2370 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2371 return 0;
2373 /* If both types don't have the same signedness, then we can't consider
2374 them equal. We must check this before the STRIP_NOPS calls
2375 because they may change the signedness of the arguments. */
2376 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2377 return 0;
2379 STRIP_NOPS (arg0);
2380 STRIP_NOPS (arg1);
2382 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2383 /* This is needed for conversions and for COMPONENT_REF.
2384 Might as well play it safe and always test this. */
2385 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2386 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2387 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2388 return 0;
2390 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2391 We don't care about side effects in that case because the SAVE_EXPR
2392 takes care of that for us. In all other cases, two expressions are
2393 equal if they have no side effects. If we have two identical
2394 expressions with side effects that should be treated the same due
2395 to the only side effects being identical SAVE_EXPR's, that will
2396 be detected in the recursive calls below. */
2397 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2398 && (TREE_CODE (arg0) == SAVE_EXPR
2399 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2400 return 1;
2402 /* Next handle constant cases, those for which we can return 1 even
2403 if ONLY_CONST is set. */
2404 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2405 switch (TREE_CODE (arg0))
2407 case INTEGER_CST:
2408 return (! TREE_CONSTANT_OVERFLOW (arg0)
2409 && ! TREE_CONSTANT_OVERFLOW (arg1)
2410 && tree_int_cst_equal (arg0, arg1));
2412 case REAL_CST:
2413 return (! TREE_CONSTANT_OVERFLOW (arg0)
2414 && ! TREE_CONSTANT_OVERFLOW (arg1)
2415 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2416 TREE_REAL_CST (arg1)));
2418 case VECTOR_CST:
2420 tree v1, v2;
2422 if (TREE_CONSTANT_OVERFLOW (arg0)
2423 || TREE_CONSTANT_OVERFLOW (arg1))
2424 return 0;
2426 v1 = TREE_VECTOR_CST_ELTS (arg0);
2427 v2 = TREE_VECTOR_CST_ELTS (arg1);
2428 while (v1 && v2)
2430 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2431 flags))
2432 return 0;
2433 v1 = TREE_CHAIN (v1);
2434 v2 = TREE_CHAIN (v2);
2437 return 1;
2440 case COMPLEX_CST:
2441 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2442 flags)
2443 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2444 flags));
2446 case STRING_CST:
2447 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2448 && ! memcmp (TREE_STRING_POINTER (arg0),
2449 TREE_STRING_POINTER (arg1),
2450 TREE_STRING_LENGTH (arg0)));
2452 case ADDR_EXPR:
2453 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2454 0);
2455 default:
2456 break;
2459 if (flags & OEP_ONLY_CONST)
2460 return 0;
2462 /* Define macros to test an operand from arg0 and arg1 for equality and a
2463 variant that allows null and views null as being different from any
2464 non-null value. In the latter case, if either is null, they both
2465 must be; otherwise, do the normal comparison. */
2466 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2467 TREE_OPERAND (arg1, N), flags)
2469 #define OP_SAME_WITH_NULL(N) \
2470 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2471 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2473 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2475 case tcc_unary:
2476 /* Two conversions are equal only if signedness and modes match. */
2477 switch (TREE_CODE (arg0))
2479 case NOP_EXPR:
2480 case CONVERT_EXPR:
2481 case FIX_CEIL_EXPR:
2482 case FIX_TRUNC_EXPR:
2483 case FIX_FLOOR_EXPR:
2484 case FIX_ROUND_EXPR:
2485 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2486 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2487 return 0;
2488 break;
2489 default:
2490 break;
2493 return OP_SAME (0);
2496 case tcc_comparison:
2497 case tcc_binary:
2498 if (OP_SAME (0) && OP_SAME (1))
2499 return 1;
2501 /* For commutative ops, allow the other order. */
2502 return (commutative_tree_code (TREE_CODE (arg0))
2503 && operand_equal_p (TREE_OPERAND (arg0, 0),
2504 TREE_OPERAND (arg1, 1), flags)
2505 && operand_equal_p (TREE_OPERAND (arg0, 1),
2506 TREE_OPERAND (arg1, 0), flags));
2508 case tcc_reference:
2509 /* If either of the pointer (or reference) expressions we are
2510 dereferencing contain a side effect, these cannot be equal. */
2511 if (TREE_SIDE_EFFECTS (arg0)
2512 || TREE_SIDE_EFFECTS (arg1))
2513 return 0;
2515 switch (TREE_CODE (arg0))
2517 case INDIRECT_REF:
2518 case ALIGN_INDIRECT_REF:
2519 case MISALIGNED_INDIRECT_REF:
2520 case REALPART_EXPR:
2521 case IMAGPART_EXPR:
2522 return OP_SAME (0);
2524 case ARRAY_REF:
2525 case ARRAY_RANGE_REF:
2526 /* Operands 2 and 3 may be null. */
2527 return (OP_SAME (0)
2528 && OP_SAME (1)
2529 && OP_SAME_WITH_NULL (2)
2530 && OP_SAME_WITH_NULL (3));
2532 case COMPONENT_REF:
2533 /* Handle operand 2 the same as for ARRAY_REF. */
2534 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2536 case BIT_FIELD_REF:
2537 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2539 default:
2540 return 0;
2543 case tcc_expression:
2544 switch (TREE_CODE (arg0))
2546 case ADDR_EXPR:
2547 case TRUTH_NOT_EXPR:
2548 return OP_SAME (0);
2550 case TRUTH_ANDIF_EXPR:
2551 case TRUTH_ORIF_EXPR:
2552 return OP_SAME (0) && OP_SAME (1);
2554 case TRUTH_AND_EXPR:
2555 case TRUTH_OR_EXPR:
2556 case TRUTH_XOR_EXPR:
2557 if (OP_SAME (0) && OP_SAME (1))
2558 return 1;
2560 /* Otherwise take into account this is a commutative operation. */
2561 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2562 TREE_OPERAND (arg1, 1), flags)
2563 && operand_equal_p (TREE_OPERAND (arg0, 1),
2564 TREE_OPERAND (arg1, 0), flags));
2566 case CALL_EXPR:
2567 /* If the CALL_EXPRs call different functions, then they
2568 clearly can not be equal. */
2569 if (!OP_SAME (0))
2570 return 0;
2573 unsigned int cef = call_expr_flags (arg0);
2574 if (flags & OEP_PURE_SAME)
2575 cef &= ECF_CONST | ECF_PURE;
2576 else
2577 cef &= ECF_CONST;
2578 if (!cef)
2579 return 0;
2582 /* Now see if all the arguments are the same. operand_equal_p
2583 does not handle TREE_LIST, so we walk the operands here
2584 feeding them to operand_equal_p. */
2585 arg0 = TREE_OPERAND (arg0, 1);
2586 arg1 = TREE_OPERAND (arg1, 1);
2587 while (arg0 && arg1)
2589 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2590 flags))
2591 return 0;
2593 arg0 = TREE_CHAIN (arg0);
2594 arg1 = TREE_CHAIN (arg1);
2597 /* If we get here and both argument lists are exhausted
2598 then the CALL_EXPRs are equal. */
2599 return ! (arg0 || arg1);
2601 default:
2602 return 0;
2605 case tcc_declaration:
2606 /* Consider __builtin_sqrt equal to sqrt. */
2607 return (TREE_CODE (arg0) == FUNCTION_DECL
2608 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2609 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2610 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2612 default:
2613 return 0;
2616 #undef OP_SAME
2617 #undef OP_SAME_WITH_NULL
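/* Editorial sketch, not part of fold-const.c: why "indistinguishable" is
   not C's == operator, as the comment before operand_equal_p notes.
   Stand-alone check, assuming IEEE doubles: */

#include <math.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  printf ("-0.0 == 0.0 : %d\n", nz == pz);                      /* 1 */
  printf ("same bits   : %d\n", !memcmp (&nz, &pz, sizeof nz)); /* 0 */
  printf ("NaN == NaN  : %d\n", n == n);                        /* 0 */
  return 0;
}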
2620 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2621 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2623 When in doubt, return 0. */
2625 static int
2626 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2628 int unsignedp1, unsignedpo;
2629 tree primarg0, primarg1, primother;
2630 unsigned int correct_width;
2632 if (operand_equal_p (arg0, arg1, 0))
2633 return 1;
2635 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2636 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2637 return 0;
2639 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2640 and see if the inner values are the same. This removes any
2641 signedness comparison, which doesn't matter here. */
2642 primarg0 = arg0, primarg1 = arg1;
2643 STRIP_NOPS (primarg0);
2644 STRIP_NOPS (primarg1);
2645 if (operand_equal_p (primarg0, primarg1, 0))
2646 return 1;
2648 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2649 actual comparison operand, ARG0.
2651 First throw away any conversions to wider types
2652 already present in the operands. */
2654 primarg1 = get_narrower (arg1, &unsignedp1);
2655 primother = get_narrower (other, &unsignedpo);
2657 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2658 if (unsignedp1 == unsignedpo
2659 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2660 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2662 tree type = TREE_TYPE (arg0);
2664 /* Make sure shorter operand is extended the right way
2665 to match the longer operand. */
2666 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2667 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2669 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2670 return 1;
2673 return 0;
2676 /* See if ARG is an expression that is either a comparison or is performing
2677 arithmetic on comparisons. The comparisons must only be comparing
2678 two different values, which will be stored in *CVAL1 and *CVAL2; if
2679 they are nonzero it means that some operands have already been found.
2680 No variables may be used anywhere else in the expression except in the
2681 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2682 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2684 If this is true, return 1. Otherwise, return zero. */
2686 static int
2687 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2689 enum tree_code code = TREE_CODE (arg);
2690 enum tree_code_class class = TREE_CODE_CLASS (code);
2692 /* We can handle some of the tcc_expression cases here. */
2693 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2694 class = tcc_unary;
2695 else if (class == tcc_expression
2696 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2697 || code == COMPOUND_EXPR))
2698 class = tcc_binary;
2700 else if (class == tcc_expression && code == SAVE_EXPR
2701 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2703 /* If we've already found a CVAL1 or CVAL2, this expression is
2704 too complex to handle. */
2705 if (*cval1 || *cval2)
2706 return 0;
2708 class = tcc_unary;
2709 *save_p = 1;
2712 switch (class)
2714 case tcc_unary:
2715 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2717 case tcc_binary:
2718 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2719 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2720 cval1, cval2, save_p));
2722 case tcc_constant:
2723 return 1;
2725 case tcc_expression:
2726 if (code == COND_EXPR)
2727 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2728 cval1, cval2, save_p)
2729 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2730 cval1, cval2, save_p)
2731 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2732 cval1, cval2, save_p));
2733 return 0;
2735 case tcc_comparison:
2736 /* First see if we can handle the first operand, then the second. For
2737 the second operand, we know *CVAL1 can't be zero. It must be that
2738 one side of the comparison is each of the values; test for the
2739 case where this isn't true by failing if the two operands
2740 are the same. */
2742 if (operand_equal_p (TREE_OPERAND (arg, 0),
2743 TREE_OPERAND (arg, 1), 0))
2744 return 0;
2746 if (*cval1 == 0)
2747 *cval1 = TREE_OPERAND (arg, 0);
2748 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2750 else if (*cval2 == 0)
2751 *cval2 = TREE_OPERAND (arg, 0);
2752 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2754 else
2755 return 0;
2757 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2759 else if (*cval2 == 0)
2760 *cval2 = TREE_OPERAND (arg, 1);
2761 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2763 else
2764 return 0;
2766 return 1;
2768 default:
2769 return 0;
2773 /* ARG is a tree that is known to contain just arithmetic operations and
2774 comparisons. Evaluate the operations in the tree substituting NEW0 for
2775 any occurrence of OLD0 as an operand of a comparison and likewise for
2776 NEW1 and OLD1. */
2778 static tree
2779 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2781 tree type = TREE_TYPE (arg);
2782 enum tree_code code = TREE_CODE (arg);
2783 enum tree_code_class class = TREE_CODE_CLASS (code);
2785 /* We can handle some of the tcc_expression cases here. */
2786 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2787 class = tcc_unary;
2788 else if (class == tcc_expression
2789 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2790 class = tcc_binary;
2792 switch (class)
2794 case tcc_unary:
2795 return fold_build1 (code, type,
2796 eval_subst (TREE_OPERAND (arg, 0),
2797 old0, new0, old1, new1));
2799 case tcc_binary:
2800 return fold_build2 (code, type,
2801 eval_subst (TREE_OPERAND (arg, 0),
2802 old0, new0, old1, new1),
2803 eval_subst (TREE_OPERAND (arg, 1),
2804 old0, new0, old1, new1));
2806 case tcc_expression:
2807 switch (code)
2809 case SAVE_EXPR:
2810 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2812 case COMPOUND_EXPR:
2813 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2815 case COND_EXPR:
2816 return fold_build3 (code, type,
2817 eval_subst (TREE_OPERAND (arg, 0),
2818 old0, new0, old1, new1),
2819 eval_subst (TREE_OPERAND (arg, 1),
2820 old0, new0, old1, new1),
2821 eval_subst (TREE_OPERAND (arg, 2),
2822 old0, new0, old1, new1));
2823 default:
2824 break;
2826 /* Fall through - ??? */
2828 case tcc_comparison:
2830 tree arg0 = TREE_OPERAND (arg, 0);
2831 tree arg1 = TREE_OPERAND (arg, 1);
2833 /* We need to check both for exact equality and tree equality. The
2834 former will be true if the operand has a side-effect. In that
2835 case, we know the operand occurred exactly once. */
2837 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2838 arg0 = new0;
2839 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2840 arg0 = new1;
2842 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2843 arg1 = new0;
2844 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2845 arg1 = new1;
2847 return fold_build2 (code, type, arg0, arg1);
2850 default:
2851 return arg;
2855 /* Return a tree for the case when the result of an expression is RESULT
2856 converted to TYPE and OMITTED was previously an operand of the expression
2857 but is now not needed (e.g., we folded OMITTED * 0).
2859 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2860 the conversion of RESULT to TYPE. */
2862 tree
2863 omit_one_operand (tree type, tree result, tree omitted)
2865 tree t = fold_convert (type, result);
2867 if (TREE_SIDE_EFFECTS (omitted))
2868 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2870 return non_lvalue (t);
2873 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2875 static tree
2876 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2878 tree t = fold_convert (type, result);
2880 if (TREE_SIDE_EFFECTS (omitted))
2881 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2883 return pedantic_non_lvalue (t);
2886 /* Return a tree for the case when the result of an expression is RESULT
2887 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2888 of the expression but are now not needed.
2890 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2891 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2892 evaluated before OMITTED2. Otherwise, if neither has side effects,
2893 just do the conversion of RESULT to TYPE. */
2895 tree
2896 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2898 tree t = fold_convert (type, result);
2900 if (TREE_SIDE_EFFECTS (omitted2))
2901 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2902 if (TREE_SIDE_EFFECTS (omitted1))
2903 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2905 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
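/* Editorial sketch, not part of fold-const.c: what omit_one_operand builds,
   at the source level.  Folding f () * 0 must still evaluate f (), so the
   result is a comma (COMPOUND_EXPR) form: */

#include <stdio.h>

static int
f (void)
{
  puts ("side effect");     /* must still happen after folding */
  return 42;
}

int
main (void)
{
  int r = (f (), 0);        /* evaluate f () for effect, yield 0 */
  printf ("%d\n", r);
  return 0;
}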
2909 /* Return a simplified tree node for the truth-negation of ARG. This
2910 never alters ARG itself. We assume that ARG is an operation that
2911 returns a truth value (0 or 1).
2913 FIXME: one would think we would fold the result, but it causes
2914 problems with the dominator optimizer. */
2915 tree
2916 invert_truthvalue (tree arg)
2918 tree type = TREE_TYPE (arg);
2919 enum tree_code code = TREE_CODE (arg);
2921 if (code == ERROR_MARK)
2922 return arg;
2924 /* If this is a comparison, we can simply invert it, except for
2925 floating-point non-equality comparisons, in which case we just
2926 enclose a TRUTH_NOT_EXPR around what we have. */
2928 if (TREE_CODE_CLASS (code) == tcc_comparison)
2930 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2931 if (FLOAT_TYPE_P (op_type)
2932 && flag_trapping_math
2933 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2934 && code != NE_EXPR && code != EQ_EXPR)
2935 return build1 (TRUTH_NOT_EXPR, type, arg);
2936 else
2938 code = invert_tree_comparison (code,
2939 HONOR_NANS (TYPE_MODE (op_type)));
2940 if (code == ERROR_MARK)
2941 return build1 (TRUTH_NOT_EXPR, type, arg);
2942 else
2943 return build2 (code, type,
2944 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2948 switch (code)
2950 case INTEGER_CST:
2951 return constant_boolean_node (integer_zerop (arg), type);
2953 case TRUTH_AND_EXPR:
2954 return build2 (TRUTH_OR_EXPR, type,
2955 invert_truthvalue (TREE_OPERAND (arg, 0)),
2956 invert_truthvalue (TREE_OPERAND (arg, 1)));
2958 case TRUTH_OR_EXPR:
2959 return build2 (TRUTH_AND_EXPR, type,
2960 invert_truthvalue (TREE_OPERAND (arg, 0)),
2961 invert_truthvalue (TREE_OPERAND (arg, 1)));
2963 case TRUTH_XOR_EXPR:
2964 /* Here we can invert either operand. We invert the first operand
2965 unless the second operand is a TRUTH_NOT_EXPR in which case our
2966 result is the XOR of the first operand with the inside of the
2967 negation of the second operand. */
2969 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2970 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2971 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2972 else
2973 return build2 (TRUTH_XOR_EXPR, type,
2974 invert_truthvalue (TREE_OPERAND (arg, 0)),
2975 TREE_OPERAND (arg, 1));
2977 case TRUTH_ANDIF_EXPR:
2978 return build2 (TRUTH_ORIF_EXPR, type,
2979 invert_truthvalue (TREE_OPERAND (arg, 0)),
2980 invert_truthvalue (TREE_OPERAND (arg, 1)));
2982 case TRUTH_ORIF_EXPR:
2983 return build2 (TRUTH_ANDIF_EXPR, type,
2984 invert_truthvalue (TREE_OPERAND (arg, 0)),
2985 invert_truthvalue (TREE_OPERAND (arg, 1)));
2987 case TRUTH_NOT_EXPR:
2988 return TREE_OPERAND (arg, 0);
2990 case COND_EXPR:
2991 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2992 invert_truthvalue (TREE_OPERAND (arg, 1)),
2993 invert_truthvalue (TREE_OPERAND (arg, 2)));
2995 case COMPOUND_EXPR:
2996 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2997 invert_truthvalue (TREE_OPERAND (arg, 1)));
2999 case NON_LVALUE_EXPR:
3000 return invert_truthvalue (TREE_OPERAND (arg, 0));
3002 case NOP_EXPR:
3003 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3004 break;
3006 case CONVERT_EXPR:
3007 case FLOAT_EXPR:
3008 return build1 (TREE_CODE (arg), type,
3009 invert_truthvalue (TREE_OPERAND (arg, 0)));
3011 case BIT_AND_EXPR:
3012 if (!integer_onep (TREE_OPERAND (arg, 1)))
3013 break;
3014 return build2 (EQ_EXPR, type, arg,
3015 fold_convert (type, integer_zero_node));
3017 case SAVE_EXPR:
3018 return build1 (TRUTH_NOT_EXPR, type, arg);
3020 case CLEANUP_POINT_EXPR:
3021 return build1 (CLEANUP_POINT_EXPR, type,
3022 invert_truthvalue (TREE_OPERAND (arg, 0)));
3024 default:
3025 break;
3027 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3028 return build1 (TRUTH_NOT_EXPR, type, arg);
3031 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3032 operands are another bit-wise operation with a common input. If so,
3033 distribute the bit operations to save an operation and possibly two if
3034 constants are involved. For example, convert
3035 (A | B) & (A | C) into A | (B & C)
3036 Further simplification will occur if B and C are constants.
3038 If this optimization cannot be done, 0 will be returned. */
3040 static tree
3041 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3043 tree common;
3044 tree left, right;
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3047 || TREE_CODE (arg0) == code
3048 || (TREE_CODE (arg0) != BIT_AND_EXPR
3049 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3050 return 0;
3052 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3054 common = TREE_OPERAND (arg0, 0);
3055 left = TREE_OPERAND (arg0, 1);
3056 right = TREE_OPERAND (arg1, 1);
3058 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3060 common = TREE_OPERAND (arg0, 0);
3061 left = TREE_OPERAND (arg0, 1);
3062 right = TREE_OPERAND (arg1, 0);
3064 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3066 common = TREE_OPERAND (arg0, 1);
3067 left = TREE_OPERAND (arg0, 0);
3068 right = TREE_OPERAND (arg1, 1);
3070 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3072 common = TREE_OPERAND (arg0, 1);
3073 left = TREE_OPERAND (arg0, 0);
3074 right = TREE_OPERAND (arg1, 0);
3076 else
3077 return 0;
3079 return fold_build2 (TREE_CODE (arg0), type, common,
3080 fold_build2 (code, type, left, right));
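/* Editorial sketch, not part of fold-const.c: the distribution identity
   used above, checked exhaustively over small bit patterns: */

#include <stdio.h>

int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        if (((a | b) & (a | c)) != (a | (b & c)))
          {
            puts ("mismatch");
            return 1;
          }
  puts ("(A | B) & (A | C) == A | (B & C) for all 3-bit operands");
  return 0;
}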
3083 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3084 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3086 static tree
3087 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3088 int unsignedp)
3090 tree result;
3092 if (bitpos == 0)
3094 tree size = TYPE_SIZE (TREE_TYPE (inner));
3095 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3096 || POINTER_TYPE_P (TREE_TYPE (inner)))
3097 && host_integerp (size, 0)
3098 && tree_low_cst (size, 0) == bitsize)
3099 return fold_convert (type, inner);
3102 result = build3 (BIT_FIELD_REF, type, inner,
3103 size_int (bitsize), bitsize_int (bitpos));
3105 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3107 return result;
3110 /* Optimize a bit-field compare.
3112 There are two cases: First is a compare against a constant and the
3113 second is a comparison of two items where the fields are at the same
3114 bit position relative to the start of a chunk (byte, halfword, word)
3115 large enough to contain it. In these cases we can avoid the shift
3116 implicit in bitfield extractions.
3118 For constants, we emit a compare of the shifted constant with the
3119 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3120 compared. For two fields at the same position, we do the ANDs with the
3121 similar mask and compare the result of the ANDs.
3123 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3124 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3125 are the left and right operands of the comparison, respectively.
3127 If the optimization described above can be done, we return the resulting
3128 tree. Otherwise we return zero. */
3130 static tree
3131 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3132 tree lhs, tree rhs)
3134 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3135 tree type = TREE_TYPE (lhs);
3136 tree signed_type, unsigned_type;
3137 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3138 enum machine_mode lmode, rmode, nmode;
3139 int lunsignedp, runsignedp;
3140 int lvolatilep = 0, rvolatilep = 0;
3141 tree linner, rinner = NULL_TREE;
3142 tree mask;
3143 tree offset;
3145 /* Get all the information about the extractions being done. If the bit size
3146 is the same as the size of the underlying object, we aren't doing an
3147 extraction at all and so can do nothing. We also don't want to
3148 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3149 then will no longer be able to replace it. */
3150 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3151 &lunsignedp, &lvolatilep, false);
3152 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3153 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3154 return 0;
3156 if (!const_p)
3158 /* If this is not a constant, we can only do something if bit positions,
3159 sizes, and signedness are the same. */
3160 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3161 &runsignedp, &rvolatilep, false);
3163 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3164 || lunsignedp != runsignedp || offset != 0
3165 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3166 return 0;
3169 /* See if we can find a mode to refer to this field. We should be able to,
3170 but fail if we can't. */
3171 nmode = get_best_mode (lbitsize, lbitpos,
3172 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3173 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3174 TYPE_ALIGN (TREE_TYPE (rinner))),
3175 word_mode, lvolatilep || rvolatilep);
3176 if (nmode == VOIDmode)
3177 return 0;
3179 /* Set signed and unsigned types of the precision of this mode for the
3180 shifts below. */
3181 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3182 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3184 /* Compute the bit position and size for the new reference and our offset
3185 within it. If the new reference is the same size as the original, we
3186 won't optimize anything, so return zero. */
3187 nbitsize = GET_MODE_BITSIZE (nmode);
3188 nbitpos = lbitpos & ~ (nbitsize - 1);
3189 lbitpos -= nbitpos;
3190 if (nbitsize == lbitsize)
3191 return 0;
3193 if (BYTES_BIG_ENDIAN)
3194 lbitpos = nbitsize - lbitsize - lbitpos;
3196 /* Make the mask to be used against the extracted field. */
3197 mask = build_int_cst (unsigned_type, -1);
3198 mask = force_fit_type (mask, 0, false, false);
3199 mask = fold_convert (unsigned_type, mask);
3200 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3201 mask = const_binop (RSHIFT_EXPR, mask,
3202 size_int (nbitsize - lbitsize - lbitpos), 0);
3204 if (! const_p)
3205 /* If not comparing with constant, just rework the comparison
3206 and return. */
3207 return build2 (code, compare_type,
3208 build2 (BIT_AND_EXPR, unsigned_type,
3209 make_bit_field_ref (linner, unsigned_type,
3210 nbitsize, nbitpos, 1),
3211 mask),
3212 build2 (BIT_AND_EXPR, unsigned_type,
3213 make_bit_field_ref (rinner, unsigned_type,
3214 nbitsize, nbitpos, 1),
3215 mask));
3217 /* Otherwise, we are handling the constant case. See if the constant is too
3218 big for the field. Warn and return a tree for 0 (false) if so. We do
3219 this not only for its own sake, but to avoid having to test for this
3220 error case below. If we didn't, we might generate wrong code.
3222 For unsigned fields, the constant shifted right by the field length should
3223 be all zero. For signed fields, the high-order bits should agree with
3224 the sign bit. */
3226 if (lunsignedp)
3228 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3229 fold_convert (unsigned_type, rhs),
3230 size_int (lbitsize), 0)))
3232 warning ("comparison is always %d due to width of bit-field",
3233 code == NE_EXPR);
3234 return constant_boolean_node (code == NE_EXPR, compare_type);
3237 else
3239 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3240 size_int (lbitsize - 1), 0);
3241 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3243 warning ("comparison is always %d due to width of bit-field",
3244 code == NE_EXPR);
3245 return constant_boolean_node (code == NE_EXPR, compare_type);
3249 /* Single-bit compares should always be against zero. */
3250 if (lbitsize == 1 && ! integer_zerop (rhs))
3252 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3253 rhs = fold_convert (type, integer_zero_node);
3256 /* Make a new bitfield reference, shift the constant over the
3257 appropriate number of bits and mask it with the computed mask
3258 (in case this was a signed field). If we changed it, make a new one. */
3259 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3260 if (lvolatilep)
3262 TREE_SIDE_EFFECTS (lhs) = 1;
3263 TREE_THIS_VOLATILE (lhs) = 1;
3266 rhs = fold (const_binop (BIT_AND_EXPR,
3267 const_binop (LSHIFT_EXPR,
3268 fold_convert (unsigned_type, rhs),
3269 size_int (lbitpos), 0),
3270 mask, 0));
3272 return build2 (code, compare_type,
3273 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3274 rhs);
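/* Editorial sketch, not part of fold-const.c: the mask construction used
   above, specialized to a 32-bit word in plain C.  LBITSIZE one-bits are
   placed at bit offset LBITPOS (little-endian numbering; the
   BYTES_BIG_ENDIAN adjustment is ignored here): */

#include <stdio.h>

static unsigned int
field_mask (int lbitsize, int lbitpos)
{
  int nbitsize = 32;
  unsigned int mask = ~0u;                  /* all ones, like the -1 cst */
  mask <<= nbitsize - lbitsize;             /* LSHIFT_EXPR step */
  mask >>= nbitsize - lbitsize - lbitpos;   /* RSHIFT_EXPR step */
  return mask;
}

int
main (void)
{
  printf ("%#x\n", field_mask (3, 4));      /* 0x70: three ones at bit 4 */
  return 0;
}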
3277 /* Subroutine for fold_truthop: decode a field reference.
3279 If EXP is a comparison reference, we return the innermost reference.
3281 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3282 set to the starting bit number.
3284 If the innermost field can be completely contained in a mode-sized
3285 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3287 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3288 otherwise it is not changed.
3290 *PUNSIGNEDP is set to the signedness of the field.
3292 *PMASK is set to the mask used. This is either contained in a
3293 BIT_AND_EXPR or derived from the width of the field.
3295 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3297 Return 0 if this is not a component reference or is one that we can't
3298 do anything with. */
3300 static tree
3301 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3302 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3303 int *punsignedp, int *pvolatilep,
3304 tree *pmask, tree *pand_mask)
3306 tree outer_type = 0;
3307 tree and_mask = 0;
3308 tree mask, inner, offset;
3309 tree unsigned_type;
3310 unsigned int precision;
3312 /* All the optimizations using this function assume integer fields.
3313 There are problems with FP fields since the type_for_size call
3314 below can fail for, e.g., XFmode. */
3315 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3316 return 0;
3318 /* We are interested in the bare arrangement of bits, so strip everything
3319 that doesn't affect the machine mode. However, record the type of the
3320 outermost expression if it may matter below. */
3321 if (TREE_CODE (exp) == NOP_EXPR
3322 || TREE_CODE (exp) == CONVERT_EXPR
3323 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3324 outer_type = TREE_TYPE (exp);
3325 STRIP_NOPS (exp);
3327 if (TREE_CODE (exp) == BIT_AND_EXPR)
3329 and_mask = TREE_OPERAND (exp, 1);
3330 exp = TREE_OPERAND (exp, 0);
3331 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3332 if (TREE_CODE (and_mask) != INTEGER_CST)
3333 return 0;
3336 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3337 punsignedp, pvolatilep, false);
3338 if ((inner == exp && and_mask == 0)
3339 || *pbitsize < 0 || offset != 0
3340 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3341 return 0;
3343 /* If the number of bits in the reference is the same as the bitsize of
3344 the outer type, then the outer type gives the signedness. Otherwise
3345 (in case of a small bitfield) the signedness is unchanged. */
3346 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3347 *punsignedp = TYPE_UNSIGNED (outer_type);
3349 /* Compute the mask to access the bitfield. */
3350 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3351 precision = TYPE_PRECISION (unsigned_type);
3353 mask = build_int_cst (unsigned_type, -1);
3354 mask = force_fit_type (mask, 0, false, false);
3356 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3357 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3359 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3360 if (and_mask != 0)
3361 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3362 fold_convert (unsigned_type, and_mask), mask);
3364 *pmask = mask;
3365 *pand_mask = and_mask;
3366 return inner;
3369 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3370 bit positions. */
3372 static int
3373 all_ones_mask_p (tree mask, int size)
3375 tree type = TREE_TYPE (mask);
3376 unsigned int precision = TYPE_PRECISION (type);
3377 tree tmask;
3379 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3380 tmask = force_fit_type (tmask, 0, false, false);
3382 return
3383 tree_int_cst_equal (mask,
3384 const_binop (RSHIFT_EXPR,
3385 const_binop (LSHIFT_EXPR, tmask,
3386 size_int (precision - size),
3387 0),
3388 size_int (precision - size), 0));
3391 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3392 represents the sign bit of EXP's type. If EXP represents a sign
3393 or zero extension, also test VAL against the unextended type.
3394 The return value is the (sub)expression whose sign bit is VAL,
3395 or NULL_TREE otherwise. */
3397 static tree
3398 sign_bit_p (tree exp, tree val)
3400 unsigned HOST_WIDE_INT mask_lo, lo;
3401 HOST_WIDE_INT mask_hi, hi;
3402 int width;
3403 tree t;
3405 /* Tree EXP must have an integral type. */
3406 t = TREE_TYPE (exp);
3407 if (! INTEGRAL_TYPE_P (t))
3408 return NULL_TREE;
3410 /* Tree VAL must be an integer constant. */
3411 if (TREE_CODE (val) != INTEGER_CST
3412 || TREE_CONSTANT_OVERFLOW (val))
3413 return NULL_TREE;
3415 width = TYPE_PRECISION (t);
3416 if (width > HOST_BITS_PER_WIDE_INT)
3418 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3419 lo = 0;
3421 mask_hi = ((unsigned HOST_WIDE_INT) -1
3422 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3423 mask_lo = -1;
3425 else
3427 hi = 0;
3428 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3430 mask_hi = 0;
3431 mask_lo = ((unsigned HOST_WIDE_INT) -1
3432 >> (HOST_BITS_PER_WIDE_INT - width));
3435 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3436 treat VAL as if it were unsigned. */
3437 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3438 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3439 return exp;
3441 /* Handle extension from a narrower type. */
3442 if (TREE_CODE (exp) == NOP_EXPR
3443 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3444 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3446 return NULL_TREE;
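/* Editorial sketch, not part of fold-const.c: the sign-bit constant that
   sign_bit_p compares VAL against, for widths up to one 64-bit word: */

#include <stdio.h>

static unsigned long long
sign_bit (int width)
{
  return 1ull << (width - 1);
}

int
main (void)
{
  printf ("%#llx %#llx\n", sign_bit (8), sign_bit (32)); /* 0x80 0x80000000 */
  return 0;
}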
3449 /* Subroutine for fold_truthop: determine if an operand is simple enough
3450 to be evaluated unconditionally. */
3452 static int
3453 simple_operand_p (tree exp)
3455 /* Strip any conversions that don't change the machine mode. */
3456 STRIP_NOPS (exp);
3458 return (CONSTANT_CLASS_P (exp)
3459 || TREE_CODE (exp) == SSA_NAME
3460 || (DECL_P (exp)
3461 && ! TREE_ADDRESSABLE (exp)
3462 && ! TREE_THIS_VOLATILE (exp)
3463 && ! DECL_NONLOCAL (exp)
3464 /* Don't regard global variables as simple. They may be
3465 allocated in ways unknown to the compiler (shared memory,
3466 #pragma weak, etc). */
3467 && ! TREE_PUBLIC (exp)
3468 && ! DECL_EXTERNAL (exp)
3469 /* Loading a static variable is unduly expensive, but global
3470 registers aren't expensive. */
3471 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3474 /* The following functions are subroutines to fold_range_test and allow it to
3475 try to change a logical combination of comparisons into a range test.
3477 For example, both
3478 X == 2 || X == 3 || X == 4 || X == 5
3479 and
3480 X >= 2 && X <= 5
3481 are converted to
3482 (unsigned) (X - 2) <= 3
3484 We describe each set of comparisons as being either inside or outside
3485 a range, using a variable named like IN_P, and then describe the
3486 range with a lower and upper bound. If one of the bounds is omitted,
3487 it represents either the highest or lowest value of the type.
3489 In the comments below, we represent a range by two numbers in brackets
3490 preceded by a "+" to designate being inside that range, or a "-" to
3491 designate being outside that range, so the condition can be inverted by
3492 flipping the prefix. An omitted bound is represented by a "-". For
3493 example, "- [-, 10]" means being outside the range starting at the lowest
3494 possible value and ending at 10, in other words, being greater than 10.
3495 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3496 always false.
3498 We set up things so that the missing bounds are handled in a consistent
3499 manner so neither a missing bound nor "true" and "false" need to be
3500 handled using a special case. */
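/* Editorial sketch, not part of fold-const.c: the range-test rewrite from
   the comment above, checked exhaustively near the range boundaries: */

#include <stdio.h>

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      int a = (x == 2 || x == 3 || x == 4 || x == 5);
      int b = (x >= 2 && x <= 5);
      int c = (unsigned) (x - 2) <= 3;
      if (a != b || b != c)
        {
          puts ("mismatch");
          return 1;
        }
    }
  puts ("all three forms agree");
  return 0;
}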
3502 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3503 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3504 and UPPER1_P are nonzero if the respective argument is an upper bound
3505 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3506 must be specified for a comparison. ARG1 will be converted to ARG0's
3507 type if both are specified. */
3509 static tree
3510 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3511 tree arg1, int upper1_p)
3513 tree tem;
3514 int result;
3515 int sgn0, sgn1;
3517 /* If neither arg represents infinity, do the normal operation.
3518 Else, if not a comparison, return infinity. Else handle the special
3519 comparison rules. Note that most of the cases below won't occur, but
3520 are handled for consistency. */
3522 if (arg0 != 0 && arg1 != 0)
3524 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3525 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3526 STRIP_NOPS (tem);
3527 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3530 if (TREE_CODE_CLASS (code) != tcc_comparison)
3531 return 0;
3533 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3534 for neither. In real maths, we cannot assume open-ended ranges are
3535 the same. But, this is computer arithmetic, where numbers are finite.
3536 We can therefore represent any unbounded bound by a value Z greater
3537 than any representable number. This permits us to treat unbounded
3538 ranges as equal. */
3539 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3540 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3541 switch (code)
3543 case EQ_EXPR:
3544 result = sgn0 == sgn1;
3545 break;
3546 case NE_EXPR:
3547 result = sgn0 != sgn1;
3548 break;
3549 case LT_EXPR:
3550 result = sgn0 < sgn1;
3551 break;
3552 case LE_EXPR:
3553 result = sgn0 <= sgn1;
3554 break;
3555 case GT_EXPR:
3556 result = sgn0 > sgn1;
3557 break;
3558 case GE_EXPR:
3559 result = sgn0 >= sgn1;
3560 break;
3561 default:
3562 gcc_unreachable ();
3565 return constant_boolean_node (result, type);
3568 /* Given EXP, a logical expression, set the range it is testing into
3569 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3570 actually being tested. *PLOW and *PHIGH will be made of the same type
3571 as the returned expression. If EXP is not a comparison, we will most
3572 likely not be returning a useful value and range. */
3574 static tree
3575 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3577 enum tree_code code;
3578 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3579 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3580 int in_p, n_in_p;
3581 tree low, high, n_low, n_high;
3583 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3584 and see if we can refine the range. Some of the cases below may not
3585 happen, but it doesn't seem worth worrying about this. We "continue"
3586 the outer loop when we've changed something; otherwise we "break"
3587 the switch, which will "break" the while. */
3589 in_p = 0;
3590 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3592 while (1)
3594 code = TREE_CODE (exp);
3595 exp_type = TREE_TYPE (exp);
3597 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3599 if (TREE_CODE_LENGTH (code) > 0)
3600 arg0 = TREE_OPERAND (exp, 0);
3601 if (TREE_CODE_CLASS (code) == tcc_comparison
3602 || TREE_CODE_CLASS (code) == tcc_unary
3603 || TREE_CODE_CLASS (code) == tcc_binary)
3604 arg0_type = TREE_TYPE (arg0);
3605 if (TREE_CODE_CLASS (code) == tcc_binary
3606 || TREE_CODE_CLASS (code) == tcc_comparison
3607 || (TREE_CODE_CLASS (code) == tcc_expression
3608 && TREE_CODE_LENGTH (code) > 1))
3609 arg1 = TREE_OPERAND (exp, 1);
3612 switch (code)
3614 case TRUTH_NOT_EXPR:
3615 in_p = ! in_p, exp = arg0;
3616 continue;
3618 case EQ_EXPR: case NE_EXPR:
3619 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3620 /* We can only do something if the range is testing for zero
3621 and if the second operand is an integer constant. Note that
3622 saying something is "in" the range we make is done by
3623 complementing IN_P, since IN_P is set for the initial case of
3624 being not equal to zero; "out" leaves it alone. */
3625 if (low == 0 || high == 0
3626 || ! integer_zerop (low) || ! integer_zerop (high)
3627 || TREE_CODE (arg1) != INTEGER_CST)
3628 break;
3630 switch (code)
3632 case NE_EXPR: /* - [c, c] */
3633 low = high = arg1;
3634 break;
3635 case EQ_EXPR: /* + [c, c] */
3636 in_p = ! in_p, low = high = arg1;
3637 break;
3638 case GT_EXPR: /* - [-, c] */
3639 low = 0, high = arg1;
3640 break;
3641 case GE_EXPR: /* + [c, -] */
3642 in_p = ! in_p, low = arg1, high = 0;
3643 break;
3644 case LT_EXPR: /* - [c, -] */
3645 low = arg1, high = 0;
3646 break;
3647 case LE_EXPR: /* + [-, c] */
3648 in_p = ! in_p, low = 0, high = arg1;
3649 break;
3650 default:
3651 gcc_unreachable ();
3654 /* If this is an unsigned comparison, we also know that EXP is
3655 greater than or equal to zero. We base the range tests we make
3656 on that fact, so we record it here so we can parse existing
3657 range tests. We test arg0_type since often the return type
3658 of, e.g. EQ_EXPR, is boolean. */
3659 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3661 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3662 in_p, low, high, 1,
3663 fold_convert (arg0_type, integer_zero_node),
3664 NULL_TREE))
3665 break;
3667 in_p = n_in_p, low = n_low, high = n_high;
3669 /* If the high bound is missing, but we have a nonzero low
3670 bound, reverse the range so it goes from zero to the low bound
3671 minus 1. */
3672 if (high == 0 && low && ! integer_zerop (low))
3674 in_p = ! in_p;
3675 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3676 integer_one_node, 0);
3677 low = fold_convert (arg0_type, integer_zero_node);
3681 exp = arg0;
3682 continue;
3684 case NEGATE_EXPR:
3685 /* (-x) IN [a,b] -> x in [-b, -a] */
3686 n_low = range_binop (MINUS_EXPR, exp_type,
3687 fold_convert (exp_type, integer_zero_node),
3688 0, high, 1);
3689 n_high = range_binop (MINUS_EXPR, exp_type,
3690 fold_convert (exp_type, integer_zero_node),
3691 0, low, 0);
3692 low = n_low, high = n_high;
3693 exp = arg0;
3694 continue;
3696 case BIT_NOT_EXPR:
3697 /* ~ X -> -X - 1 */
3698 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3699 fold_convert (exp_type, integer_one_node));
3700 continue;
3702 case PLUS_EXPR: case MINUS_EXPR:
3703 if (TREE_CODE (arg1) != INTEGER_CST)
3704 break;
3706 /* If EXP is signed, any overflow in the computation is undefined,
3707 so we don't worry about it so long as our computations on
3708 the bounds don't overflow. For unsigned, overflow is defined
3709 and this is exactly the right thing. */
3710 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3711 arg0_type, low, 0, arg1, 0);
3712 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3713 arg0_type, high, 1, arg1, 0);
3714 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3715 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3716 break;
3718 /* Check for an unsigned range which has wrapped around the maximum
3719 value thus making n_high < n_low, and normalize it. */
3720 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3722 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3723 integer_one_node, 0);
3724 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3725 integer_one_node, 0);
3727 /* If the range is of the form +/- [ x+1, x ], we won't
3728 be able to normalize it. But then, it represents the
3729 whole range or the empty set, so make it
3730 +/- [ -, - ]. */
3731 if (tree_int_cst_equal (n_low, low)
3732 && tree_int_cst_equal (n_high, high))
3733 low = high = 0;
3734 else
3735 in_p = ! in_p;
3737 else
3738 low = n_low, high = n_high;
3740 exp = arg0;
3741 continue;
3743 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3744 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3745 break;
3747 if (! INTEGRAL_TYPE_P (arg0_type)
3748 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3749 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3750 break;
3752 n_low = low, n_high = high;
3754 if (n_low != 0)
3755 n_low = fold_convert (arg0_type, n_low);
3757 if (n_high != 0)
3758 n_high = fold_convert (arg0_type, n_high);
3761 /* If we're converting arg0 from an unsigned type to exp's
3762 signed type, we will be doing the comparison as unsigned.
3763 The tests above have already verified that LOW and HIGH
3764 are both positive.
3766 So we have to ensure that we will handle large unsigned
3767 values the same way that the current signed bounds treat
3768 negative values. */
3770 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3772 tree high_positive;
3773 tree equiv_type = lang_hooks.types.type_for_mode
3774 (TYPE_MODE (arg0_type), 1);
3776 /* A range without an upper bound is, naturally, unbounded.
3777 Since convert would have cropped a very large value, use
3778 the max value for the destination type. */
3779 high_positive
3780 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3781 : TYPE_MAX_VALUE (arg0_type);
3783 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3784 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3785 fold_convert (arg0_type,
3786 high_positive),
3787 fold_convert (arg0_type,
3788 integer_one_node));
3790 /* If the low bound is specified, "and" the range with the
3791 range for which the original unsigned value will be
3792 positive. */
3793 if (low != 0)
3795 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3796 1, n_low, n_high, 1,
3797 fold_convert (arg0_type,
3798 integer_zero_node),
3799 high_positive))
3800 break;
3802 in_p = (n_in_p == in_p);
3804 else
3806 /* Otherwise, "or" the range with the range of the input
3807 that will be interpreted as negative. */
3808 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3809 0, n_low, n_high, 1,
3810 fold_convert (arg0_type,
3811 integer_zero_node),
3812 high_positive))
3813 break;
3815 in_p = (in_p != n_in_p);
3819 exp = arg0;
3820 low = n_low, high = n_high;
3821 continue;
3823 default:
3824 break;
3827 break;
3830 /* If EXP is a constant, we can evaluate whether this is true or false. */
3831 if (TREE_CODE (exp) == INTEGER_CST)
3833 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3834 exp, 0, low, 0))
3835 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3836 exp, 1, high, 1)));
3837 low = high = 0;
3838 exp = 0;
3841 *pin_p = in_p, *plow = low, *phigh = high;
3842 return exp;
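
/* Editorial sketch (not part of the original fold-const.c): the
   PLUS_EXPR/MINUS_EXPR case above normalizes an unsigned range that
   has wrapped past the type's maximum value.  The standalone C below,
   with an invented helper in_wrapped_range, shows the same
   normalization on 8-bit values.  */

#include <assert.h>
#include <stdint.h>

static int
in_wrapped_range (uint8_t x, uint8_t low, uint8_t high)
{
  if (low <= high)
    return low <= x && x <= high;
  /* Wrapped range: the complement [high + 1, low - 1] is a proper
     range, so test it and invert, as make_range flips in_p.  */
  return !(high + 1 <= x && x <= low - 1);
}

int
main (void)
{
  /* For unsigned 8-bit x, "x + 10 in [5, 14]" becomes
     "x in [251, 4]", a wrapped range.  */
  assert (in_wrapped_range (0, 251, 4));    /* 0 + 10 == 10.        */
  assert (in_wrapped_range (252, 251, 4));  /* 252 + 10 wraps to 6. */
  assert (!in_wrapped_range (100, 251, 4)); /* 100 + 10 == 110.     */
  return 0;
}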
3845 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3846 type, TYPE, return an expression to test if EXP is in (or out of, depending
3847 on IN_P) the range. Return 0 if the test couldn't be created. */
3849 static tree
3850 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3852 tree etype = TREE_TYPE (exp);
3853 tree value;
3855 if (! in_p)
3857 value = build_range_check (type, exp, 1, low, high);
3858 if (value != 0)
3859 return invert_truthvalue (value);
3861 return 0;
3864 if (low == 0 && high == 0)
3865 return fold_convert (type, integer_one_node);
3867 if (low == 0)
3868 return fold_build2 (LE_EXPR, type, exp, high);
3870 if (high == 0)
3871 return fold_build2 (GE_EXPR, type, exp, low);
3873 if (operand_equal_p (low, high, 0))
3874 return fold_build2 (EQ_EXPR, type, exp, low);
3876 if (integer_zerop (low))
3878 if (! TYPE_UNSIGNED (etype))
3880 etype = lang_hooks.types.unsigned_type (etype);
3881 high = fold_convert (etype, high);
3882 exp = fold_convert (etype, exp);
3884 return build_range_check (type, exp, 1, 0, high);
3887 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3888 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3890 unsigned HOST_WIDE_INT lo;
3891 HOST_WIDE_INT hi;
3892 int prec;
3894 prec = TYPE_PRECISION (etype);
3895 if (prec <= HOST_BITS_PER_WIDE_INT)
3897 hi = 0;
3898 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3900 else
3902 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3903 lo = (unsigned HOST_WIDE_INT) -1;
3906 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3908 if (TYPE_UNSIGNED (etype))
3910 etype = lang_hooks.types.signed_type (etype);
3911 exp = fold_convert (etype, exp);
3913 return fold_build2 (GT_EXPR, type, exp,
3914 fold_convert (etype, integer_zero_node));
3918 value = const_binop (MINUS_EXPR, high, low, 0);
3919 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3921 tree utype, minv, maxv;
3923 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3924 for the type in question, as we rely on this here. */
3925 switch (TREE_CODE (etype))
3927 case INTEGER_TYPE:
3928 case ENUMERAL_TYPE:
3929 case CHAR_TYPE:
3930 utype = lang_hooks.types.unsigned_type (etype);
3931 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3932 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3933 integer_one_node, 1);
3934 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3935 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3936 minv, 1, maxv, 1)))
3938 etype = utype;
3939 high = fold_convert (etype, high);
3940 low = fold_convert (etype, low);
3941 exp = fold_convert (etype, exp);
3942 value = const_binop (MINUS_EXPR, high, low, 0);
3944 break;
3945 default:
3946 break;
3950 if (value != 0 && ! TREE_OVERFLOW (value))
3951 return build_range_check (type,
3952 fold_build2 (MINUS_EXPR, etype, exp, low),
3953 1, fold_convert (etype, integer_zero_node),
3954 value);
3956 return 0;
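
/* Editorial sketch (not part of the original fold-const.c): the
   central rewrite in build_range_check is the classic one-comparison
   range test: once EXP is biased so the range starts at zero,
   LOW <= X && X <= HIGH becomes a single unsigned comparison.  The
   invented helper below assumes X - LOW and HIGH - LOW do not
   overflow, the condition the TREE_OVERFLOW checks above guard.  */

#include <assert.h>

static int
in_range (int x, int low, int high)
{
  /* (unsigned) (x - low) maps values below LOW to huge unsigned
     numbers, so one compare tests both bounds.  */
  return (unsigned) (x - low) <= (unsigned) (high - low);
}

int
main (void)
{
  assert (in_range ('7', '0', '9'));
  assert (!in_range ('a', '0', '9'));
  assert (!in_range ('0' - 1, '0', '9'));
  return 0;
}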
3959 /* Given two ranges, see if we can merge them into one. Return 1 if we
3960 can, 0 if we can't. Set the output range into the specified parameters. */
3962 static int
3963 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3964 tree high0, int in1_p, tree low1, tree high1)
3966 int no_overlap;
3967 int subset;
3968 int temp;
3969 tree tem;
3970 int in_p;
3971 tree low, high;
3972 int lowequal = ((low0 == 0 && low1 == 0)
3973 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3974 low0, 0, low1, 0)));
3975 int highequal = ((high0 == 0 && high1 == 0)
3976 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3977 high0, 1, high1, 1)));
3979 /* Make range 0 be the range that starts first, or ends last if they
3980 start at the same value. Swap them if it isn't. */
3981 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3982 low0, 0, low1, 0))
3983 || (lowequal
3984 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3985 high1, 1, high0, 1))))
3987 temp = in0_p, in0_p = in1_p, in1_p = temp;
3988 tem = low0, low0 = low1, low1 = tem;
3989 tem = high0, high0 = high1, high1 = tem;
3992 /* Now flag two cases, whether the ranges are disjoint or whether the
3993 second range is totally subsumed in the first. Note that the tests
3994 below are simplified by the ones above. */
3995 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3996 high0, 1, low1, 0));
3997 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3998 high1, 1, high0, 1));
4000 /* We now have four cases, depending on whether we are including or
4001 excluding the two ranges. */
4002 if (in0_p && in1_p)
4004 /* If they don't overlap, the result is false. If the second range
4005 is a subset it is the result. Otherwise, the range is from the start
4006 of the second to the end of the first. */
4007 if (no_overlap)
4008 in_p = 0, low = high = 0;
4009 else if (subset)
4010 in_p = 1, low = low1, high = high1;
4011 else
4012 in_p = 1, low = low1, high = high0;
4015 else if (in0_p && ! in1_p)
4017 /* If they don't overlap, the result is the first range. If they are
4018 equal, the result is false. If the second range is a subset of the
4019 first, and the ranges begin at the same place, we go from just after
4020 the end of the first range to the end of the second. If the second
4021 range is not a subset of the first, or if it is a subset and both
4022 ranges end at the same place, the range starts at the start of the
4023 first range and ends just before the second range.
4024 Otherwise, we can't describe this as a single range. */
4025 if (no_overlap)
4026 in_p = 1, low = low0, high = high0;
4027 else if (lowequal && highequal)
4028 in_p = 0, low = high = 0;
4029 else if (subset && lowequal)
4031 in_p = 1, high = high0;
4032 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4033 integer_one_node, 0);
4035 else if (! subset || highequal)
4037 in_p = 1, low = low0;
4038 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4039 integer_one_node, 0);
4041 else
4042 return 0;
4045 else if (! in0_p && in1_p)
4047 /* If they don't overlap, the result is the second range. If the second
4048 is a subset of the first, the result is false. Otherwise,
4049 the range starts just after the first range and ends at the
4050 end of the second. */
4051 if (no_overlap)
4052 in_p = 1, low = low1, high = high1;
4053 else if (subset || highequal)
4054 in_p = 0, low = high = 0;
4055 else
4057 in_p = 1, high = high1;
4058 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4059 integer_one_node, 0);
4063 else
4065 /* The case where we are excluding both ranges. Here the complex case
4066 is if they don't overlap. In that case, the only time we have a
4067 range is if they are adjacent. If the second is a subset of the
4068 first, the result is the first. Otherwise, the range to exclude
4069 starts at the beginning of the first range and ends at the end of the
4070 second. */
4071 if (no_overlap)
4073 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4074 range_binop (PLUS_EXPR, NULL_TREE,
4075 high0, 1,
4076 integer_one_node, 1),
4077 1, low1, 0)))
4078 in_p = 0, low = low0, high = high1;
4079 else
4081 /* Canonicalize - [min, x] into - [-, x]. */
4082 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4083 switch (TREE_CODE (TREE_TYPE (low0)))
4085 case ENUMERAL_TYPE:
4086 if (TYPE_PRECISION (TREE_TYPE (low0))
4087 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4088 break;
4089 /* FALLTHROUGH */
4090 case INTEGER_TYPE:
4091 case CHAR_TYPE:
4092 if (tree_int_cst_equal (low0,
4093 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4094 low0 = 0;
4095 break;
4096 case POINTER_TYPE:
4097 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4098 && integer_zerop (low0))
4099 low0 = 0;
4100 break;
4101 default:
4102 break;
4105 /* Canonicalize - [x, max] into - [x, -]. */
4106 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4107 switch (TREE_CODE (TREE_TYPE (high1)))
4109 case ENUMERAL_TYPE:
4110 if (TYPE_PRECISION (TREE_TYPE (high1))
4111 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4112 break;
4113 /* FALLTHROUGH */
4114 case INTEGER_TYPE:
4115 case CHAR_TYPE:
4116 if (tree_int_cst_equal (high1,
4117 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4118 high1 = 0;
4119 break;
4120 case POINTER_TYPE:
4121 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4122 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4123 high1, 1,
4124 integer_one_node, 1)))
4125 high1 = 0;
4126 break;
4127 default:
4128 break;
4131 /* The ranges might be also adjacent between the maximum and
4132 minimum values of the given type. For
4133 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4134 return + [x + 1, y - 1]. */
4135 if (low0 == 0 && high1 == 0)
4137 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4138 integer_one_node, 1);
4139 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4140 integer_one_node, 0);
4141 if (low == 0 || high == 0)
4142 return 0;
4144 in_p = 1;
4146 else
4147 return 0;
4150 else if (subset)
4151 in_p = 0, low = low0, high = high0;
4152 else
4153 in_p = 0, low = low0, high = high1;
4156 *pin_p = in_p, *plow = low, *phigh = high;
4157 return 1;
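
/* Editorial sketch (not part of the original fold-const.c): the
   in0_p && in1_p arm of merge_ranges is plain interval intersection.
   The invented helper below uses closed int intervals and ignores
   the unbounded (NULL_TREE) ends the real code supports.  */

#include <assert.h>

static int
intersect (int low0, int high0, int low1, int high1,
           int *low, int *high)
{
  /* Make range 0 the one that starts first, as merge_ranges does.  */
  if (low1 < low0)
    {
      int t;
      t = low0, low0 = low1, low1 = t;
      t = high0, high0 = high1, high1 = t;
    }
  if (high0 < low1)
    return 0;           /* Disjoint: the intersection is empty.  */
  *low = low1;
  *high = high1 < high0 ? high1 : high0;
  return 1;
}

int
main (void)
{
  int lo, hi;
  /* Merging 'c >= 48 && c <= 57' with 'c >= 50 && c <= 70'.  */
  assert (intersect (48, 57, 50, 70, &lo, &hi) && lo == 50 && hi == 57);
  assert (!intersect (0, 9, 20, 30, &lo, &hi));
  return 0;
}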
4161 /* Subroutine of fold, looking inside expressions of the form
4162 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4163 of the COND_EXPR. This function is also used to optimize
4164 A op B ? C : A, by reversing the comparison first.
4166 Return a folded expression whose code is not a COND_EXPR
4167 anymore, or NULL_TREE if no folding opportunity is found. */
4169 static tree
4170 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4172 enum tree_code comp_code = TREE_CODE (arg0);
4173 tree arg00 = TREE_OPERAND (arg0, 0);
4174 tree arg01 = TREE_OPERAND (arg0, 1);
4175 tree arg1_type = TREE_TYPE (arg1);
4176 tree tem;
4178 STRIP_NOPS (arg1);
4179 STRIP_NOPS (arg2);
4181 /* If we have A op 0 ? A : -A, consider applying the following
4182 transformations:
4184 A == 0? A : -A same as -A
4185 A != 0? A : -A same as A
4186 A >= 0? A : -A same as abs (A)
4187 A > 0? A : -A same as abs (A)
4188 A <= 0? A : -A same as -abs (A)
4189 A < 0? A : -A same as -abs (A)
4191 None of these transformations work for modes with signed
4192 zeros. If A is +/-0, the first two transformations will
4193 change the sign of the result (from +0 to -0, or vice
4194 versa). The last four will fix the sign of the result,
4195 even though the original expressions could be positive or
4196 negative, depending on the sign of A.
4198 Note that all these transformations are correct if A is
4199 NaN, since the two alternatives (A and -A) are also NaNs. */
4200 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4201 ? real_zerop (arg01)
4202 : integer_zerop (arg01))
4203 && ((TREE_CODE (arg2) == NEGATE_EXPR
4204 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4205 /* In the case that A is of the form X-Y, '-A' (arg2) may
4206 have already been folded to Y-X, check for that. */
4207 || (TREE_CODE (arg1) == MINUS_EXPR
4208 && TREE_CODE (arg2) == MINUS_EXPR
4209 && operand_equal_p (TREE_OPERAND (arg1, 0),
4210 TREE_OPERAND (arg2, 1), 0)
4211 && operand_equal_p (TREE_OPERAND (arg1, 1),
4212 TREE_OPERAND (arg2, 0), 0))))
4213 switch (comp_code)
4215 case EQ_EXPR:
4216 case UNEQ_EXPR:
4217 tem = fold_convert (arg1_type, arg1);
4218 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4219 case NE_EXPR:
4220 case LTGT_EXPR:
4221 return pedantic_non_lvalue (fold_convert (type, arg1));
4222 case UNGE_EXPR:
4223 case UNGT_EXPR:
4224 if (flag_trapping_math)
4225 break;
4226 /* Fall through. */
4227 case GE_EXPR:
4228 case GT_EXPR:
4229 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4230 arg1 = fold_convert (lang_hooks.types.signed_type
4231 (TREE_TYPE (arg1)), arg1);
4232 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4233 return pedantic_non_lvalue (fold_convert (type, tem));
4234 case UNLE_EXPR:
4235 case UNLT_EXPR:
4236 if (flag_trapping_math)
4237 break;
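/* Fall through. */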
4238 case LE_EXPR:
4239 case LT_EXPR:
4240 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4241 arg1 = fold_convert (lang_hooks.types.signed_type
4242 (TREE_TYPE (arg1)), arg1);
4243 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4244 return negate_expr (fold_convert (type, tem));
4245 default:
4246 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4247 break;
4250 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4251 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4252 both transformations are correct when A is NaN: A != 0
4253 is then true, and A == 0 is false. */
4255 if (integer_zerop (arg01) && integer_zerop (arg2))
4257 if (comp_code == NE_EXPR)
4258 return pedantic_non_lvalue (fold_convert (type, arg1));
4259 else if (comp_code == EQ_EXPR)
4260 return fold_convert (type, integer_zero_node);
4263 /* Try some transformations of A op B ? A : B.
4265 A == B? A : B same as B
4266 A != B? A : B same as A
4267 A >= B? A : B same as max (A, B)
4268 A > B? A : B same as max (B, A)
4269 A <= B? A : B same as min (A, B)
4270 A < B? A : B same as min (B, A)
4272 As above, these transformations don't work in the presence
4273 of signed zeros. For example, if A and B are zeros of
4274 opposite sign, the first two transformations will change
4275 the sign of the result. In the last four, the original
4276 expressions give different results for (A=+0, B=-0) and
4277 (A=-0, B=+0), but the transformed expressions do not.
4279 The first two transformations are correct if either A or B
4280 is a NaN. In the first transformation, the condition will
4281 be false, and B will indeed be chosen. In the case of the
4282 second transformation, the condition A != B will be true,
4283 and A will be chosen.
4285 The conversions to max() and min() are not correct if B is
4286 a number and A is not. The conditions in the original
4287 expressions will be false, so all four give B. The min()
4288 and max() versions would give a NaN instead. */
4289 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4290 /* Avoid these transformations if the COND_EXPR may be used
4291 as an lvalue in the C++ front-end. PR c++/19199. */
4292 && (in_gimple_form
4293 || strcmp (lang_hooks.name, "GNU C++") != 0
4294 || ! maybe_lvalue_p (arg1)
4295 || ! maybe_lvalue_p (arg2)))
4297 tree comp_op0 = arg00;
4298 tree comp_op1 = arg01;
4299 tree comp_type = TREE_TYPE (comp_op0);
4301 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4302 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4304 comp_type = type;
4305 comp_op0 = arg1;
4306 comp_op1 = arg2;
4309 switch (comp_code)
4311 case EQ_EXPR:
4312 return pedantic_non_lvalue (fold_convert (type, arg2));
4313 case NE_EXPR:
4314 return pedantic_non_lvalue (fold_convert (type, arg1));
4315 case LE_EXPR:
4316 case LT_EXPR:
4317 case UNLE_EXPR:
4318 case UNLT_EXPR:
4319 /* In C++ a ?: expression can be an lvalue, so put the
4320 operand which will be used if they are equal first
4321 so that we can convert this back to the
4322 corresponding COND_EXPR. */
4323 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4325 comp_op0 = fold_convert (comp_type, comp_op0);
4326 comp_op1 = fold_convert (comp_type, comp_op1);
4327 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4328 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4329 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4330 return pedantic_non_lvalue (fold_convert (type, tem));
4332 break;
4333 case GE_EXPR:
4334 case GT_EXPR:
4335 case UNGE_EXPR:
4336 case UNGT_EXPR:
4337 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4339 comp_op0 = fold_convert (comp_type, comp_op0);
4340 comp_op1 = fold_convert (comp_type, comp_op1);
4341 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4342 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4343 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4344 return pedantic_non_lvalue (fold_convert (type, tem));
4346 break;
4347 case UNEQ_EXPR:
4348 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4349 return pedantic_non_lvalue (fold_convert (type, arg2));
4350 break;
4351 case LTGT_EXPR:
4352 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4353 return pedantic_non_lvalue (fold_convert (type, arg1));
4354 break;
4355 default:
4356 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4357 break;
4361 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4362 we might still be able to simplify this. For example,
4363 if C1 is one less or one more than C2, this might have started
4364 out as a MIN or MAX and been transformed by this function.
4365 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4367 if (INTEGRAL_TYPE_P (type)
4368 && TREE_CODE (arg01) == INTEGER_CST
4369 && TREE_CODE (arg2) == INTEGER_CST)
4370 switch (comp_code)
4372 case EQ_EXPR:
4373 /* We can replace A with C1 in this case. */
4374 arg1 = fold_convert (type, arg01);
4375 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4377 case LT_EXPR:
4378 /* If C1 is C2 + 1, this is min(A, C2). */
4379 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4380 OEP_ONLY_CONST)
4381 && operand_equal_p (arg01,
4382 const_binop (PLUS_EXPR, arg2,
4383 integer_one_node, 0),
4384 OEP_ONLY_CONST))
4385 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4386 type, arg1, arg2));
4387 break;
4389 case LE_EXPR:
4390 /* If C1 is C2 - 1, this is min(A, C2). */
4391 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4392 OEP_ONLY_CONST)
4393 && operand_equal_p (arg01,
4394 const_binop (MINUS_EXPR, arg2,
4395 integer_one_node, 0),
4396 OEP_ONLY_CONST))
4397 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4398 type, arg1, arg2));
4399 break;
4401 case GT_EXPR:
4402 /* If C1 is C2 - 1, this is max(A, C2). */
4403 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4404 OEP_ONLY_CONST)
4405 && operand_equal_p (arg01,
4406 const_binop (MINUS_EXPR, arg2,
4407 integer_one_node, 0),
4408 OEP_ONLY_CONST))
4409 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4410 type, arg1, arg2));
4411 break;
4413 case GE_EXPR:
4414 /* If C1 is C2 + 1, this is max(A, C2). */
4415 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4416 OEP_ONLY_CONST)
4417 && operand_equal_p (arg01,
4418 const_binop (PLUS_EXPR, arg2,
4419 integer_one_node, 0),
4420 OEP_ONLY_CONST))
4421 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4422 type, arg1, arg2));
4423 break;
4424 case NE_EXPR:
4425 break;
4426 default:
4427 gcc_unreachable ();
4430 return NULL_TREE;
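
/* Editorial sketch (not part of the original fold-const.c): why the
   A op 0 ? A : -A folds above are fenced off when signed zeros are
   honored.  For a == -0.0 the raw conditional yields -0.0 but the
   ABS_EXPR form yields +0.0, so the two are distinguishable.  */

#include <math.h>
#include <stdio.h>

int
main (void)
{
  double a = -0.0;
  double cond_form = a >= 0 ? a : -a;   /* -0.0 >= 0 is true: -0.0.  */
  double abs_form = fabs (a);           /* +0.0.                      */
  /* Prints "1 0": the sign bit differs between the two forms.  */
  printf ("%d %d\n", signbit (cond_form) != 0, signbit (abs_form) != 0);
  return 0;
}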
4435 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4436 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4437 #endif
4439 /* EXP is some logical combination of boolean tests. See if we can
4440 merge it into some range test. Return the new tree if so. */
4442 static tree
4443 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4445 int or_op = (code == TRUTH_ORIF_EXPR
4446 || code == TRUTH_OR_EXPR);
4447 int in0_p, in1_p, in_p;
4448 tree low0, low1, low, high0, high1, high;
4449 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4450 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4451 tree tem;
4453 /* If this is an OR operation, invert both sides; we will invert
4454 again at the end. */
4455 if (or_op)
4456 in0_p = ! in0_p, in1_p = ! in1_p;
4458 /* If both expressions are the same, if we can merge the ranges, and we
4459 can build the range test, return it or it inverted. If one of the
4460 ranges is always true or always false, consider it to be the same
4461 expression as the other. */
4462 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4463 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4464 in1_p, low1, high1)
4465 && 0 != (tem = (build_range_check (type,
4466 lhs != 0 ? lhs
4467 : rhs != 0 ? rhs : integer_zero_node,
4468 in_p, low, high))))
4469 return or_op ? invert_truthvalue (tem) : tem;
4471 /* On machines where the branch cost is expensive, if this is a
4472 short-circuited branch and the underlying object on both sides
4473 is the same, make a non-short-circuit operation. */
4474 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4475 && lhs != 0 && rhs != 0
4476 && (code == TRUTH_ANDIF_EXPR
4477 || code == TRUTH_ORIF_EXPR)
4478 && operand_equal_p (lhs, rhs, 0))
4480 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4481 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4482 which cases we can't do this. */
4483 if (simple_operand_p (lhs))
4484 return build2 (code == TRUTH_ANDIF_EXPR
4485 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4486 type, op0, op1);
4488 else if (lang_hooks.decls.global_bindings_p () == 0
4489 && ! CONTAINS_PLACEHOLDER_P (lhs))
4491 tree common = save_expr (lhs);
4493 if (0 != (lhs = build_range_check (type, common,
4494 or_op ? ! in0_p : in0_p,
4495 low0, high0))
4496 && (0 != (rhs = build_range_check (type, common,
4497 or_op ? ! in1_p : in1_p,
4498 low1, high1))))
4499 return build2 (code == TRUTH_ANDIF_EXPR
4500 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4501 type, lhs, rhs);
4505 return 0;
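
/* Editorial sketch (not part of the original fold-const.c): the
   LOGICAL_OP_NON_SHORT_CIRCUIT fallback above turns TRUTH_ANDIF into
   TRUTH_AND when both sides test the same simple operand; in C
   terms, '&&' of two cheap comparisons becomes a branchless '&'.  */

#include <assert.h>

static int
is_digit_branchy (int c)
{
  return c >= '0' && c <= '9';
}

static int
is_digit_branchless (int c)
{
  return (c >= '0') & (c <= '9');
}

int
main (void)
{
  int c;
  for (c = -300; c < 300; c++)
    assert (is_digit_branchy (c) == is_digit_branchless (c));
  return 0;
}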
4508 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4509 bit value. Arrange things so the extra bits will be set to zero if and
4510 only if C is sign-extended to its full width. If MASK is nonzero,
4511 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4513 static tree
4514 unextend (tree c, int p, int unsignedp, tree mask)
4516 tree type = TREE_TYPE (c);
4517 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4518 tree temp;
4520 if (p == modesize || unsignedp)
4521 return c;
4523 /* We work by getting just the sign bit into the low-order bit, then
4524 into the high-order bit, then sign-extend. We then XOR that value
4525 with C. */
4526 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4527 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4529 /* We must use a signed type in order to get an arithmetic right shift.
4530 However, we must also avoid introducing accidental overflows, so that
4531 a subsequent call to integer_zerop will work. Hence we must
4532 do the type conversion here. At this point, the constant is either
4533 zero or one, and the conversion to a signed type can never overflow.
4534 We could get an overflow if this conversion is done anywhere else. */
4535 if (TYPE_UNSIGNED (type))
4536 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4538 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4539 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4540 if (mask != 0)
4541 temp = const_binop (BIT_AND_EXPR, temp,
4542 fold_convert (TREE_TYPE (c), mask), 0);
4543 /* If necessary, convert the type back to match the type of C. */
4544 if (TYPE_UNSIGNED (type))
4545 temp = fold_convert (type, temp);
4547 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
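
/* Editorial sketch (not part of the original fold-const.c): unextend
   isolates the sign bit of a P-bit constant and XORs it back in.
   The same shift/XOR idea, run in the usual direction, is the
   classic branchless sign extension from P bits, shown below with an
   invented helper (the final cast is implementation-defined in C90;
   two's complement is assumed).  */

#include <assert.h>
#include <stdint.h>

static int32_t
sign_extend (uint32_t x, int p)
{
  uint32_t m = (uint32_t) 1 << (p - 1);  /* sign bit of the field  */
  x &= ((uint32_t) 1 << p) - 1;          /* keep the low P bits    */
  return (int32_t) ((x ^ m) - m);        /* flip it and subtract   */
}

int
main (void)
{
  assert (sign_extend (0x7f, 8) == 127);
  assert (sign_extend (0xff, 8) == -1);
  assert (sign_extend (0x80, 8) == -128);
  return 0;
}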
4550 /* Find ways of folding logical expressions of LHS and RHS:
4551 Try to merge two comparisons to the same innermost item.
4552 Look for range tests like "ch >= '0' && ch <= '9'".
4553 Look for combinations of simple terms on machines with expensive branches
4554 and evaluate the RHS unconditionally.
4556 For example, if we have p->a == 2 && p->b == 4 and we can make an
4557 object large enough to span both A and B, we can do this with a comparison
4558 against the object ANDed with the a mask.
4560 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4561 operations to do this with one comparison.
4563 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4564 function and the one above.
4566 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4567 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4569 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4570 two operands.
4572 We return the simplified tree or 0 if no optimization is possible. */
4574 static tree
4575 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4577 /* If this is the "or" of two comparisons, we can do something if
4578 the comparisons are NE_EXPR. If this is the "and", we can do something
4579 if the comparisons are EQ_EXPR. I.e.,
4580 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4582 WANTED_CODE is this operation code. For single bit fields, we can
4583 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4584 comparison for one-bit fields. */
4586 enum tree_code wanted_code;
4587 enum tree_code lcode, rcode;
4588 tree ll_arg, lr_arg, rl_arg, rr_arg;
4589 tree ll_inner, lr_inner, rl_inner, rr_inner;
4590 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4591 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4592 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4593 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4594 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4595 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4596 enum machine_mode lnmode, rnmode;
4597 tree ll_mask, lr_mask, rl_mask, rr_mask;
4598 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4599 tree l_const, r_const;
4600 tree lntype, rntype, result;
4601 int first_bit, end_bit;
4602 int volatilep;
4604 /* Start by getting the comparison codes. Fail if anything is volatile.
4605 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4606 it were surrounded with a NE_EXPR. */
4608 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4609 return 0;
4611 lcode = TREE_CODE (lhs);
4612 rcode = TREE_CODE (rhs);
4614 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4616 lhs = build2 (NE_EXPR, truth_type, lhs,
4617 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4618 lcode = NE_EXPR;
4621 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4623 rhs = build2 (NE_EXPR, truth_type, rhs,
4624 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4625 rcode = NE_EXPR;
4628 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4629 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4630 return 0;
4632 ll_arg = TREE_OPERAND (lhs, 0);
4633 lr_arg = TREE_OPERAND (lhs, 1);
4634 rl_arg = TREE_OPERAND (rhs, 0);
4635 rr_arg = TREE_OPERAND (rhs, 1);
4637 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4638 if (simple_operand_p (ll_arg)
4639 && simple_operand_p (lr_arg))
4641 tree result;
4642 if (operand_equal_p (ll_arg, rl_arg, 0)
4643 && operand_equal_p (lr_arg, rr_arg, 0))
4645 result = combine_comparisons (code, lcode, rcode,
4646 truth_type, ll_arg, lr_arg);
4647 if (result)
4648 return result;
4650 else if (operand_equal_p (ll_arg, rr_arg, 0)
4651 && operand_equal_p (lr_arg, rl_arg, 0))
4653 result = combine_comparisons (code, lcode,
4654 swap_tree_comparison (rcode),
4655 truth_type, ll_arg, lr_arg);
4656 if (result)
4657 return result;
4661 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4662 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4664 /* If the RHS can be evaluated unconditionally and its operands are
4665 simple, it wins to evaluate the RHS unconditionally on machines
4666 with expensive branches. In this case, this isn't a comparison
4667 that can be merged. Avoid doing this if the RHS is a floating-point
4668 comparison since those can trap. */
4670 if (BRANCH_COST >= 2
4671 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4672 && simple_operand_p (rl_arg)
4673 && simple_operand_p (rr_arg))
4675 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4676 if (code == TRUTH_OR_EXPR
4677 && lcode == NE_EXPR && integer_zerop (lr_arg)
4678 && rcode == NE_EXPR && integer_zerop (rr_arg)
4679 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4680 return build2 (NE_EXPR, truth_type,
4681 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4682 ll_arg, rl_arg),
4683 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4685 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4686 if (code == TRUTH_AND_EXPR
4687 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4688 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4689 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4690 return build2 (EQ_EXPR, truth_type,
4691 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4692 ll_arg, rl_arg),
4693 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4695 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4696 return build2 (code, truth_type, lhs, rhs);
4699 /* See if the comparisons can be merged. Then get all the parameters for
4700 each side. */
4702 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4703 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4704 return 0;
4706 volatilep = 0;
4707 ll_inner = decode_field_reference (ll_arg,
4708 &ll_bitsize, &ll_bitpos, &ll_mode,
4709 &ll_unsignedp, &volatilep, &ll_mask,
4710 &ll_and_mask);
4711 lr_inner = decode_field_reference (lr_arg,
4712 &lr_bitsize, &lr_bitpos, &lr_mode,
4713 &lr_unsignedp, &volatilep, &lr_mask,
4714 &lr_and_mask);
4715 rl_inner = decode_field_reference (rl_arg,
4716 &rl_bitsize, &rl_bitpos, &rl_mode,
4717 &rl_unsignedp, &volatilep, &rl_mask,
4718 &rl_and_mask);
4719 rr_inner = decode_field_reference (rr_arg,
4720 &rr_bitsize, &rr_bitpos, &rr_mode,
4721 &rr_unsignedp, &volatilep, &rr_mask,
4722 &rr_and_mask);
4724 /* The inner operation on the lhs of each comparison must be the same
4725 if we are to be able to do anything.
4726 Then see if we have constants. If not, the same must be true for
4727 the rhs's. */
4728 if (volatilep || ll_inner == 0 || rl_inner == 0
4729 || ! operand_equal_p (ll_inner, rl_inner, 0))
4730 return 0;
4732 if (TREE_CODE (lr_arg) == INTEGER_CST
4733 && TREE_CODE (rr_arg) == INTEGER_CST)
4734 l_const = lr_arg, r_const = rr_arg;
4735 else if (lr_inner == 0 || rr_inner == 0
4736 || ! operand_equal_p (lr_inner, rr_inner, 0))
4737 return 0;
4738 else
4739 l_const = r_const = 0;
4741 /* If either comparison code is not correct for our logical operation,
4742 fail. However, we can convert a one-bit comparison against zero into
4743 the opposite comparison against that bit being set in the field. */
4745 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4746 if (lcode != wanted_code)
4748 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4750 /* Make the left operand unsigned, since we are only interested
4751 in the value of one bit. Otherwise we are doing the wrong
4752 thing below. */
4753 ll_unsignedp = 1;
4754 l_const = ll_mask;
4756 else
4757 return 0;
4760 /* This is analogous to the code for l_const above. */
4761 if (rcode != wanted_code)
4763 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4765 rl_unsignedp = 1;
4766 r_const = rl_mask;
4768 else
4769 return 0;
4772 /* After this point all optimizations will generate bit-field
4773 references, which we might not want. */
4774 if (! lang_hooks.can_use_bit_fields_p ())
4775 return 0;
4777 /* See if we can find a mode that contains both fields being compared on
4778 the left. If we can't, fail. Otherwise, update all constants and masks
4779 to be relative to a field of that size. */
4780 first_bit = MIN (ll_bitpos, rl_bitpos);
4781 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4782 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4783 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4784 volatilep);
4785 if (lnmode == VOIDmode)
4786 return 0;
4788 lnbitsize = GET_MODE_BITSIZE (lnmode);
4789 lnbitpos = first_bit & ~ (lnbitsize - 1);
4790 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4791 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4793 if (BYTES_BIG_ENDIAN)
4795 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4796 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4799 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4800 size_int (xll_bitpos), 0);
4801 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4802 size_int (xrl_bitpos), 0);
4804 if (l_const)
4806 l_const = fold_convert (lntype, l_const);
4807 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4808 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4809 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4810 fold_build1 (BIT_NOT_EXPR,
4811 lntype, ll_mask),
4812 0)))
4814 warning ("comparison is always %d", wanted_code == NE_EXPR);
4816 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4819 if (r_const)
4821 r_const = fold_convert (lntype, r_const);
4822 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4823 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4824 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4825 fold_build1 (BIT_NOT_EXPR,
4826 lntype, rl_mask),
4827 0)))
4829 warning ("comparison is always %d", wanted_code == NE_EXPR);
4831 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4835 /* If the right sides are not constant, do the same for them. Also,
4836 disallow this optimization if a size or signedness mismatch occurs
4837 between the left and right sides. */
4838 if (l_const == 0)
4840 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4841 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4842 /* Make sure the two fields on the right
4843 correspond to the left without being swapped. */
4844 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4845 return 0;
4847 first_bit = MIN (lr_bitpos, rr_bitpos);
4848 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4849 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4850 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4851 volatilep);
4852 if (rnmode == VOIDmode)
4853 return 0;
4855 rnbitsize = GET_MODE_BITSIZE (rnmode);
4856 rnbitpos = first_bit & ~ (rnbitsize - 1);
4857 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4858 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4860 if (BYTES_BIG_ENDIAN)
4862 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4863 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4866 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4867 size_int (xlr_bitpos), 0);
4868 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4869 size_int (xrr_bitpos), 0);
4871 /* Make a mask that corresponds to both fields being compared.
4872 Do this for both items being compared. If the operands are the
4873 same size and the bits being compared are in the same position
4874 then we can do this by masking both and comparing the masked
4875 results. */
4876 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4877 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4878 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4880 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4881 ll_unsignedp || rl_unsignedp);
4882 if (! all_ones_mask_p (ll_mask, lnbitsize))
4883 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4885 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4886 lr_unsignedp || rr_unsignedp);
4887 if (! all_ones_mask_p (lr_mask, rnbitsize))
4888 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4890 return build2 (wanted_code, truth_type, lhs, rhs);
4893 /* There is still another way we can do something: If both pairs of
4894 fields being compared are adjacent, we may be able to make a wider
4895 field containing them both.
4897 Note that we still must mask the lhs/rhs expressions. Furthermore,
4898 the mask must be shifted to account for the shift done by
4899 make_bit_field_ref. */
4900 if ((ll_bitsize + ll_bitpos == rl_bitpos
4901 && lr_bitsize + lr_bitpos == rr_bitpos)
4902 || (ll_bitpos == rl_bitpos + rl_bitsize
4903 && lr_bitpos == rr_bitpos + rr_bitsize))
4905 tree type;
4907 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4908 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4909 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4910 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4912 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4913 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4914 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4915 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4917 /* Convert to the smaller type before masking out unwanted bits. */
4918 type = lntype;
4919 if (lntype != rntype)
4921 if (lnbitsize > rnbitsize)
4923 lhs = fold_convert (rntype, lhs);
4924 ll_mask = fold_convert (rntype, ll_mask);
4925 type = rntype;
4927 else if (lnbitsize < rnbitsize)
4929 rhs = fold_convert (lntype, rhs);
4930 lr_mask = fold_convert (lntype, lr_mask);
4931 type = lntype;
4935 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4936 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4938 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4939 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4941 return build2 (wanted_code, truth_type, lhs, rhs);
4944 return 0;
4947 /* Handle the case of comparisons with constants. If there is something in
4948 common between the masks, those bits of the constants must be the same.
4949 If not, the condition is always false. Test for this to avoid generating
4950 incorrect code below. */
4951 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4952 if (! integer_zerop (result)
4953 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4954 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4956 if (wanted_code == NE_EXPR)
4958 warning ("%<or%> of unmatched not-equal tests is always 1");
4959 return constant_boolean_node (true, truth_type);
4961 else
4963 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4964 return constant_boolean_node (false, truth_type);
4968 /* Construct the expression we will return. First get the component
4969 reference we will make. Unless the mask is all ones the width of
4970 that field, perform the mask operation. Then compare with the
4971 merged constant. */
4972 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4973 ll_unsignedp || rl_unsignedp);
4975 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4976 if (! all_ones_mask_p (ll_mask, lnbitsize))
4977 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4979 return build2 (wanted_code, truth_type, result,
4980 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
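
/* Editorial sketch (not part of the original fold-const.c): one of
   fold_truthop's simple wins, spelled out in plain C -- two equality
   tests against zero collapse into a single OR plus one test.  */

#include <assert.h>

static int
both_zero_two_tests (unsigned a, unsigned b)
{
  return a == 0 && b == 0;
}

static int
both_zero_one_test (unsigned a, unsigned b)
{
  return (a | b) == 0;
}

int
main (void)
{
  unsigned a, b;
  for (a = 0; a < 4; a++)
    for (b = 0; b < 4; b++)
      assert (both_zero_two_tests (a, b) == both_zero_one_test (a, b));
  return 0;
}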
4983 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4984 constant. */
4986 static tree
4987 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
4989 tree arg0 = op0;
4990 enum tree_code op_code;
4991 tree comp_const = op1;
4992 tree minmax_const;
4993 int consts_equal, consts_lt;
4994 tree inner;
4996 STRIP_SIGN_NOPS (arg0);
4998 op_code = TREE_CODE (arg0);
4999 minmax_const = TREE_OPERAND (arg0, 1);
5000 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5001 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5002 inner = TREE_OPERAND (arg0, 0);
5004 /* If something does not permit us to optimize, return the original tree. */
5005 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5006 || TREE_CODE (comp_const) != INTEGER_CST
5007 || TREE_CONSTANT_OVERFLOW (comp_const)
5008 || TREE_CODE (minmax_const) != INTEGER_CST
5009 || TREE_CONSTANT_OVERFLOW (minmax_const))
5010 return NULL_TREE;
5012 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5013 and GT_EXPR, doing the rest with recursive calls using logical
5014 simplifications. */
5015 switch (code)
5017 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5019 /* FIXME: We should be able to invert code without building a
5020 scratch tree node, but doing so would require us to
5021 duplicate a part of invert_truthvalue here. */
5022 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5023 tem = optimize_minmax_comparison (TREE_CODE (tem),
5024 TREE_TYPE (tem),
5025 TREE_OPERAND (tem, 0),
5026 TREE_OPERAND (tem, 1));
5027 return invert_truthvalue (tem);
5030 case GE_EXPR:
5031 return
5032 fold_build2 (TRUTH_ORIF_EXPR, type,
5033 optimize_minmax_comparison
5034 (EQ_EXPR, type, arg0, comp_const),
5035 optimize_minmax_comparison
5036 (GT_EXPR, type, arg0, comp_const));
5038 case EQ_EXPR:
5039 if (op_code == MAX_EXPR && consts_equal)
5040 /* MAX (X, 0) == 0 -> X <= 0 */
5041 return fold_build2 (LE_EXPR, type, inner, comp_const);
5043 else if (op_code == MAX_EXPR && consts_lt)
5044 /* MAX (X, 0) == 5 -> X == 5 */
5045 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5047 else if (op_code == MAX_EXPR)
5048 /* MAX (X, 0) == -1 -> false */
5049 return omit_one_operand (type, integer_zero_node, inner);
5051 else if (consts_equal)
5052 /* MIN (X, 0) == 0 -> X >= 0 */
5053 return fold_build2 (GE_EXPR, type, inner, comp_const);
5055 else if (consts_lt)
5056 /* MIN (X, 0) == 5 -> false */
5057 return omit_one_operand (type, integer_zero_node, inner);
5059 else
5060 /* MIN (X, 0) == -1 -> X == -1 */
5061 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5063 case GT_EXPR:
5064 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5065 /* MAX (X, 0) > 0 -> X > 0
5066 MAX (X, 0) > 5 -> X > 5 */
5067 return fold_build2 (GT_EXPR, type, inner, comp_const);
5069 else if (op_code == MAX_EXPR)
5070 /* MAX (X, 0) > -1 -> true */
5071 return omit_one_operand (type, integer_one_node, inner);
5073 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5074 /* MIN (X, 0) > 0 -> false
5075 MIN (X, 0) > 5 -> false */
5076 return omit_one_operand (type, integer_zero_node, inner);
5078 else
5079 /* MIN (X, 0) > -1 -> X > -1 */
5080 return fold_build2 (GT_EXPR, type, inner, comp_const);
5082 default:
5083 return NULL_TREE;
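
/* Editorial check (not part of the original fold-const.c) of a few
   of the rewrites documented in the comments above, exhaustively
   over a small domain.  MAX/MIN are invented macros standing in for
   MAX_EXPR and MIN_EXPR.  */

#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int
main (void)
{
  int x;
  for (x = -10; x <= 10; x++)
    {
      assert ((MAX (x, 0) > 0) == (x > 0));      /* MAX (X, 0) > 0 -> X > 0     */
      assert ((MAX (x, 0) == -1) == 0);          /* MAX (X, 0) == -1 -> false   */
      assert ((MIN (x, 0) == -1) == (x == -1));  /* MIN (X, 0) == -1 -> X == -1 */
    }
  return 0;
}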
5087 /* T is an integer expression that is being multiplied or divided by, or
5088 reduced modulo, a constant C (CODE says which, and what kind of divide
5089 or modulus). See if we can eliminate that operation by folding it with
5090 other operations already in T. WIDE_TYPE, if non-null, is a type that
5091 should be used for the computation if wider than our type.
5093 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5094 (X * 2) + (Y * 4). We must, however, be assured that either the original
5095 expression would not overflow or that overflow is undefined for the type
5096 in the language in question.
5098 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5099 the machine has a multiply-accumulate insn or that this is part of an
5100 addressing calculation.
5102 If we return a non-null expression, it is an equivalent form of the
5103 original computation, but need not be in the original type. */
5105 static tree
5106 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5108 /* To avoid exponential search depth, refuse to allow recursion past
5109 three levels. Beyond that (1) it's highly unlikely that we'll find
5110 something interesting and (2) we've probably processed it before
5111 when we built the inner expression. */
5113 static int depth;
5114 tree ret;
5116 if (depth > 3)
5117 return NULL;
5119 depth++;
5120 ret = extract_muldiv_1 (t, c, code, wide_type);
5121 depth--;
5123 return ret;
5126 static tree
5127 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5129 tree type = TREE_TYPE (t);
5130 enum tree_code tcode = TREE_CODE (t);
5131 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5132 > GET_MODE_SIZE (TYPE_MODE (type)))
5133 ? wide_type : type);
5134 tree t1, t2;
5135 int same_p = tcode == code;
5136 tree op0 = NULL_TREE, op1 = NULL_TREE;
5138 /* Don't deal with constants of zero here; they confuse the code below. */
5139 if (integer_zerop (c))
5140 return NULL_TREE;
5142 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5143 op0 = TREE_OPERAND (t, 0);
5145 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5146 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5148 /* Note that we need not handle conditional operations here since fold
5149 already handles those cases. So just do arithmetic here. */
5150 switch (tcode)
5152 case INTEGER_CST:
5153 /* For a constant, we can always simplify if we are a multiply
5154 or (for divide and modulus) if it is a multiple of our constant. */
5155 if (code == MULT_EXPR
5156 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5157 return const_binop (code, fold_convert (ctype, t),
5158 fold_convert (ctype, c), 0);
5159 break;
5161 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5162 /* If op0 is an expression ... */
5163 if ((COMPARISON_CLASS_P (op0)
5164 || UNARY_CLASS_P (op0)
5165 || BINARY_CLASS_P (op0)
5166 || EXPRESSION_CLASS_P (op0))
5167 /* ... and is unsigned, and its type is smaller than ctype,
5168 then we cannot pass through as widening. */
5169 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5170 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5171 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5172 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5173 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5174 /* ... or this is a truncation (t is narrower than op0),
5175 then we cannot pass through this narrowing. */
5176 || (GET_MODE_SIZE (TYPE_MODE (type))
5177 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5178 /* ... or signedness changes for division or modulus,
5179 then we cannot pass through this conversion. */
5180 || (code != MULT_EXPR
5181 && (TYPE_UNSIGNED (ctype)
5182 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5183 break;
5185 /* Pass the constant down and see if we can make a simplification. If
5186 we can, replace this expression with the inner simplification for
5187 possible later conversion to our or some other type. */
5188 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5189 && TREE_CODE (t2) == INTEGER_CST
5190 && ! TREE_CONSTANT_OVERFLOW (t2)
5191 && (0 != (t1 = extract_muldiv (op0, t2, code,
5192 code == MULT_EXPR
5193 ? ctype : NULL_TREE))))
5194 return t1;
5195 break;
5197 case ABS_EXPR:
5198 /* If widening the type changes it from signed to unsigned, then we
5199 must avoid building ABS_EXPR itself as unsigned. */
5200 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5202 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5203 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5205 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5206 return fold_convert (ctype, t1);
5208 break;
5210 /* FALLTHROUGH */
5211 case NEGATE_EXPR:
5212 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5213 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5214 break;
5216 case MIN_EXPR: case MAX_EXPR:
5217 /* If widening the type changes the signedness, then we can't perform
5218 this optimization as that changes the result. */
5219 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5220 break;
5222 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5223 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5224 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5226 if (tree_int_cst_sgn (c) < 0)
5227 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5229 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5230 fold_convert (ctype, t2));
5232 break;
5234 case LSHIFT_EXPR: case RSHIFT_EXPR:
5235 /* If the second operand is constant, this is a multiplication
5236 or floor division, by a power of two, so we can treat it that
5237 way unless the multiplier or divisor overflows. Signed
5238 left-shift overflow is implementation-defined rather than
5239 undefined in C90, so do not convert signed left shift into
5240 multiplication. */
5241 if (TREE_CODE (op1) == INTEGER_CST
5242 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5243 /* const_binop may not detect overflow correctly,
5244 so check for it explicitly here. */
5245 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5246 && TREE_INT_CST_HIGH (op1) == 0
5247 && 0 != (t1 = fold_convert (ctype,
5248 const_binop (LSHIFT_EXPR,
5249 size_one_node,
5250 op1, 0)))
5251 && ! TREE_OVERFLOW (t1))
5252 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5253 ? MULT_EXPR : FLOOR_DIV_EXPR,
5254 ctype, fold_convert (ctype, op0), t1),
5255 c, code, wide_type);
5256 break;
5258 case PLUS_EXPR: case MINUS_EXPR:
5259 /* See if we can eliminate the operation on both sides. If we can, we
5260 can return a new PLUS or MINUS. If we can't, the only remaining
5261 cases where we can do anything are if the second operand is a
5262 constant. */
5263 t1 = extract_muldiv (op0, c, code, wide_type);
5264 t2 = extract_muldiv (op1, c, code, wide_type);
5265 if (t1 != 0 && t2 != 0
5266 && (code == MULT_EXPR
5267 /* If not multiplication, we can only do this if both operands
5268 are divisible by c. */
5269 || (multiple_of_p (ctype, op0, c)
5270 && multiple_of_p (ctype, op1, c))))
5271 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5272 fold_convert (ctype, t2));
5274 /* If this was a subtraction, negate OP1 and set it to be an addition.
5275 This simplifies the logic below. */
5276 if (tcode == MINUS_EXPR)
5277 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5279 if (TREE_CODE (op1) != INTEGER_CST)
5280 break;
5282 /* If either OP1 or C are negative, this optimization is not safe for
5283 some of the division and remainder types while for others we need
5284 to change the code. */
5285 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5287 if (code == CEIL_DIV_EXPR)
5288 code = FLOOR_DIV_EXPR;
5289 else if (code == FLOOR_DIV_EXPR)
5290 code = CEIL_DIV_EXPR;
5291 else if (code != MULT_EXPR
5292 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5293 break;
5296 /* If it's a multiply or a division/modulus operation of a multiple
5297 of our constant, do the operation and verify it doesn't overflow. */
5298 if (code == MULT_EXPR
5299 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5301 op1 = const_binop (code, fold_convert (ctype, op1),
5302 fold_convert (ctype, c), 0);
5303 /* We allow the constant to overflow with wrapping semantics. */
5304 if (op1 == 0
5305 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5306 break;
5308 else
5309 break;
5311 /* If we have an unsigned type that is not a sizetype, we cannot widen
5312 the operation since it will change the result if the original
5313 computation overflowed. */
5314 if (TYPE_UNSIGNED (ctype)
5315 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5316 && ctype != type)
5317 break;
5319 /* If we were able to eliminate our operation from the first side,
5320 apply our operation to the second side and reform the PLUS. */
5321 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5322 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5324 /* The last case is if we are a multiply. In that case, we can
5325 apply the distributive law to commute the multiply and addition
5326 if the multiplication of the constants doesn't overflow. */
5327 if (code == MULT_EXPR)
5328 return fold_build2 (tcode, ctype,
5329 fold_build2 (code, ctype,
5330 fold_convert (ctype, op0),
5331 fold_convert (ctype, c)),
5332 op1);
5334 break;
5336 case MULT_EXPR:
5337 /* We have a special case here if we are doing something like
5338 (C * 8) % 4 since we know that's zero. */
5339 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5340 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5341 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5342 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5343 return omit_one_operand (type, integer_zero_node, op0);
5345 /* ... fall through ... */
5347 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5348 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5349 /* If we can extract our operation from the LHS, do so and return a
5350 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5351 do something only if the second operand is a constant. */
5352 if (same_p
5353 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5354 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5355 fold_convert (ctype, op1));
5356 else if (tcode == MULT_EXPR && code == MULT_EXPR
5357 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5358 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5359 fold_convert (ctype, t1));
5360 else if (TREE_CODE (op1) != INTEGER_CST)
5361 return 0;
5363 /* If these are the same operation types, we can associate them
5364 assuming no overflow. */
5365 if (tcode == code
5366 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5367 fold_convert (ctype, c), 0))
5368 && ! TREE_OVERFLOW (t1))
5369 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5371 /* If these operations "cancel" each other, we have the main
5372 optimizations of this pass, which occur when either constant is a
5373 multiple of the other, in which case we replace this with either an
5374 operation of CODE or TCODE.
5376 If we have an unsigned type that is not a sizetype, we cannot do
5377 this since it will change the result if the original computation
5378 overflowed. */
5379 if ((! TYPE_UNSIGNED (ctype)
5380 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5381 && ! flag_wrapv
5382 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5383 || (tcode == MULT_EXPR
5384 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5385 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5387 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5388 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5389 fold_convert (ctype,
5390 const_binop (TRUNC_DIV_EXPR,
5391 op1, c, 0)));
5392 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5393 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5394 fold_convert (ctype,
5395 const_binop (TRUNC_DIV_EXPR,
5396 c, op1, 0)));
5398 break;
5400 default:
5401 break;
5404 return 0;
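
/* Editorial check (not part of the original fold-const.c) of the
   examples in extract_muldiv's header comment.  The identities hold
   only when the original expression cannot overflow (or overflow is
   undefined and may be assumed away), exactly the condition the code
   above is careful about.  */

#include <assert.h>

int
main (void)
{
  long x = 123, y = -45;
  /* (X * 8 + Y * 16) / 4 == X * 2 + Y * 4.  */
  assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  /* The canonicalization (X + 7) * 4 == X * 4 + 28.  */
  assert ((x + 7) * 4 == x * 4 + 28);
  return 0;
}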
5407 /* Return a node which has the indicated constant VALUE (either 0 or
5408 1), and is of the indicated TYPE. */
5410 tree
5411 constant_boolean_node (int value, tree type)
5413 if (type == integer_type_node)
5414 return value ? integer_one_node : integer_zero_node;
5415 else if (type == boolean_type_node)
5416 return value ? boolean_true_node : boolean_false_node;
5417 else
5418 return build_int_cst (type, value);
5422 /* Return true if expr looks like an ARRAY_REF and set base and
5423 offset to the appropriate trees. If there is no offset,
5424 offset is set to NULL_TREE. */
5426 static bool
5427 extract_array_ref (tree expr, tree *base, tree *offset)
5429 /* We have to be careful when stripping nops, as changing the
5430 base type can change the meaning of the offset. */
5431 tree inner_expr = expr;
5432 STRIP_NOPS (inner_expr);
5433 /* One canonical form is a PLUS_EXPR with the first
5434 argument being an ADDR_EXPR with a possible NOP_EXPR
5435 attached. */
5436 if (TREE_CODE (expr) == PLUS_EXPR)
5438 tree op0 = TREE_OPERAND (expr, 0);
5439 STRIP_NOPS (op0);
5440 if (TREE_CODE (op0) == ADDR_EXPR)
5442 *base = TREE_OPERAND (expr, 0);
5443 *offset = TREE_OPERAND (expr, 1);
5444 return true;
5447 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5448 which we transform into an ADDR_EXPR with appropriate
5449 offset. For other arguments to the ADDR_EXPR we assume
5450 zero offset and as such do not care about the ADDR_EXPR
5451 type and strip possible nops from it. */
5452 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5454 tree op0 = TREE_OPERAND (inner_expr, 0);
5455 if (TREE_CODE (op0) == ARRAY_REF)
5457 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5458 *offset = TREE_OPERAND (op0, 1);
5460 else
5462 *base = inner_expr;
5463 *offset = NULL_TREE;
5465 return true;
5468 return false;
5472 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5473 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5474 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5475 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5476 COND is the first argument to CODE; otherwise (as in the example
5477 given here), it is the second argument. TYPE is the type of the
5478 original expression. Return NULL_TREE if no simplification is
5479 possible. */
5481 static tree
5482 fold_binary_op_with_conditional_arg (enum tree_code code,
5483 tree type, tree op0, tree op1,
5484 tree cond, tree arg, int cond_first_p)
5486 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5487 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5488 tree test, true_value, false_value;
5489 tree lhs = NULL_TREE;
5490 tree rhs = NULL_TREE;
5492 /* This transformation is only worthwhile if we don't have to wrap
5493 arg in a SAVE_EXPR, and the operation can be simplified on at least
5494 one of the branches once it's pushed inside the COND_EXPR. */
5495 if (!TREE_CONSTANT (arg))
5496 return NULL_TREE;
5498 if (TREE_CODE (cond) == COND_EXPR)
5500 test = TREE_OPERAND (cond, 0);
5501 true_value = TREE_OPERAND (cond, 1);
5502 false_value = TREE_OPERAND (cond, 2);
5503 /* If this operand throws an exception, then it does not make
5504 sense to try to perform a logical or arithmetic operation
5505 involving it. */
5506 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5507 lhs = true_value;
5508 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5509 rhs = false_value;
5511 else
5513 tree testtype = TREE_TYPE (cond);
5514 test = cond;
5515 true_value = constant_boolean_node (true, testtype);
5516 false_value = constant_boolean_node (false, testtype);
5519 arg = fold_convert (arg_type, arg);
5520 if (lhs == 0)
5522 true_value = fold_convert (cond_type, true_value);
5523 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5524 : build2 (code, type, arg, true_value));
5526 if (rhs == 0)
5528 false_value = fold_convert (cond_type, false_value);
5529 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5530 : build2 (code, type, arg, false_value));
5533 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5534 return fold_convert (type, test);
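/* Editor's note: a hedged sketch (not part of the original source) of
   the transformation above at the C level, guarded by #if 0 so it
   cannot affect the build.  The function names are hypothetical; with
   the constant `10' both forms compute the same value, and the second
   often folds further on at least one branch.  */
#if 0
static int cond_before (int b, int x, int y) { return 10 + (b ? x : y); }
static int cond_after  (int b, int x, int y) { return b ? (10 + x) : (10 + y); }
#endif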
5538 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5540 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5541 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5542 ADDEND is the same as X.
5544 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5545 and finite. The problematic cases are when X is zero, and its mode
5546 has signed zeros. In the case of rounding towards -infinity,
5547 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5548 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5550 static bool
5551 fold_real_zero_addition_p (tree type, tree addend, int negate)
5553 if (!real_zerop (addend))
5554 return false;
5556 /* Don't allow the fold with -fsignaling-nans. */
5557 if (HONOR_SNANS (TYPE_MODE (type)))
5558 return false;
5560 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5561 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5562 return true;
5564 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5565 if (TREE_CODE (addend) == REAL_CST
5566 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5567 negate = !negate;
5569 /* The mode has signed zeros, and we have to honor their sign.
5570 In this situation, there is only one case we can return true for.
5571 X - 0 is the same as X unless rounding towards -infinity is
5572 supported. */
5573 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
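/* Editor's note: a worked example (editor's sketch) of why signed
   zeros block the fold above.  Under default rounding, if x == -0.0
   then x + 0.0 evaluates to +0.0, so `x + 0.0 -> x' would change the
   sign of the result; `x - 0.0 -> x' remains safe there because
   -0.0 - 0.0 == -0.0, which is why only the NEGATE case can return
   true when signed zeros are honored.  */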
5576 /* Subroutine of fold() that checks comparisons of built-in math
5577 functions against real constants.
5579 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5580 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5581 is the type of the result and ARG0 and ARG1 are the operands of the
5582 comparison. ARG1 must be a TREE_REAL_CST.
5584 The function returns the constant folded tree if a simplification
5585 can be made, and NULL_TREE otherwise. */
5587 static tree
5588 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5589 tree type, tree arg0, tree arg1)
5591 REAL_VALUE_TYPE c;
5593 if (BUILTIN_SQRT_P (fcode))
5595 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5596 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5598 c = TREE_REAL_CST (arg1);
5599 if (REAL_VALUE_NEGATIVE (c))
5601 /* sqrt(x) < y is always false, if y is negative. */
5602 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5603 return omit_one_operand (type, integer_zero_node, arg);
5605 /* sqrt(x) > y is always true, if y is negative and we
5606 don't care about NaNs, i.e. negative values of x. */
5607 if (code == NE_EXPR || !HONOR_NANS (mode))
5608 return omit_one_operand (type, integer_one_node, arg);
5610 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5611 return fold_build2 (GE_EXPR, type, arg,
5612 build_real (TREE_TYPE (arg), dconst0));
5614 else if (code == GT_EXPR || code == GE_EXPR)
5616 REAL_VALUE_TYPE c2;
5618 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5619 real_convert (&c2, mode, &c2);
5621 if (REAL_VALUE_ISINF (c2))
5623 /* sqrt(x) > y is x == +Inf, when y is very large. */
5624 if (HONOR_INFINITIES (mode))
5625 return fold_build2 (EQ_EXPR, type, arg,
5626 build_real (TREE_TYPE (arg), c2));
5628 /* sqrt(x) > y is always false, when y is very large
5629 and we don't care about infinities. */
5630 return omit_one_operand (type, integer_zero_node, arg);
5633 /* sqrt(x) > c is the same as x > c*c. */
5634 return fold_build2 (code, type, arg,
5635 build_real (TREE_TYPE (arg), c2));
5637 else if (code == LT_EXPR || code == LE_EXPR)
5639 REAL_VALUE_TYPE c2;
5641 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5642 real_convert (&c2, mode, &c2);
5644 if (REAL_VALUE_ISINF (c2))
5646 /* sqrt(x) < y is always true, when y is a very large
5647 value and we don't care about NaNs or Infinities. */
5648 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5649 return omit_one_operand (type, integer_one_node, arg);
5651 /* sqrt(x) < y is x != +Inf when y is very large and we
5652 don't care about NaNs. */
5653 if (! HONOR_NANS (mode))
5654 return fold_build2 (NE_EXPR, type, arg,
5655 build_real (TREE_TYPE (arg), c2));
5657 /* sqrt(x) < y is x >= 0 when y is very large and we
5658 don't care about Infinities. */
5659 if (! HONOR_INFINITIES (mode))
5660 return fold_build2 (GE_EXPR, type, arg,
5661 build_real (TREE_TYPE (arg), dconst0));
5663 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5664 if (lang_hooks.decls.global_bindings_p () != 0
5665 || CONTAINS_PLACEHOLDER_P (arg))
5666 return NULL_TREE;
5668 arg = save_expr (arg);
5669 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5670 fold_build2 (GE_EXPR, type, arg,
5671 build_real (TREE_TYPE (arg),
5672 dconst0)),
5673 fold_build2 (NE_EXPR, type, arg,
5674 build_real (TREE_TYPE (arg),
5675 c2)));
5678 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5679 if (! HONOR_NANS (mode))
5680 return fold_build2 (code, type, arg,
5681 build_real (TREE_TYPE (arg), c2));
5683 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5684 if (lang_hooks.decls.global_bindings_p () == 0
5685 && ! CONTAINS_PLACEHOLDER_P (arg))
5687 arg = save_expr (arg);
5688 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5689 fold_build2 (GE_EXPR, type, arg,
5690 build_real (TREE_TYPE (arg),
5691 dconst0)),
5692 fold_build2 (code, type, arg,
5693 build_real (TREE_TYPE (arg),
5694 c2)));
5699 return NULL_TREE;
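/* Editor's note: an illustrative summary (editor's sketch) of the
   sqrt comparison folds above, written as source-level identities
   that hold when NaNs may be ignored (e.g. -ffinite-math-only):

     sqrt (x) >  2.0   -->   x >  4.0
     sqrt (x) <= 2.0   -->   x <= 4.0
     sqrt (x) <  -1.0  -->   false, for every x

   where 4.0 is the c*c computed by REAL_ARITHMETIC above.  */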
5702 /* Subroutine of fold() that optimizes comparisons against Infinities,
5703 either +Inf or -Inf.
5705 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5706 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5707 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5709 The function returns the constant folded tree if a simplification
5710 can be made, and NULL_TREE otherwise. */
5712 static tree
5713 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5715 enum machine_mode mode;
5716 REAL_VALUE_TYPE max;
5717 tree temp;
5718 bool neg;
5720 mode = TYPE_MODE (TREE_TYPE (arg0));
5722 /* For negative infinity swap the sense of the comparison. */
5723 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5724 if (neg)
5725 code = swap_tree_comparison (code);
5727 switch (code)
5729 case GT_EXPR:
5730 /* x > +Inf is always false, if we ignore sNaNs. */
5731 if (HONOR_SNANS (mode))
5732 return NULL_TREE;
5733 return omit_one_operand (type, integer_zero_node, arg0);
5735 case LE_EXPR:
5736 /* x <= +Inf is always true, if we don't care about NaNs. */
5737 if (! HONOR_NANS (mode))
5738 return omit_one_operand (type, integer_one_node, arg0);
5740 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5741 if (lang_hooks.decls.global_bindings_p () == 0
5742 && ! CONTAINS_PLACEHOLDER_P (arg0))
5744 arg0 = save_expr (arg0);
5745 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5747 break;
5749 case EQ_EXPR:
5750 case GE_EXPR:
5751 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5752 real_maxval (&max, neg, mode);
5753 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5754 arg0, build_real (TREE_TYPE (arg0), max));
5756 case LT_EXPR:
5757 /* x < +Inf is always equal to x <= DBL_MAX. */
5758 real_maxval (&max, neg, mode);
5759 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5760 arg0, build_real (TREE_TYPE (arg0), max));
5762 case NE_EXPR:
5763 /* x != +Inf is always equal to !(x > DBL_MAX). */
5764 real_maxval (&max, neg, mode);
5765 if (! HONOR_NANS (mode))
5766 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5767 arg0, build_real (TREE_TYPE (arg0), max));
5769 /* The transformation below creates non-gimple code and thus is
5770 not appropriate if we are in gimple form. */
5771 if (in_gimple_form)
5772 return NULL_TREE;
5774 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5775 arg0, build_real (TREE_TYPE (arg0), max));
5776 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5778 default:
5779 break;
5782 return NULL_TREE;
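/* Editor's note: a worked example (editor's sketch) of the Infinity
   folds above, for `double x':

     x >  +Inf   -->   false (absent signaling NaNs)
     x <= +Inf   -->   x == x (false only when x is NaN)
     x == +Inf   -->   x > DBL_MAX
     x <  +Inf   -->   x <= DBL_MAX

   Comparisons against -Inf are handled by first swapping the sense of
   the comparison, as done above.  */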
5785 /* Subroutine of fold() that optimizes comparisons of a division by
5786 a nonzero integer constant against an integer constant, i.e.
5787 X/C1 op C2.
5789 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5790 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5791 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5793 The function returns the constant folded tree if a simplification
5794 can be made, and NULL_TREE otherwise. */
5796 static tree
5797 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5799 tree prod, tmp, hi, lo;
5800 tree arg00 = TREE_OPERAND (arg0, 0);
5801 tree arg01 = TREE_OPERAND (arg0, 1);
5802 unsigned HOST_WIDE_INT lpart;
5803 HOST_WIDE_INT hpart;
5804 int overflow;
5806 /* We have to do this the hard way to detect unsigned overflow.
5807 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5808 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5809 TREE_INT_CST_HIGH (arg01),
5810 TREE_INT_CST_LOW (arg1),
5811 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5812 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5813 prod = force_fit_type (prod, -1, overflow, false);
5815 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5817 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5818 lo = prod;
5820 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5821 overflow = add_double (TREE_INT_CST_LOW (prod),
5822 TREE_INT_CST_HIGH (prod),
5823 TREE_INT_CST_LOW (tmp),
5824 TREE_INT_CST_HIGH (tmp),
5825 &lpart, &hpart);
5826 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5827 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5828 TREE_CONSTANT_OVERFLOW (prod));
5830 else if (tree_int_cst_sgn (arg01) >= 0)
5832 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5833 switch (tree_int_cst_sgn (arg1))
5835 case -1:
5836 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5837 hi = prod;
5838 break;
5840 case 0:
5841 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5842 hi = tmp;
5843 break;
5845 case 1:
5846 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5847 lo = prod;
5848 break;
5850 default:
5851 gcc_unreachable ();
5854 else
5856 /* A negative divisor reverses the relational operators. */
5857 code = swap_tree_comparison (code);
5859 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5860 switch (tree_int_cst_sgn (arg1))
5862 case -1:
5863 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5864 lo = prod;
5865 break;
5867 case 0:
5868 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5869 lo = tmp;
5870 break;
5872 case 1:
5873 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5874 hi = prod;
5875 break;
5877 default:
5878 gcc_unreachable ();
5882 switch (code)
5884 case EQ_EXPR:
5885 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5886 return omit_one_operand (type, integer_zero_node, arg00);
5887 if (TREE_OVERFLOW (hi))
5888 return fold_build2 (GE_EXPR, type, arg00, lo);
5889 if (TREE_OVERFLOW (lo))
5890 return fold_build2 (LE_EXPR, type, arg00, hi);
5891 return build_range_check (type, arg00, 1, lo, hi);
5893 case NE_EXPR:
5894 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5895 return omit_one_operand (type, integer_one_node, arg00);
5896 if (TREE_OVERFLOW (hi))
5897 return fold_build2 (LT_EXPR, type, arg00, lo);
5898 if (TREE_OVERFLOW (lo))
5899 return fold_build2 (GT_EXPR, type, arg00, hi);
5900 return build_range_check (type, arg00, 0, lo, hi);
5902 case LT_EXPR:
5903 if (TREE_OVERFLOW (lo))
5904 return omit_one_operand (type, integer_zero_node, arg00);
5905 return fold_build2 (LT_EXPR, type, arg00, lo);
5907 case LE_EXPR:
5908 if (TREE_OVERFLOW (hi))
5909 return omit_one_operand (type, integer_one_node, arg00);
5910 return fold_build2 (LE_EXPR, type, arg00, hi);
5912 case GT_EXPR:
5913 if (TREE_OVERFLOW (hi))
5914 return omit_one_operand (type, integer_zero_node, arg00);
5915 return fold_build2 (GT_EXPR, type, arg00, hi);
5917 case GE_EXPR:
5918 if (TREE_OVERFLOW (lo))
5919 return omit_one_operand (type, integer_one_node, arg00);
5920 return fold_build2 (GE_EXPR, type, arg00, lo);
5922 default:
5923 break;
5926 return NULL_TREE;
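/* Editor's note: a worked example (editor's sketch) of the bounds
   computed above for X/C1 op C2.  With unsigned X, C1 = 3, C2 = 2:
   lo = C1*C2 = 6 and hi = lo + (C1-1) = 8, so

     x / 3 == 2   -->   6 <= x && x <= 8 (one range check)
     x / 3 <  2   -->   x < 6
     x / 3 >  2   -->   x > 8

   If lo or hi overflows the type, the corresponding bound is dropped
   or the comparison becomes constant, per the TREE_OVERFLOW tests
   above.  */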
5930 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5931 equality/inequality test, then return a simplified form of
5932 the test using shifts and logical operations. Otherwise return
5933 NULL_TREE. RESULT_TYPE is the desired result type. */
5935 tree
5936 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5937 tree result_type)
5939 /* If this is testing a single bit, we can optimize the test. */
5940 if ((code == NE_EXPR || code == EQ_EXPR)
5941 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5944 tree inner = TREE_OPERAND (arg0, 0);
5945 tree type = TREE_TYPE (arg0);
5946 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5947 enum machine_mode operand_mode = TYPE_MODE (type);
5948 int ops_unsigned;
5949 tree signed_type, unsigned_type, intermediate_type;
5950 tree arg00;
5952 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5953 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5954 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5955 if (arg00 != NULL_TREE
5956 /* This is only a win if casting to a signed type is cheap,
5957 i.e. when arg00's type is not a partial mode. */
5958 && TYPE_PRECISION (TREE_TYPE (arg00))
5959 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5961 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5962 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5963 result_type, fold_convert (stype, arg00),
5964 fold_convert (stype, integer_zero_node));
5967 /* Otherwise we have (A & C) != 0 where C is a single bit,
5968 convert that into ((A >> C2) & 1), where C2 = log2(C).
5969 Similarly for (A & C) == 0. */
5971 /* If INNER is a right shift of a constant and it plus BITNUM does
5972 not overflow, adjust BITNUM and INNER. */
5973 if (TREE_CODE (inner) == RSHIFT_EXPR
5974 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5975 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5976 && bitnum < TYPE_PRECISION (type)
5977 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5978 bitnum - TYPE_PRECISION (type)))
5980 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5981 inner = TREE_OPERAND (inner, 0);
5984 /* If we are going to be able to omit the AND below, we must do our
5985 operations as unsigned. If we must use the AND, we have a choice.
5986 Normally unsigned is faster, but for some machines signed is. */
5987 #ifdef LOAD_EXTEND_OP
5988 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5989 && !flag_syntax_only) ? 0 : 1;
5990 #else
5991 ops_unsigned = 1;
5992 #endif
5994 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5995 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5996 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5997 inner = fold_convert (intermediate_type, inner);
5999 if (bitnum != 0)
6000 inner = build2 (RSHIFT_EXPR, intermediate_type,
6001 inner, size_int (bitnum));
6003 if (code == EQ_EXPR)
6004 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6005 inner, integer_one_node);
6007 /* Put the AND last so it can combine with more things. */
6008 inner = build2 (BIT_AND_EXPR, intermediate_type,
6009 inner, integer_one_node);
6011 /* Make sure to return the proper type. */
6012 inner = fold_convert (result_type, inner);
6014 return inner;
6016 return NULL_TREE;
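/* Editor's note: a hedged sketch (not part of the original source) of
   the single-bit rewrites above, guarded by #if 0.  Both functions in
   the pair compute the same value; the names are hypothetical.  */
#if 0
/* (x & 8) != 0 becomes (x >> 3) & 1, with C2 = log2 (8) = 3; the
   sign-bit case (A & C) != 0 with C the sign bit becomes A < 0.  */
static int bit_naive  (unsigned x) { return (x & 8) != 0; }
static int bit_folded (unsigned x) { return (x >> 3) & 1; }
#endif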
6019 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6020 such that the evaluation of ARG1 occurs before ARG0. */
6022 static bool
6023 reorder_operands_p (tree arg0, tree arg1)
6025 if (! flag_evaluation_order)
6026 return true;
6027 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6028 return true;
6029 return ! TREE_SIDE_EFFECTS (arg0)
6030 && ! TREE_SIDE_EFFECTS (arg1);
6033 /* Test whether it is preferable to swap two operands, ARG0 and
6034 ARG1, for example because ARG0 is an integer constant and ARG1
6035 isn't. If REORDER is true, only recommend swapping if we can
6036 evaluate the operands in reverse order. */
6038 bool
6039 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6041 STRIP_SIGN_NOPS (arg0);
6042 STRIP_SIGN_NOPS (arg1);
6044 if (TREE_CODE (arg1) == INTEGER_CST)
6045 return 0;
6046 if (TREE_CODE (arg0) == INTEGER_CST)
6047 return 1;
6049 if (TREE_CODE (arg1) == REAL_CST)
6050 return 0;
6051 if (TREE_CODE (arg0) == REAL_CST)
6052 return 1;
6054 if (TREE_CODE (arg1) == COMPLEX_CST)
6055 return 0;
6056 if (TREE_CODE (arg0) == COMPLEX_CST)
6057 return 1;
6059 if (TREE_CONSTANT (arg1))
6060 return 0;
6061 if (TREE_CONSTANT (arg0))
6062 return 1;
6064 if (optimize_size)
6065 return 0;
6067 if (reorder && flag_evaluation_order
6068 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6069 return 0;
6071 if (DECL_P (arg1))
6072 return 0;
6073 if (DECL_P (arg0))
6074 return 1;
6076 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6077 for commutative and comparison operators. Ensuring a canonical
6078 form allows the optimizers to find additional redundancies without
6079 having to explicitly check for both orderings. */
6080 if (TREE_CODE (arg0) == SSA_NAME
6081 && TREE_CODE (arg1) == SSA_NAME
6082 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6083 return 1;
6085 return 0;
6088 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6089 ARG0 is extended to a wider type. */
6091 static tree
6092 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6094 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6095 tree arg1_unw;
6096 tree shorter_type, outer_type;
6097 tree min, max;
6098 bool above, below;
6100 if (arg0_unw == arg0)
6101 return NULL_TREE;
6102 shorter_type = TREE_TYPE (arg0_unw);
6104 #ifdef HAVE_canonicalize_funcptr_for_compare
6105 /* Disable this optimization if we're casting a function pointer
6106 type on targets that require function pointer canonicalization. */
6107 if (HAVE_canonicalize_funcptr_for_compare
6108 && TREE_CODE (shorter_type) == POINTER_TYPE
6109 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6110 return NULL_TREE;
6111 #endif
6113 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6114 return NULL_TREE;
6116 arg1_unw = get_unwidened (arg1, shorter_type);
6117 if (!arg1_unw)
6118 return NULL_TREE;
6120 /* If possible, express the comparison in the shorter mode. */
6121 if ((code == EQ_EXPR || code == NE_EXPR
6122 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6123 && (TREE_TYPE (arg1_unw) == shorter_type
6124 || (TREE_CODE (arg1_unw) == INTEGER_CST
6125 && TREE_CODE (shorter_type) == INTEGER_TYPE
6126 && int_fits_type_p (arg1_unw, shorter_type))))
6127 return fold_build2 (code, type, arg0_unw,
6128 fold_convert (shorter_type, arg1_unw));
6130 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6131 return NULL_TREE;
6133 /* If we are comparing with an integer that does not fit into the range
6134 of the shorter type, the result is known. */
6135 outer_type = TREE_TYPE (arg1_unw);
6136 min = lower_bound_in_type (outer_type, shorter_type);
6137 max = upper_bound_in_type (outer_type, shorter_type);
6139 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6140 max, arg1_unw));
6141 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6142 arg1_unw, min));
6144 switch (code)
6146 case EQ_EXPR:
6147 if (above || below)
6148 return omit_one_operand (type, integer_zero_node, arg0);
6149 break;
6151 case NE_EXPR:
6152 if (above || below)
6153 return omit_one_operand (type, integer_one_node, arg0);
6154 break;
6156 case LT_EXPR:
6157 case LE_EXPR:
6158 if (above)
6159 return omit_one_operand (type, integer_one_node, arg0);
6160 else if (below)
6161 return omit_one_operand (type, integer_zero_node, arg0);
6163 case GT_EXPR:
6164 case GE_EXPR:
6165 if (above)
6166 return omit_one_operand (type, integer_zero_node, arg0);
6167 else if (below)
6168 return omit_one_operand (type, integer_one_node, arg0);
6170 default:
6171 break;
6174 return NULL_TREE;
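/* Editor's note: a worked example (editor's sketch) of the widened
   comparison fold above.  With `signed char c', whose range is
   [-128, 127]:

     (int) c == 300   -->   false (300 lies above the range)
     (int) c != 300   -->   true
     (int) c <  200   -->   true  (200 lies above the range)
     (int) c == 100   -->   c == (signed char) 100 (done in the
                            narrower type, since 100 fits)  */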
6177 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6178 ARG0 just the signedness is changed. */
6180 static tree
6181 fold_sign_changed_comparison (enum tree_code code, tree type,
6182 tree arg0, tree arg1)
6184 tree arg0_inner, tmp;
6185 tree inner_type, outer_type;
6187 if (TREE_CODE (arg0) != NOP_EXPR
6188 && TREE_CODE (arg0) != CONVERT_EXPR)
6189 return NULL_TREE;
6191 outer_type = TREE_TYPE (arg0);
6192 arg0_inner = TREE_OPERAND (arg0, 0);
6193 inner_type = TREE_TYPE (arg0_inner);
6195 #ifdef HAVE_canonicalize_funcptr_for_compare
6196 /* Disable this optimization if we're casting a function pointer
6197 type on targets that require function pointer canonicalization. */
6198 if (HAVE_canonicalize_funcptr_for_compare
6199 && TREE_CODE (inner_type) == POINTER_TYPE
6200 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6201 return NULL_TREE;
6202 #endif
6204 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6205 return NULL_TREE;
6207 if (TREE_CODE (arg1) != INTEGER_CST
6208 && !((TREE_CODE (arg1) == NOP_EXPR
6209 || TREE_CODE (arg1) == CONVERT_EXPR)
6210 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6211 return NULL_TREE;
6213 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6214 && code != NE_EXPR
6215 && code != EQ_EXPR)
6216 return NULL_TREE;
6218 if (TREE_CODE (arg1) == INTEGER_CST)
6220 tmp = build_int_cst_wide (inner_type,
6221 TREE_INT_CST_LOW (arg1),
6222 TREE_INT_CST_HIGH (arg1));
6223 arg1 = force_fit_type (tmp, 0,
6224 TREE_OVERFLOW (arg1),
6225 TREE_CONSTANT_OVERFLOW (arg1));
6227 else
6228 arg1 = fold_convert (inner_type, arg1);
6230 return fold_build2 (code, type, arg0_inner, arg1);
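/* Editor's note: a hedged sketch (not part of the original source) of
   the sign-change fold above, guarded by #if 0.  For equality the
   cast is dropped and the constant refit into the inner type, since
   the conversion changes only signedness, never the bit pattern.
   Function names are hypothetical; a 32-bit unsigned is assumed.  */
#if 0
static int eq_naive  (unsigned u) { return (int) u == -1; }
static int eq_folded (unsigned u) { return u == 0xffffffffu; }
#endif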
6233 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6234 the step of the array. ADDR is the address. MULT is the multiplicative expression.
6235 If the function succeeds, the new address expression is returned. Otherwise
6236 NULL_TREE is returned. */
6238 static tree
6239 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6241 tree s, delta, step;
6242 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6243 tree ref = TREE_OPERAND (addr, 0), pref;
6244 tree ret, pos;
6245 tree itype;
6247 STRIP_NOPS (arg0);
6248 STRIP_NOPS (arg1);
6250 if (TREE_CODE (arg0) == INTEGER_CST)
6252 s = arg0;
6253 delta = arg1;
6255 else if (TREE_CODE (arg1) == INTEGER_CST)
6257 s = arg1;
6258 delta = arg0;
6260 else
6261 return NULL_TREE;
6263 for (;; ref = TREE_OPERAND (ref, 0))
6265 if (TREE_CODE (ref) == ARRAY_REF)
6267 step = array_ref_element_size (ref);
6269 if (TREE_CODE (step) != INTEGER_CST)
6270 continue;
6272 itype = TREE_TYPE (step);
6274 /* If the type sizes do not match, we might run into problems
6275 when one of them would overflow. */
6276 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6277 continue;
6279 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6280 continue;
6282 delta = fold_convert (itype, delta);
6283 break;
6286 if (!handled_component_p (ref))
6287 return NULL_TREE;
6290 /* We found a suitable array reference. So copy everything up to it,
6291 and replace the index. */
6293 pref = TREE_OPERAND (addr, 0);
6294 ret = copy_node (pref);
6295 pos = ret;
6297 while (pref != ref)
6299 pref = TREE_OPERAND (pref, 0);
6300 TREE_OPERAND (pos, 0) = copy_node (pref);
6301 pos = TREE_OPERAND (pos, 0);
6304 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6305 TREE_OPERAND (pos, 1),
6306 delta);
6308 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
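/* Editor's note: a worked example (editor's sketch) of the index
   motion above.  For `int a[N]' the step s is sizeof (int), so an
   address of the form `&a[i] + delta * sizeof (int)' (CODE ==
   PLUS_EXPR) is rewritten to `&a[i + delta]', keeping the offset
   arithmetic inside the ARRAY_REF where later passes can see it.  */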
6312 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6313 means A >= Y && A != MAX, but in this case we know that
6314 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6316 static tree
6317 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6319 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6321 if (TREE_CODE (bound) == LT_EXPR)
6322 a = TREE_OPERAND (bound, 0);
6323 else if (TREE_CODE (bound) == GT_EXPR)
6324 a = TREE_OPERAND (bound, 1);
6325 else
6326 return NULL_TREE;
6328 typea = TREE_TYPE (a);
6329 if (!INTEGRAL_TYPE_P (typea)
6330 && !POINTER_TYPE_P (typea))
6331 return NULL_TREE;
6333 if (TREE_CODE (ineq) == LT_EXPR)
6335 a1 = TREE_OPERAND (ineq, 1);
6336 y = TREE_OPERAND (ineq, 0);
6338 else if (TREE_CODE (ineq) == GT_EXPR)
6340 a1 = TREE_OPERAND (ineq, 0);
6341 y = TREE_OPERAND (ineq, 1);
6343 else
6344 return NULL_TREE;
6346 if (TREE_TYPE (a1) != typea)
6347 return NULL_TREE;
6349 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6350 if (!integer_onep (diff))
6351 return NULL_TREE;
6353 return fold_build2 (GE_EXPR, type, a, y);
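/* Editor's note: a worked example (editor's sketch) for the fold
   above.  In `a < x && a + 1 > y', the bound `a < x' guarantees
   a != MAX (because a < x <= MAX), so `a + 1' cannot wrap and
   `a + 1 > y' is equivalent to `a >= y'; the whole conjunction
   therefore becomes `a < x && a >= y'.  */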
6356 /* Fold complex addition when both components are accessible by parts.
6357 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6358 or MINUS_EXPR for subtraction. */
6360 static tree
6361 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6363 tree ar, ai, br, bi, rr, ri, inner_type;
6365 if (TREE_CODE (ac) == COMPLEX_EXPR)
6366 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6367 else if (TREE_CODE (ac) == COMPLEX_CST)
6368 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6369 else
6370 return NULL;
6372 if (TREE_CODE (bc) == COMPLEX_EXPR)
6373 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6374 else if (TREE_CODE (bc) == COMPLEX_CST)
6375 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6376 else
6377 return NULL;
6379 inner_type = TREE_TYPE (type);
6381 rr = fold_build2 (code, inner_type, ar, br);
6382 ri = fold_build2 (code, inner_type, ai, bi);
6384 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6387 /* Perform some simplifications of complex multiplication when one or more
6388 of the components are constants or zeros. Return non-null if successful. */
6390 tree
6391 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6393 tree rr, ri, inner_type, zero;
6394 bool ar0, ai0, br0, bi0, bi1;
6396 inner_type = TREE_TYPE (type);
6397 zero = NULL;
6399 if (SCALAR_FLOAT_TYPE_P (inner_type))
6401 ar0 = ai0 = br0 = bi0 = bi1 = false;
6403 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6405 if (TREE_CODE (ar) == REAL_CST
6406 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6407 ar0 = true, zero = ar;
6409 if (TREE_CODE (ai) == REAL_CST
6410 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6411 ai0 = true, zero = ai;
6413 if (TREE_CODE (br) == REAL_CST
6414 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6415 br0 = true, zero = br;
6417 if (TREE_CODE (bi) == REAL_CST)
6419 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6420 bi0 = true, zero = bi;
6421 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6422 bi1 = true;
6425 else
6427 ar0 = integer_zerop (ar);
6428 if (ar0)
6429 zero = ar;
6430 ai0 = integer_zerop (ai);
6431 if (ai0)
6432 zero = ai;
6433 br0 = integer_zerop (br);
6434 if (br0)
6435 zero = br;
6436 bi0 = integer_zerop (bi);
6437 if (bi0)
6439 zero = bi;
6440 bi1 = false;
6442 else
6443 bi1 = integer_onep (bi);
6446 /* We won't optimize anything below unless something is zero. */
6447 if (zero == NULL)
6448 return NULL;
6450 if (ai0 && br0 && bi1)
6452 rr = zero;
6453 ri = ar;
6455 else if (ai0 && bi0)
6457 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6458 ri = zero;
6460 else if (ai0 && br0)
6462 rr = zero;
6463 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6465 else if (ar0 && bi0)
6467 rr = zero;
6468 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6470 else if (ar0 && br0)
6472 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6473 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6474 ri = zero;
6476 else if (bi0)
6478 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6479 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6481 else if (ai0)
6483 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6484 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6486 else if (br0)
6488 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6489 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6490 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6492 else if (ar0)
6494 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6495 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6496 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6498 else
6499 return NULL;
6501 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
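/* Editor's note: a worked example (editor's sketch) of the zero-based
   simplifications above.  Multiplying a pure real a = (ar, 0) by the
   imaginary unit b = (0, 1) hits the `ai0 && br0 && bi1' case:
   rr = 0 and ri = ar, so (ar + 0i) * i folds to (0 + ar*i) with no
   multiplications emitted at all.  */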
6504 static tree
6505 fold_complex_mult (tree type, tree ac, tree bc)
6507 tree ar, ai, br, bi;
6509 if (TREE_CODE (ac) == COMPLEX_EXPR)
6510 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6511 else if (TREE_CODE (ac) == COMPLEX_CST)
6512 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6513 else
6514 return NULL;
6516 if (TREE_CODE (bc) == COMPLEX_EXPR)
6517 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6518 else if (TREE_CODE (bc) == COMPLEX_CST)
6519 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6520 else
6521 return NULL;
6523 return fold_complex_mult_parts (type, ar, ai, br, bi);
6526 /* Perform some simplifications of complex division when one or more of
6527 the components are constants or zeros. Return non-null if successful. */
6529 tree
6530 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6531 enum tree_code code)
6533 tree rr, ri, inner_type, zero;
6534 bool ar0, ai0, br0, bi0, bi1;
6536 inner_type = TREE_TYPE (type);
6537 zero = NULL;
6539 if (SCALAR_FLOAT_TYPE_P (inner_type))
6541 ar0 = ai0 = br0 = bi0 = bi1 = false;
6543 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6545 if (TREE_CODE (ar) == REAL_CST
6546 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6547 ar0 = true, zero = ar;
6549 if (TREE_CODE (ai) == REAL_CST
6550 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6551 ai0 = true, zero = ai;
6553 if (TREE_CODE (br) == REAL_CST
6554 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6555 br0 = true, zero = br;
6557 if (TREE_CODE (bi) == REAL_CST)
6559 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6560 bi0 = true, zero = bi;
6561 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6562 bi1 = true;
6565 else
6567 ar0 = integer_zerop (ar);
6568 if (ar0)
6569 zero = ar;
6570 ai0 = integer_zerop (ai);
6571 if (ai0)
6572 zero = ai;
6573 br0 = integer_zerop (br);
6574 if (br0)
6575 zero = br;
6576 bi0 = integer_zerop (bi);
6577 if (bi0)
6579 zero = bi;
6580 bi1 = false;
6582 else
6583 bi1 = integer_onep (bi);
6586 /* We won't optimize anything below unless something is zero. */
6587 if (zero == NULL)
6588 return NULL;
6590 if (ai0 && bi0)
6592 rr = fold_build2 (code, inner_type, ar, br);
6593 ri = zero;
6595 else if (ai0 && br0)
6597 rr = zero;
6598 ri = fold_build2 (code, inner_type, ar, bi);
6599 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6601 else if (ar0 && bi0)
6603 rr = zero;
6604 ri = fold_build2 (code, inner_type, ai, br);
6606 else if (ar0 && br0)
6608 rr = fold_build2 (code, inner_type, ai, bi);
6609 ri = zero;
6611 else if (bi0)
6613 rr = fold_build2 (code, inner_type, ar, br);
6614 ri = fold_build2 (code, inner_type, ai, br);
6616 else if (br0)
6618 rr = fold_build2 (code, inner_type, ai, bi);
6619 ri = fold_build2 (code, inner_type, ar, bi);
6620 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6622 else
6623 return NULL;
6625 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6628 static tree
6629 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6631 tree ar, ai, br, bi;
6633 if (TREE_CODE (ac) == COMPLEX_EXPR)
6634 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6635 else if (TREE_CODE (ac) == COMPLEX_CST)
6636 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6637 else
6638 return NULL;
6640 if (TREE_CODE (bc) == COMPLEX_EXPR)
6641 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6642 else if (TREE_CODE (bc) == COMPLEX_CST)
6643 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6644 else
6645 return NULL;
6647 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6650 /* Fold a unary expression of code CODE and type TYPE with operand
6651 OP0. Return the folded expression if folding is successful.
6652 Otherwise, return NULL_TREE. */
6654 static tree
6655 fold_unary (enum tree_code code, tree type, tree op0)
6657 tree tem;
6658 tree arg0;
6659 enum tree_code_class kind = TREE_CODE_CLASS (code);
6661 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6662 && TREE_CODE_LENGTH (code) == 1);
6664 arg0 = op0;
6665 if (arg0)
6667 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6669 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6670 STRIP_SIGN_NOPS (arg0);
6672 else
6674 /* Strip any conversions that don't change the mode. This
6675 is safe for every expression, except for a comparison
6676 expression because its signedness is derived from its
6677 operands.
6679 Note that this is done as an internal manipulation within
6680 the constant folder, in order to find the simplest
6681 representation of the arguments so that their form can be
6682 studied. In any case, the appropriate type conversions
6683 should be put back in the tree that will get out of the
6684 constant folder. */
6685 STRIP_NOPS (arg0);
6689 if (TREE_CODE_CLASS (code) == tcc_unary)
6691 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6692 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6693 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6694 else if (TREE_CODE (arg0) == COND_EXPR)
6696 tree arg01 = TREE_OPERAND (arg0, 1);
6697 tree arg02 = TREE_OPERAND (arg0, 2);
6698 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6699 arg01 = fold_build1 (code, type, arg01);
6700 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6701 arg02 = fold_build1 (code, type, arg02);
6702 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6703 arg01, arg02);
6705 /* If this was a conversion, and all we did was to move it
6706 inside the COND_EXPR, bring it back out. But leave it if
6707 it is a conversion from integer to integer and the
6708 result precision is no wider than a word since such a
6709 conversion is cheap and may be optimized away by combine,
6710 while it couldn't if it were outside the COND_EXPR. Then return
6711 so we don't get into an infinite recursion loop taking the
6712 conversion out and then back in. */
6714 if ((code == NOP_EXPR || code == CONVERT_EXPR
6715 || code == NON_LVALUE_EXPR)
6716 && TREE_CODE (tem) == COND_EXPR
6717 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6718 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6719 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6720 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6721 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6722 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6723 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6724 && (INTEGRAL_TYPE_P
6725 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6726 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6727 || flag_syntax_only))
6728 tem = build1 (code, type,
6729 build3 (COND_EXPR,
6730 TREE_TYPE (TREE_OPERAND
6731 (TREE_OPERAND (tem, 1), 0)),
6732 TREE_OPERAND (tem, 0),
6733 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6734 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6735 return tem;
6737 else if (COMPARISON_CLASS_P (arg0))
6739 if (TREE_CODE (type) == BOOLEAN_TYPE)
6741 arg0 = copy_node (arg0);
6742 TREE_TYPE (arg0) = type;
6743 return arg0;
6745 else if (TREE_CODE (type) != INTEGER_TYPE)
6746 return fold_build3 (COND_EXPR, type, arg0,
6747 fold_build1 (code, type,
6748 integer_one_node),
6749 fold_build1 (code, type,
6750 integer_zero_node));
6754 switch (code)
6756 case NOP_EXPR:
6757 case FLOAT_EXPR:
6758 case CONVERT_EXPR:
6759 case FIX_TRUNC_EXPR:
6760 case FIX_CEIL_EXPR:
6761 case FIX_FLOOR_EXPR:
6762 case FIX_ROUND_EXPR:
6763 if (TREE_TYPE (op0) == type)
6764 return op0;
6766 /* Handle cases of two conversions in a row. */
6767 if (TREE_CODE (op0) == NOP_EXPR
6768 || TREE_CODE (op0) == CONVERT_EXPR)
6770 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6771 tree inter_type = TREE_TYPE (op0);
6772 int inside_int = INTEGRAL_TYPE_P (inside_type);
6773 int inside_ptr = POINTER_TYPE_P (inside_type);
6774 int inside_float = FLOAT_TYPE_P (inside_type);
6775 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6776 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6777 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6778 int inter_int = INTEGRAL_TYPE_P (inter_type);
6779 int inter_ptr = POINTER_TYPE_P (inter_type);
6780 int inter_float = FLOAT_TYPE_P (inter_type);
6781 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6782 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6783 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6784 int final_int = INTEGRAL_TYPE_P (type);
6785 int final_ptr = POINTER_TYPE_P (type);
6786 int final_float = FLOAT_TYPE_P (type);
6787 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6788 unsigned int final_prec = TYPE_PRECISION (type);
6789 int final_unsignedp = TYPE_UNSIGNED (type);
6791 /* In addition to the cases of two conversions in a row
6792 handled below, if we are converting something to its own
6793 type via an object of identical or wider precision, neither
6794 conversion is needed. */
6795 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6796 && ((inter_int && final_int) || (inter_float && final_float))
6797 && inter_prec >= final_prec)
6798 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6800 /* Likewise, if the intermediate and final types are either both
6801 float or both integer, we don't need the middle conversion if
6802 it is wider than the final type and doesn't change the signedness
6803 (for integers). Avoid this if the final type is a pointer
6804 since then we sometimes need the inner conversion. Likewise if
6805 the outer has a precision not equal to the size of its mode. */
6806 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6807 || (inter_float && inside_float)
6808 || (inter_vec && inside_vec))
6809 && inter_prec >= inside_prec
6810 && (inter_float || inter_vec
6811 || inter_unsignedp == inside_unsignedp)
6812 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6813 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6814 && ! final_ptr
6815 && (! final_vec || inter_prec == inside_prec))
6816 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6818 /* If we have a sign-extension of a zero-extended value, we can
6819 replace that by a single zero-extension. */
6820 if (inside_int && inter_int && final_int
6821 && inside_prec < inter_prec && inter_prec < final_prec
6822 && inside_unsignedp && !inter_unsignedp)
6823 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6825 /* Two conversions in a row are not needed unless:
6826 - some conversion is floating-point (overstrict for now), or
6827 - some conversion is a vector (overstrict for now), or
6828 - the intermediate type is narrower than both initial and
6829 final, or
6830 - the intermediate type and innermost type differ in signedness,
6831 and the outermost type is wider than the intermediate, or
6832 - the initial type is a pointer type and the precisions of the
6833 intermediate and final types differ, or
6834 - the final type is a pointer type and the precisions of the
6835 initial and intermediate types differ. */
6836 if (! inside_float && ! inter_float && ! final_float
6837 && ! inside_vec && ! inter_vec && ! final_vec
6838 && (inter_prec > inside_prec || inter_prec > final_prec)
6839 && ! (inside_int && inter_int
6840 && inter_unsignedp != inside_unsignedp
6841 && inter_prec < final_prec)
6842 && ((inter_unsignedp && inter_prec > inside_prec)
6843 == (final_unsignedp && final_prec > inter_prec))
6844 && ! (inside_ptr && inter_prec != final_prec)
6845 && ! (final_ptr && inside_prec != inter_prec)
6846 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6847 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6848 && ! final_ptr)
6849 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6852 if (TREE_CODE (op0) == MODIFY_EXPR
6853 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6854 /* Detect assigning a bitfield. */
6855 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6856 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6858 /* Don't leave an assignment inside a conversion
6859 unless assigning a bitfield. */
6860 tem = build1 (code, type, TREE_OPERAND (op0, 1));
6861 /* First do the assignment, then return converted constant. */
6862 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
6863 TREE_NO_WARNING (tem) = 1;
6864 TREE_USED (tem) = 1;
6865 return tem;
6868 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6869 constant (if x has signed type, the sign bit cannot be set
6870 in c). This folds extension into the BIT_AND_EXPR. */
6871 if (INTEGRAL_TYPE_P (type)
6872 && TREE_CODE (type) != BOOLEAN_TYPE
6873 && TREE_CODE (op0) == BIT_AND_EXPR
6874 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6876 tree and = op0;
6877 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6878 int change = 0;
6880 if (TYPE_UNSIGNED (TREE_TYPE (and))
6881 || (TYPE_PRECISION (type)
6882 <= TYPE_PRECISION (TREE_TYPE (and))))
6883 change = 1;
6884 else if (TYPE_PRECISION (TREE_TYPE (and1))
6885 <= HOST_BITS_PER_WIDE_INT
6886 && host_integerp (and1, 1))
6888 unsigned HOST_WIDE_INT cst;
6890 cst = tree_low_cst (and1, 1);
6891 cst &= (HOST_WIDE_INT) -1
6892 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6893 change = (cst == 0);
6894 #ifdef LOAD_EXTEND_OP
6895 if (change
6896 && !flag_syntax_only
6897 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6898 == ZERO_EXTEND))
6900 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6901 and0 = fold_convert (uns, and0);
6902 and1 = fold_convert (uns, and1);
6904 #endif
6906 if (change)
6908 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6909 TREE_INT_CST_HIGH (and1));
6910 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6911 TREE_CONSTANT_OVERFLOW (and1));
6912 return fold_build2 (BIT_AND_EXPR, type,
6913 fold_convert (type, and0), tem);
6917 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6918 T2 being pointers to types of the same size. */
6919 if (POINTER_TYPE_P (type)
6920 && BINARY_CLASS_P (arg0)
6921 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6922 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6924 tree arg00 = TREE_OPERAND (arg0, 0);
6925 tree t0 = type;
6926 tree t1 = TREE_TYPE (arg00);
6927 tree tt0 = TREE_TYPE (t0);
6928 tree tt1 = TREE_TYPE (t1);
6929 tree s0 = TYPE_SIZE (tt0);
6930 tree s1 = TYPE_SIZE (tt1);
6932 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6933 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6934 TREE_OPERAND (arg0, 1));
6937 tem = fold_convert_const (code, type, arg0);
6938 return tem ? tem : NULL_TREE;
6940 case VIEW_CONVERT_EXPR:
6941 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6942 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6943 return NULL_TREE;
6945 case NEGATE_EXPR:
6946 if (negate_expr_p (arg0))
6947 return fold_convert (type, negate_expr (arg0));
6948 /* Convert - (~A) to A + 1. */
6949 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6950 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6951 build_int_cst (type, 1));
6952 return NULL_TREE;
6954 case ABS_EXPR:
6955 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6956 return fold_abs_const (arg0, type);
6957 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6958 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6959 /* Convert fabs((double)float) into (double)fabsf(float). */
6960 else if (TREE_CODE (arg0) == NOP_EXPR
6961 && TREE_CODE (type) == REAL_TYPE)
6963 tree targ0 = strip_float_extensions (arg0);
6964 if (targ0 != arg0)
6965 return fold_convert (type, fold_build1 (ABS_EXPR,
6966 TREE_TYPE (targ0),
6967 targ0));
6969 else if (tree_expr_nonnegative_p (arg0))
6970 return arg0;
6972 /* Strip sign ops from argument. */
6973 if (TREE_CODE (type) == REAL_TYPE)
6975 tem = fold_strip_sign_ops (arg0);
6976 if (tem)
6977 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6979 return NULL_TREE;
6981 case CONJ_EXPR:
6982 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6983 return fold_convert (type, arg0);
6984 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6985 return build2 (COMPLEX_EXPR, type,
6986 TREE_OPERAND (arg0, 0),
6987 negate_expr (TREE_OPERAND (arg0, 1)));
6988 else if (TREE_CODE (arg0) == COMPLEX_CST)
6989 return build_complex (type, TREE_REALPART (arg0),
6990 negate_expr (TREE_IMAGPART (arg0)));
6991 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6992 return fold_build2 (TREE_CODE (arg0), type,
6993 fold_build1 (CONJ_EXPR, type,
6994 TREE_OPERAND (arg0, 0)),
6995 fold_build1 (CONJ_EXPR, type,
6996 TREE_OPERAND (arg0, 1)));
6997 else if (TREE_CODE (arg0) == CONJ_EXPR)
6998 return TREE_OPERAND (arg0, 0);
6999 return NULL_TREE;
7001 case BIT_NOT_EXPR:
7002 if (TREE_CODE (arg0) == INTEGER_CST)
7003 return fold_not_const (arg0, type);
7004 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7005 return TREE_OPERAND (arg0, 0);
7006 /* Convert ~ (-A) to A - 1. */
7007 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7008 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7009 build_int_cst (type, 1));
7010 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7011 else if (INTEGRAL_TYPE_P (type)
7012 && ((TREE_CODE (arg0) == MINUS_EXPR
7013 && integer_onep (TREE_OPERAND (arg0, 1)))
7014 || (TREE_CODE (arg0) == PLUS_EXPR
7015 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7016 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7017 return NULL_TREE;
7019 case TRUTH_NOT_EXPR:
7020 /* The argument to invert_truthvalue must have Boolean type. */
7021 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7022 arg0 = fold_convert (boolean_type_node, arg0);
7024 /* Note that the operand of this must be an int
7025 and its values must be 0 or 1.
7026 ("true" is a fixed value perhaps depending on the language,
7027 but we don't handle values other than 1 correctly yet.) */
7028 tem = invert_truthvalue (arg0);
7029 /* Avoid infinite recursion. */
7030 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7031 return NULL_TREE;
7032 return fold_convert (type, tem);
7034 case REALPART_EXPR:
7035 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7036 return NULL_TREE;
7037 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7038 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7039 TREE_OPERAND (arg0, 1));
7040 else if (TREE_CODE (arg0) == COMPLEX_CST)
7041 return TREE_REALPART (arg0);
7042 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7043 return fold_build2 (TREE_CODE (arg0), type,
7044 fold_build1 (REALPART_EXPR, type,
7045 TREE_OPERAND (arg0, 0)),
7046 fold_build1 (REALPART_EXPR, type,
7047 TREE_OPERAND (arg0, 1)));
7048 return NULL_TREE;
7050 case IMAGPART_EXPR:
7051 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7052 return fold_convert (type, integer_zero_node);
7053 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7054 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7055 TREE_OPERAND (arg0, 0));
7056 else if (TREE_CODE (arg0) == COMPLEX_CST)
7057 return TREE_IMAGPART (arg0);
7058 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7059 return fold_build2 (TREE_CODE (arg0), type,
7060 fold_build1 (IMAGPART_EXPR, type,
7061 TREE_OPERAND (arg0, 0)),
7062 fold_build1 (IMAGPART_EXPR, type,
7063 TREE_OPERAND (arg0, 1)));
7064 return NULL_TREE;
7066 default:
7067 return NULL_TREE;
7068 } /* switch (code) */
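/* Editor's note: a hedged sketch (not part of the original source) of
   two integer folds handled by fold_unary above, guarded by #if 0.
   Each pair is equivalent on two's-complement integers; the names
   are hypothetical.  */
#if 0
/* - (~a) becomes a + 1 (the NEGATE_EXPR case).  */
static int neg_of_not (int a) { return -(~a); }
static int neg_folded (int a) { return a + 1; }
/* ~ (-a) becomes a - 1 (the BIT_NOT_EXPR case).  */
static int not_of_neg (int a) { return ~(-a); }
static int not_folded (int a) { return a - 1; }
#endif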
7071 /* Fold a binary expression of code CODE and type TYPE with operands
7072 OP0 and OP1. Return the folded expression if folding is
7073 successful. Otherwise, return NULL_TREE. */
7075 static tree
7076 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7078 tree t1 = NULL_TREE;
7079 tree tem;
7080 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7081 enum tree_code_class kind = TREE_CODE_CLASS (code);
7083 /* WINS will be nonzero when the switch is done
7084 if all operands are constant. */
7085 int wins = 1;
7087 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7088 && TREE_CODE_LENGTH (code) == 2);
7090 arg0 = op0;
7091 arg1 = op1;
7093 if (arg0)
7095 tree subop;
7097 /* Strip any conversions that don't change the mode. This is
7098 safe for every expression, except for a comparison expression
7099 because its signedness is derived from its operands. So, in
7100 the latter case, only strip conversions that don't change the
7101 signedness.
7103 Note that this is done as an internal manipulation within the
7104 constant folder, in order to find the simplest representation
7105 of the arguments so that their form can be studied. In any
7106 case, the appropriate type conversions should be put back in
7107 the tree that will get out of the constant folder. */
7108 if (kind == tcc_comparison)
7109 STRIP_SIGN_NOPS (arg0);
7110 else
7111 STRIP_NOPS (arg0);
7113 if (TREE_CODE (arg0) == COMPLEX_CST)
7114 subop = TREE_REALPART (arg0);
7115 else
7116 subop = arg0;
7118 if (TREE_CODE (subop) != INTEGER_CST
7119 && TREE_CODE (subop) != REAL_CST)
7120 /* Note that TREE_CONSTANT isn't enough:
7121 static var addresses are constant but we can't
7122 do arithmetic on them. */
7123 wins = 0;
7126 if (arg1)
7128 tree subop;
7130 /* Strip any conversions that don't change the mode. This is
7131 safe for every expression, except for a comparison expression
7132 because its signedness is derived from its operands. So, in
7133 the latter case, only strip conversions that don't change the
7134 signedness.
7136 Note that this is done as an internal manipulation within the
7137 constant folder, in order to find the simplest representation
7138 of the arguments so that their form can be studied. In any
7139 case, the appropriate type conversions should be put back in
7140 the tree that will get out of the constant folder. */
7141 if (kind == tcc_comparison)
7142 STRIP_SIGN_NOPS (arg1);
7143 else
7144 STRIP_NOPS (arg1);
7146 if (TREE_CODE (arg1) == COMPLEX_CST)
7147 subop = TREE_REALPART (arg1);
7148 else
7149 subop = arg1;
7151 if (TREE_CODE (subop) != INTEGER_CST
7152 && TREE_CODE (subop) != REAL_CST)
7153 /* Note that TREE_CONSTANT isn't enough:
7154 static var addresses are constant but we can't
7155 do arithmetic on them. */
7156 wins = 0;
7159 /* If this is a commutative operation, and ARG0 is a constant, move it
7160 to ARG1 to reduce the number of tests below. */
7161 if (commutative_tree_code (code)
7162 && tree_swap_operands_p (arg0, arg1, true))
7163 return fold_build2 (code, type, op1, op0);
7165 /* Now WINS is set as described above,
7166 ARG0 is the first operand of EXPR,
7167 and ARG1 is the second operand (if it has more than one operand).
7169 First check for cases where an arithmetic operation is applied to a
7170 compound, conditional, or comparison operation. Push the arithmetic
7171 operation inside the compound or conditional to see if any folding
7172 can then be done. Convert comparison to conditional for this purpose.
7173 The also optimizes non-constant cases that used to be done in
7174 expand_expr.
7176 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
7177 where one of the operands is a comparison and the other is a comparison, a
7178 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7179 code below would make the expression more complex. Change it to a
7180 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7181 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7183 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7184 || code == EQ_EXPR || code == NE_EXPR)
7185 && ((truth_value_p (TREE_CODE (arg0))
7186 && (truth_value_p (TREE_CODE (arg1))
7187 || (TREE_CODE (arg1) == BIT_AND_EXPR
7188 && integer_onep (TREE_OPERAND (arg1, 1)))))
7189 || (truth_value_p (TREE_CODE (arg1))
7190 && (truth_value_p (TREE_CODE (arg0))
7191 || (TREE_CODE (arg0) == BIT_AND_EXPR
7192 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7194 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7195 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7196 : TRUTH_XOR_EXPR,
7197 boolean_type_node,
7198 fold_convert (boolean_type_node, arg0),
7199 fold_convert (boolean_type_node, arg1));
7201 if (code == EQ_EXPR)
7202 tem = invert_truthvalue (tem);
7204 return fold_convert (type, tem);
7207 if (TREE_CODE_CLASS (code) == tcc_comparison
7208 && TREE_CODE (arg0) == COMPOUND_EXPR)
7209 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7210 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7211 else if (TREE_CODE_CLASS (code) == tcc_comparison
7212 && TREE_CODE (arg1) == COMPOUND_EXPR)
7213 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7214 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7215 else if (TREE_CODE_CLASS (code) == tcc_binary
7216 || TREE_CODE_CLASS (code) == tcc_comparison)
7218 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7219 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7220 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7221 arg1));
7222 if (TREE_CODE (arg1) == COMPOUND_EXPR
7223 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7224 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7225 fold_build2 (code, type,
7226 arg0, TREE_OPERAND (arg1, 1)));
7228 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7230 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7231 arg0, arg1,
7232 /*cond_first_p=*/1);
7233 if (tem != NULL_TREE)
7234 return tem;
7237 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7239 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7240 arg1, arg0,
7241 /*cond_first_p=*/0);
7242 if (tem != NULL_TREE)
7243 return tem;
7247 switch (code)
7249 case PLUS_EXPR:
7250 /* A + (-B) -> A - B */
7251 if (TREE_CODE (arg1) == NEGATE_EXPR)
7252 return fold_build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7253 /* (-A) + B -> B - A */
7254 if (TREE_CODE (arg0) == NEGATE_EXPR
7255 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7256 return fold_build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0));
7257 /* Convert ~A + 1 to -A. */
7258 if (INTEGRAL_TYPE_P (type)
7259 && TREE_CODE (arg0) == BIT_NOT_EXPR
7260 && integer_onep (arg1))
7261 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7263 if (TREE_CODE (type) == COMPLEX_TYPE)
7265 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7266 if (tem)
7267 return tem;
7270 if (! FLOAT_TYPE_P (type))
7272 if (integer_zerop (arg1))
7273 return non_lvalue (fold_convert (type, arg0));
7275 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7276 with a constant, and the two constants have no bits in common,
7277 we should treat this as a BIT_IOR_EXPR since this may produce more
7278 simplifications. */
7279 if (TREE_CODE (arg0) == BIT_AND_EXPR
7280 && TREE_CODE (arg1) == BIT_AND_EXPR
7281 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7282 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7283 && integer_zerop (const_binop (BIT_AND_EXPR,
7284 TREE_OPERAND (arg0, 1),
7285 TREE_OPERAND (arg1, 1), 0)))
7287 code = BIT_IOR_EXPR;
7288 goto bit_ior;
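/* Editorial illustration (an added sketch, not part of the original
   source): because the two mask constants share no bits, + cannot
   carry and behaves exactly like |, e.g. for unsigned x

     (x & 0xf0) + (x & 0x0f)

   is handled as  (x & 0xf0) | (x & 0x0f),  which the BIT_IOR_EXPR code
   below can fold further (here to  x & 0xff).  */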
7291 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7292 (plus (plus (mult) (mult)) (foo)) so that we can
7293 take advantage of the factoring cases below. */
7294 if (((TREE_CODE (arg0) == PLUS_EXPR
7295 || TREE_CODE (arg0) == MINUS_EXPR)
7296 && TREE_CODE (arg1) == MULT_EXPR)
7297 || ((TREE_CODE (arg1) == PLUS_EXPR
7298 || TREE_CODE (arg1) == MINUS_EXPR)
7299 && TREE_CODE (arg0) == MULT_EXPR))
7301 tree parg0, parg1, parg, marg;
7302 enum tree_code pcode;
7304 if (TREE_CODE (arg1) == MULT_EXPR)
7305 parg = arg0, marg = arg1;
7306 else
7307 parg = arg1, marg = arg0;
7308 pcode = TREE_CODE (parg);
7309 parg0 = TREE_OPERAND (parg, 0);
7310 parg1 = TREE_OPERAND (parg, 1);
7311 STRIP_NOPS (parg0);
7312 STRIP_NOPS (parg1);
7314 if (TREE_CODE (parg0) == MULT_EXPR
7315 && TREE_CODE (parg1) != MULT_EXPR)
7316 return fold_build2 (pcode, type,
7317 fold_build2 (PLUS_EXPR, type,
7318 fold_convert (type, parg0),
7319 fold_convert (type, marg)),
7320 fold_convert (type, parg1));
7321 if (TREE_CODE (parg0) != MULT_EXPR
7322 && TREE_CODE (parg1) == MULT_EXPR)
7323 return fold_build2 (PLUS_EXPR, type,
7324 fold_convert (type, parg0),
7325 fold_build2 (pcode, type,
7326 fold_convert (type, marg),
7327 fold_convert (type,
7328 parg1)));
7331 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7333 tree arg00, arg01, arg10, arg11;
7334 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7336 /* (A * C) + (B * C) -> (A+B) * C.
7337 We are most concerned about the case where C is a constant,
7338 but other combinations show up during loop reduction. Since
7339 it is not difficult, try all four possibilities. */
7341 arg00 = TREE_OPERAND (arg0, 0);
7342 arg01 = TREE_OPERAND (arg0, 1);
7343 arg10 = TREE_OPERAND (arg1, 0);
7344 arg11 = TREE_OPERAND (arg1, 1);
7345 same = NULL_TREE;
7347 if (operand_equal_p (arg01, arg11, 0))
7348 same = arg01, alt0 = arg00, alt1 = arg10;
7349 else if (operand_equal_p (arg00, arg10, 0))
7350 same = arg00, alt0 = arg01, alt1 = arg11;
7351 else if (operand_equal_p (arg00, arg11, 0))
7352 same = arg00, alt0 = arg01, alt1 = arg10;
7353 else if (operand_equal_p (arg01, arg10, 0))
7354 same = arg01, alt0 = arg00, alt1 = arg11;
7356 /* No identical multiplicands; see if we can find a common
7357 power-of-two factor in non-power-of-two multiplies. This
7358 can help in multi-dimensional array access. */
7359 else if (TREE_CODE (arg01) == INTEGER_CST
7360 && TREE_CODE (arg11) == INTEGER_CST
7361 && TREE_INT_CST_HIGH (arg01) == 0
7362 && TREE_INT_CST_HIGH (arg11) == 0)
7364 HOST_WIDE_INT int01, int11, tmp;
7365 int01 = TREE_INT_CST_LOW (arg01);
7366 int11 = TREE_INT_CST_LOW (arg11);
7368 /* Move min of absolute values to int11. */
7369 if ((int01 >= 0 ? int01 : -int01)
7370 < (int11 >= 0 ? int11 : -int11))
7372 tmp = int01, int01 = int11, int11 = tmp;
7373 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7374 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7377 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7379 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7380 build_int_cst (NULL_TREE,
7381 int01 / int11));
7382 alt1 = arg10;
7383 same = arg11;
7387 if (same)
7388 return fold_build2 (MULT_EXPR, type,
7389 fold_build2 (PLUS_EXPR, type,
7390 fold_convert (type, alt0),
7391 fold_convert (type, alt1)),
7392 same);
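/* Editorial illustration (an added sketch, not part of the original
   source): the four operand_equal_p tests cover every position of the
   shared multiplicand, e.g. for ints a, b, c

     a*c + b*c   folds to   (a + b) * c

   and the power-of-two branch rewrites  i*12 + j*4  as
   (i*3 + j) * 4,  the shape that multi-dimensional array indexing
   tends to produce.  */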
7395 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7396 of the array. The loop optimizer sometimes produces this type of
7397 expression. */
7398 if (TREE_CODE (arg0) == ADDR_EXPR
7399 && TREE_CODE (arg1) == MULT_EXPR)
7401 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7402 if (tem)
7403 return fold_convert (type, fold (tem));
7405 else if (TREE_CODE (arg1) == ADDR_EXPR
7406 && TREE_CODE (arg0) == MULT_EXPR)
7408 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7409 if (tem)
7410 return fold_convert (type, fold (tem));
7413 else
7415 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7416 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7417 return non_lvalue (fold_convert (type, arg0));
7419 /* Likewise if the operands are reversed. */
7420 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7421 return non_lvalue (fold_convert (type, arg1));
7423 /* Convert X + -C into X - C. */
7424 if (TREE_CODE (arg1) == REAL_CST
7425 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7427 tem = fold_negate_const (arg1, type);
7428 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7429 return fold_build2 (MINUS_EXPR, type,
7430 fold_convert (type, arg0),
7431 fold_convert (type, tem));
7434 /* Convert x+x into x*2.0. */
7435 if (operand_equal_p (arg0, arg1, 0)
7436 && SCALAR_FLOAT_TYPE_P (type))
7437 return fold_build2 (MULT_EXPR, type, arg0,
7438 build_real (type, dconst2));
7440 /* Convert x*c+x into x*(c+1). */
7441 if (flag_unsafe_math_optimizations
7442 && TREE_CODE (arg0) == MULT_EXPR
7443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7444 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7445 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7447 REAL_VALUE_TYPE c;
7449 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7450 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7451 return fold_build2 (MULT_EXPR, type, arg1,
7452 build_real (type, c));
7455 /* Convert x+x*c into x*(c+1). */
7456 if (flag_unsafe_math_optimizations
7457 && TREE_CODE (arg1) == MULT_EXPR
7458 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7459 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7460 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7462 REAL_VALUE_TYPE c;
7464 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7465 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7466 return fold_build2 (MULT_EXPR, type, arg0,
7467 build_real (type, c));
7470 /* Convert x*c1+x*c2 into x*(c1+c2). */
7471 if (flag_unsafe_math_optimizations
7472 && TREE_CODE (arg0) == MULT_EXPR
7473 && TREE_CODE (arg1) == MULT_EXPR
7474 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7475 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7476 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7477 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7478 && operand_equal_p (TREE_OPERAND (arg0, 0),
7479 TREE_OPERAND (arg1, 0), 0))
7481 REAL_VALUE_TYPE c1, c2;
7483 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7484 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7485 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7486 return fold_build2 (MULT_EXPR, type,
7487 TREE_OPERAND (arg0, 0),
7488 build_real (type, c1));
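/* Editorial illustration (an added sketch, not part of the original
   source): under -funsafe-math-optimizations the three folds above
   merge constant factors at compile time, e.g. for double x

     x*2.0 + x       folds to   x*3.0
     x*2.0 + x*3.0   folds to   x*5.0

   the flag is required because the refactored form can round
   differently from the original.  */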
7490 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7491 if (flag_unsafe_math_optimizations
7492 && TREE_CODE (arg1) == PLUS_EXPR
7493 && TREE_CODE (arg0) != MULT_EXPR)
7495 tree tree10 = TREE_OPERAND (arg1, 0);
7496 tree tree11 = TREE_OPERAND (arg1, 1);
7497 if (TREE_CODE (tree11) == MULT_EXPR
7498 && TREE_CODE (tree10) == MULT_EXPR)
7500 tree tree0;
7501 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7502 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7505 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7506 if (flag_unsafe_math_optimizations
7507 && TREE_CODE (arg0) == PLUS_EXPR
7508 && TREE_CODE (arg1) != MULT_EXPR)
7510 tree tree00 = TREE_OPERAND (arg0, 0);
7511 tree tree01 = TREE_OPERAND (arg0, 1);
7512 if (TREE_CODE (tree01) == MULT_EXPR
7513 && TREE_CODE (tree00) == MULT_EXPR)
7515 tree tree0;
7516 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7517 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7522 bit_rotate:
7523 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits,
7524 provided A is unsigned and C1+C2 is the size of A. */
7525 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits,
7526 provided A is unsigned and Z is the size of A. */
7528 enum tree_code code0, code1;
7529 code0 = TREE_CODE (arg0);
7530 code1 = TREE_CODE (arg1);
7531 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7532 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7533 && operand_equal_p (TREE_OPERAND (arg0, 0),
7534 TREE_OPERAND (arg1, 0), 0)
7535 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7537 tree tree01, tree11;
7538 enum tree_code code01, code11;
7540 tree01 = TREE_OPERAND (arg0, 1);
7541 tree11 = TREE_OPERAND (arg1, 1);
7542 STRIP_NOPS (tree01);
7543 STRIP_NOPS (tree11);
7544 code01 = TREE_CODE (tree01);
7545 code11 = TREE_CODE (tree11);
7546 if (code01 == INTEGER_CST
7547 && code11 == INTEGER_CST
7548 && TREE_INT_CST_HIGH (tree01) == 0
7549 && TREE_INT_CST_HIGH (tree11) == 0
7550 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7551 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7552 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7553 code0 == LSHIFT_EXPR ? tree01 : tree11);
7554 else if (code11 == MINUS_EXPR)
7556 tree tree110, tree111;
7557 tree110 = TREE_OPERAND (tree11, 0);
7558 tree111 = TREE_OPERAND (tree11, 1);
7559 STRIP_NOPS (tree110);
7560 STRIP_NOPS (tree111);
7561 if (TREE_CODE (tree110) == INTEGER_CST
7562 && 0 == compare_tree_int (tree110,
7563 TYPE_PRECISION
7564 (TREE_TYPE (TREE_OPERAND
7565 (arg0, 0))))
7566 && operand_equal_p (tree01, tree111, 0))
7567 return build2 ((code0 == LSHIFT_EXPR
7568 ? LROTATE_EXPR
7569 : RROTATE_EXPR),
7570 type, TREE_OPERAND (arg0, 0), tree01);
7572 else if (code01 == MINUS_EXPR)
7574 tree tree010, tree011;
7575 tree010 = TREE_OPERAND (tree01, 0);
7576 tree011 = TREE_OPERAND (tree01, 1);
7577 STRIP_NOPS (tree010);
7578 STRIP_NOPS (tree011);
7579 if (TREE_CODE (tree010) == INTEGER_CST
7580 && 0 == compare_tree_int (tree010,
7581 TYPE_PRECISION
7582 (TREE_TYPE (TREE_OPERAND
7583 (arg0, 0))))
7584 && operand_equal_p (tree11, tree011, 0))
7585 return build2 ((code0 != LSHIFT_EXPR
7586 ? LROTATE_EXPR
7587 : RROTATE_EXPR),
7588 type, TREE_OPERAND (arg0, 0), tree11);
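/* Editorial illustration (an added sketch, not part of the original
   source): on a target where unsigned int is 32 bits wide, both

     (x << 3) | (x >> 29)
     (x << n) | (x >> (32 - n))

   match here (and likewise with + instead of |, since BIT_IOR_EXPR
   jumps to bit_rotate) and each becomes a single rotate of x.  */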
7593 associate:
7594 /* In most languages, we can't associate operations on floats through
7595 parentheses. Rather than remember where the parentheses were, we
7596 don't associate floats at all, unless the user has specified
7597 -funsafe-math-optimizations. */
7599 if (! wins
7600 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7602 tree var0, con0, lit0, minus_lit0;
7603 tree var1, con1, lit1, minus_lit1;
7605 /* Split both trees into variables, constants, and literals. Then
7606 associate each group together, the constants with literals,
7607 then the result with variables. This increases the chances of
7608 literals being recombined later and of generating relocatable
7609 expressions for the sum of a constant and literal. */
7610 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7611 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7612 code == MINUS_EXPR);
7614 /* Only do something if we found more than two objects. Otherwise,
7615 nothing has changed and we risk infinite recursion. */
7616 if (2 < ((var0 != 0) + (var1 != 0)
7617 + (con0 != 0) + (con1 != 0)
7618 + (lit0 != 0) + (lit1 != 0)
7619 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7621 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7622 if (code == MINUS_EXPR)
7623 code = PLUS_EXPR;
7625 var0 = associate_trees (var0, var1, code, type);
7626 con0 = associate_trees (con0, con1, code, type);
7627 lit0 = associate_trees (lit0, lit1, code, type);
7628 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7630 /* Preserve the MINUS_EXPR if the negative part of the literal is
7631 greater than the positive part. Otherwise, the multiplicative
7632 folding code (i.e. extract_muldiv) may be fooled when
7633 unsigned constants are subtracted, as in the following
7634 example: ((X*2 + 4) - 8U)/2. */
7635 if (minus_lit0 && lit0)
7637 if (TREE_CODE (lit0) == INTEGER_CST
7638 && TREE_CODE (minus_lit0) == INTEGER_CST
7639 && tree_int_cst_lt (lit0, minus_lit0))
7641 minus_lit0 = associate_trees (minus_lit0, lit0,
7642 MINUS_EXPR, type);
7643 lit0 = 0;
7645 else
7647 lit0 = associate_trees (lit0, minus_lit0,
7648 MINUS_EXPR, type);
7649 minus_lit0 = 0;
7652 if (minus_lit0)
7654 if (con0 == 0)
7655 return fold_convert (type,
7656 associate_trees (var0, minus_lit0,
7657 MINUS_EXPR, type));
7658 else
7660 con0 = associate_trees (con0, minus_lit0,
7661 MINUS_EXPR, type);
7662 return fold_convert (type,
7663 associate_trees (var0, con0,
7664 PLUS_EXPR, type));
7668 con0 = associate_trees (con0, lit0, code, type);
7669 return fold_convert (type, associate_trees (var0, con0,
7670 code, type));
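/* Editorial illustration (an added sketch, not part of the original
   source): split_tree separates each operand into variable, constant
   and literal parts, so that for ints x and y

     (x + 3) - (y + 2)

   can be regrouped as  (x - y) + 1,  with the two literals combined
   into one.  */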
7674 binary:
7675 if (wins)
7676 t1 = const_binop (code, arg0, arg1, 0);
7677 if (t1 != NULL_TREE)
7679 /* The return value should always have
7680 the same type as the original expression. */
7681 if (TREE_TYPE (t1) != type)
7682 t1 = fold_convert (type, t1);
7684 return t1;
7686 return NULL_TREE;
7688 case MINUS_EXPR:
7689 /* A - (-B) -> A + B */
7690 if (TREE_CODE (arg1) == NEGATE_EXPR)
7691 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7692 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7693 if (TREE_CODE (arg0) == NEGATE_EXPR
7694 && (FLOAT_TYPE_P (type)
7695 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7696 && negate_expr_p (arg1)
7697 && reorder_operands_p (arg0, arg1))
7698 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7699 TREE_OPERAND (arg0, 0));
7700 /* Convert -A - 1 to ~A. */
7701 if (INTEGRAL_TYPE_P (type)
7702 && TREE_CODE (arg0) == NEGATE_EXPR
7703 && integer_onep (arg1))
7704 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7706 /* Convert -1 - A to ~A. */
7707 if (INTEGRAL_TYPE_P (type)
7708 && integer_all_onesp (arg0))
7709 return fold_build1 (BIT_NOT_EXPR, type, arg1);
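/* Editorial note (added, not part of the original source): both folds
   above are the two's complement identity ~a == -a - 1 read in each
   direction, so for int a both  -a - 1  and  -1 - a  simplify
   to ~a.  */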
7711 if (TREE_CODE (type) == COMPLEX_TYPE)
7713 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7714 if (tem)
7715 return tem;
7718 if (! FLOAT_TYPE_P (type))
7720 if (! wins && integer_zerop (arg0))
7721 return negate_expr (fold_convert (type, arg1));
7722 if (integer_zerop (arg1))
7723 return non_lvalue (fold_convert (type, arg0));
7725 /* Fold A - (A & B) into ~B & A. */
7726 if (!TREE_SIDE_EFFECTS (arg0)
7727 && TREE_CODE (arg1) == BIT_AND_EXPR)
7729 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7730 return fold_build2 (BIT_AND_EXPR, type,
7731 fold_build1 (BIT_NOT_EXPR, type,
7732 TREE_OPERAND (arg1, 0)),
7733 arg0);
7734 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7735 return fold_build2 (BIT_AND_EXPR, type,
7736 fold_build1 (BIT_NOT_EXPR, type,
7737 TREE_OPERAND (arg1, 1)),
7738 arg0);
7741 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7742 any power of 2 minus 1. */
7743 if (TREE_CODE (arg0) == BIT_AND_EXPR
7744 && TREE_CODE (arg1) == BIT_AND_EXPR
7745 && operand_equal_p (TREE_OPERAND (arg0, 0),
7746 TREE_OPERAND (arg1, 0), 0))
7748 tree mask0 = TREE_OPERAND (arg0, 1);
7749 tree mask1 = TREE_OPERAND (arg1, 1);
7750 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7752 if (operand_equal_p (tem, mask1, 0))
7754 tem = fold_build2 (BIT_XOR_EXPR, type,
7755 TREE_OPERAND (arg0, 0), mask1);
7756 return fold_build2 (MINUS_EXPR, type, tem, mask1);
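/* Editorial illustration (an added sketch, not part of the original
   source): with B = 0xff (a power of two minus 1), for unsigned a

     (a & ~0xff) - (a & 0xff)   folds to   (a ^ 0xff) - 0xff

   which references a only once.  */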
7761 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7762 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7763 return non_lvalue (fold_convert (type, arg0));
7765 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7766 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7767 (-ARG1 + ARG0) reduces to -ARG1. */
7768 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7769 return negate_expr (fold_convert (type, arg1));
7771 /* Fold &x - &x. This can happen from &x.foo - &x.
7772 This is unsafe for certain floats even in non-IEEE formats.
7773 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7774 Also note that operand_equal_p is always false if an operand
7775 is volatile. */
7777 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7778 && operand_equal_p (arg0, arg1, 0))
7779 return fold_convert (type, integer_zero_node);
7781 /* A - B -> A + (-B) if B is easily negatable. */
7782 if (!wins && negate_expr_p (arg1)
7783 && ((FLOAT_TYPE_P (type)
7784 /* Avoid this transformation if B is a positive REAL_CST. */
7785 && (TREE_CODE (arg1) != REAL_CST
7786 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7787 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7788 return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
7790 /* Try folding difference of addresses. */
7792 HOST_WIDE_INT diff;
7794 if ((TREE_CODE (arg0) == ADDR_EXPR
7795 || TREE_CODE (arg1) == ADDR_EXPR)
7796 && ptr_difference_const (arg0, arg1, &diff))
7797 return build_int_cst_type (type, diff);
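/* Editorial illustration (an added sketch, not part of the original
   source): when both operands take the address of the same object,
   ptr_difference_const can compute the distance at compile time, e.g.

     (char *) &a[5] - (char *) &a[2]

   may fold to the constant byte offset between the two elements.  */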
7800 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7801 of the array. The loop optimizer sometimes produces this type of
7802 expression. */
7803 if (TREE_CODE (arg0) == ADDR_EXPR
7804 && TREE_CODE (arg1) == MULT_EXPR)
7806 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7807 if (tem)
7808 return fold_convert (type, fold (tem));
7811 if (TREE_CODE (arg0) == MULT_EXPR
7812 && TREE_CODE (arg1) == MULT_EXPR
7813 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7815 /* (A * C) - (B * C) -> (A-B) * C. */
7816 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7817 TREE_OPERAND (arg1, 1), 0))
7818 return fold_build2 (MULT_EXPR, type,
7819 fold_build2 (MINUS_EXPR, type,
7820 TREE_OPERAND (arg0, 0),
7821 TREE_OPERAND (arg1, 0)),
7822 TREE_OPERAND (arg0, 1));
7823 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7824 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7825 TREE_OPERAND (arg1, 0), 0))
7826 return fold_build2 (MULT_EXPR, type,
7827 TREE_OPERAND (arg0, 0),
7828 fold_build2 (MINUS_EXPR, type,
7829 TREE_OPERAND (arg0, 1),
7830 TREE_OPERAND (arg1, 1)));
7833 goto associate;
7835 case MULT_EXPR:
7836 /* (-A) * (-B) -> A * B */
7837 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7838 return fold_build2 (MULT_EXPR, type,
7839 TREE_OPERAND (arg0, 0),
7840 negate_expr (arg1));
7841 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7842 return fold_build2 (MULT_EXPR, type,
7843 negate_expr (arg0),
7844 TREE_OPERAND (arg1, 0));
7846 if (TREE_CODE (type) == COMPLEX_TYPE)
7848 tem = fold_complex_mult (type, arg0, arg1);
7849 if (tem)
7850 return tem;
7853 if (! FLOAT_TYPE_P (type))
7855 if (integer_zerop (arg1))
7856 return omit_one_operand (type, arg1, arg0);
7857 if (integer_onep (arg1))
7858 return non_lvalue (fold_convert (type, arg0));
7859 /* Transform x * -1 into -x. */
7860 if (integer_all_onesp (arg1))
7861 return fold_convert (type, negate_expr (arg0));
7863 /* (a * (1 << b)) is (a << b) */
7864 if (TREE_CODE (arg1) == LSHIFT_EXPR
7865 && integer_onep (TREE_OPERAND (arg1, 0)))
7866 return fold_build2 (LSHIFT_EXPR, type, arg0,
7867 TREE_OPERAND (arg1, 1));
7868 if (TREE_CODE (arg0) == LSHIFT_EXPR
7869 && integer_onep (TREE_OPERAND (arg0, 0)))
7870 return fold_build2 (LSHIFT_EXPR, type, arg1,
7871 TREE_OPERAND (arg0, 1));
7873 if (TREE_CODE (arg1) == INTEGER_CST
7874 && 0 != (tem = extract_muldiv (op0,
7875 fold_convert (type, arg1),
7876 code, NULL_TREE)))
7877 return fold_convert (type, tem);
7880 else
7882 /* Maybe fold x * 0 to 0. The expressions aren't the same
7883 when x is NaN, since x * 0 is also NaN. Nor are they the
7884 same in modes with signed zeros, since multiplying a
7885 negative value by 0 gives -0, not +0. */
7886 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7887 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7888 && real_zerop (arg1))
7889 return omit_one_operand (type, arg1, arg0);
7890 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7891 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7892 && real_onep (arg1))
7893 return non_lvalue (fold_convert (type, arg0));
7895 /* Transform x * -1.0 into -x. */
7896 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7897 && real_minus_onep (arg1))
7898 return fold_convert (type, negate_expr (arg0));
7900 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7901 if (flag_unsafe_math_optimizations
7902 && TREE_CODE (arg0) == RDIV_EXPR
7903 && TREE_CODE (arg1) == REAL_CST
7904 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7906 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7907 arg1, 0);
7908 if (tem)
7909 return fold_build2 (RDIV_EXPR, type, tem,
7910 TREE_OPERAND (arg0, 1));
7913 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7914 if (operand_equal_p (arg0, arg1, 0))
7916 tree tem = fold_strip_sign_ops (arg0);
7917 if (tem != NULL_TREE)
7919 tem = fold_convert (type, tem);
7920 return fold_build2 (MULT_EXPR, type, tem, tem);
7924 if (flag_unsafe_math_optimizations)
7926 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7927 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7929 /* Optimizations of root(...)*root(...). */
7930 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7932 tree rootfn, arg, arglist;
7933 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7934 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7936 /* Optimize sqrt(x)*sqrt(x) as x. */
7937 if (BUILTIN_SQRT_P (fcode0)
7938 && operand_equal_p (arg00, arg10, 0)
7939 && ! HONOR_SNANS (TYPE_MODE (type)))
7940 return arg00;
7942 /* Optimize root(x)*root(y) as root(x*y). */
7943 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7944 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7945 arglist = build_tree_list (NULL_TREE, arg);
7946 return build_function_call_expr (rootfn, arglist);
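/* Editorial illustration (an added sketch, not part of the original
   source): for double x and y this gives

     sqrt (x) * sqrt (x)   ->   x
     sqrt (x) * sqrt (y)   ->   sqrt (x * y)

   the flag is needed because the forms differ in corner cases: with x
   and y both negative, sqrt (x) * sqrt (y) is NaN while sqrt (x * y)
   is not.  */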
7949 /* Optimize expN(x)*expN(y) as expN(x+y). */
7950 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7952 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7953 tree arg = build2 (PLUS_EXPR, type,
7954 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7955 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7956 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7957 return build_function_call_expr (expfn, arglist);
7960 /* Optimizations of pow(...)*pow(...). */
7961 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7962 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7963 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7965 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7966 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7967 1)));
7968 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7969 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7970 1)));
7972 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7973 if (operand_equal_p (arg01, arg11, 0))
7975 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7976 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7977 tree arglist = tree_cons (NULL_TREE, fold (arg),
7978 build_tree_list (NULL_TREE,
7979 arg01));
7980 return build_function_call_expr (powfn, arglist);
7983 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7984 if (operand_equal_p (arg00, arg10, 0))
7986 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7987 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7988 tree arglist = tree_cons (NULL_TREE, arg00,
7989 build_tree_list (NULL_TREE,
7990 arg));
7991 return build_function_call_expr (powfn, arglist);
7995 /* Optimize tan(x)*cos(x) as sin(x). */
7996 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7997 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7998 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7999 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8000 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8001 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8002 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8003 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8005 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8007 if (sinfn != NULL_TREE)
8008 return build_function_call_expr (sinfn,
8009 TREE_OPERAND (arg0, 1));
8012 /* Optimize x*pow(x,c) as pow(x,c+1). */
8013 if (fcode1 == BUILT_IN_POW
8014 || fcode1 == BUILT_IN_POWF
8015 || fcode1 == BUILT_IN_POWL)
8017 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8018 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8019 1)));
8020 if (TREE_CODE (arg11) == REAL_CST
8021 && ! TREE_CONSTANT_OVERFLOW (arg11)
8022 && operand_equal_p (arg0, arg10, 0))
8024 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8025 REAL_VALUE_TYPE c;
8026 tree arg, arglist;
8028 c = TREE_REAL_CST (arg11);
8029 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8030 arg = build_real (type, c);
8031 arglist = build_tree_list (NULL_TREE, arg);
8032 arglist = tree_cons (NULL_TREE, arg0, arglist);
8033 return build_function_call_expr (powfn, arglist);
8037 /* Optimize pow(x,c)*x as pow(x,c+1). */
8038 if (fcode0 == BUILT_IN_POW
8039 || fcode0 == BUILT_IN_POWF
8040 || fcode0 == BUILT_IN_POWL)
8042 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8043 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8044 1)));
8045 if (TREE_CODE (arg01) == REAL_CST
8046 && ! TREE_CONSTANT_OVERFLOW (arg01)
8047 && operand_equal_p (arg1, arg00, 0))
8049 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8050 REAL_VALUE_TYPE c;
8051 tree arg, arglist;
8053 c = TREE_REAL_CST (arg01);
8054 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8055 arg = build_real (type, c);
8056 arglist = build_tree_list (NULL_TREE, arg);
8057 arglist = tree_cons (NULL_TREE, arg1, arglist);
8058 return build_function_call_expr (powfn, arglist);
8062 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8063 if (! optimize_size
8064 && operand_equal_p (arg0, arg1, 0))
8066 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8068 if (powfn)
8070 tree arg = build_real (type, dconst2);
8071 tree arglist = build_tree_list (NULL_TREE, arg);
8072 arglist = tree_cons (NULL_TREE, arg0, arglist);
8073 return build_function_call_expr (powfn, arglist);
8078 goto associate;
8080 case BIT_IOR_EXPR:
8081 bit_ior:
8082 if (integer_all_onesp (arg1))
8083 return omit_one_operand (type, arg1, arg0);
8084 if (integer_zerop (arg1))
8085 return non_lvalue (fold_convert (type, arg0));
8086 if (operand_equal_p (arg0, arg1, 0))
8087 return non_lvalue (fold_convert (type, arg0));
8089 /* ~X | X is -1. */
8090 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8093 t1 = build_int_cst (type, -1);
8094 t1 = force_fit_type (t1, 0, false, false);
8095 return omit_one_operand (type, t1, arg1);
8098 /* X | ~X is -1. */
8099 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8100 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8102 t1 = build_int_cst (type, -1);
8103 t1 = force_fit_type (t1, 0, false, false);
8104 return omit_one_operand (type, t1, arg0);
8107 t1 = distribute_bit_expr (code, type, arg0, arg1);
8108 if (t1 != NULL_TREE)
8109 return t1;
8111 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8113 This results in more efficient code for machines without a NAND
8114 instruction. Combine will canonicalize to the first form
8115 which will allow use of NAND instructions provided by the
8116 backend if they exist. */
8117 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8118 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8120 return fold_build1 (BIT_NOT_EXPR, type,
8121 build2 (BIT_AND_EXPR, type,
8122 TREE_OPERAND (arg0, 0),
8123 TREE_OPERAND (arg1, 0)));
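/* Editorial note (added, not part of the original source): this is
   one of De Morgan's laws; for unsigned a and b,

     ~a | ~b   becomes   ~(a & b)

   trading two NOTs and an OR for one AND and one NOT, and exposing a
   single NAND where the target provides it.  */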
8126 /* See if this can be simplified into a rotate first. If that
8127 is unsuccessful continue in the association code. */
8128 goto bit_rotate;
8130 case BIT_XOR_EXPR:
8131 if (integer_zerop (arg1))
8132 return non_lvalue (fold_convert (type, arg0));
8133 if (integer_all_onesp (arg1))
8134 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8135 if (operand_equal_p (arg0, arg1, 0))
8136 return omit_one_operand (type, integer_zero_node, arg0);
8138 /* ~X ^ X is -1. */
8139 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8140 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8142 t1 = build_int_cst (type, -1);
8143 t1 = force_fit_type (t1, 0, false, false);
8144 return omit_one_operand (type, t1, arg1);
8147 /* X ^ ~X is -1. */
8148 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8149 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8151 t1 = build_int_cst (type, -1);
8152 t1 = force_fit_type (t1, 0, false, false);
8153 return omit_one_operand (type, t1, arg0);
8156 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8157 with a constant, and the two constants have no bits in common,
8158 we should treat this as a BIT_IOR_EXPR since this may produce more
8159 simplifications. */
8160 if (TREE_CODE (arg0) == BIT_AND_EXPR
8161 && TREE_CODE (arg1) == BIT_AND_EXPR
8162 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8163 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8164 && integer_zerop (const_binop (BIT_AND_EXPR,
8165 TREE_OPERAND (arg0, 1),
8166 TREE_OPERAND (arg1, 1), 0)))
8168 code = BIT_IOR_EXPR;
8169 goto bit_ior;
8172 /* See if this can be simplified into a rotate first. If that
8173 is unsuccessful continue in the association code. */
8174 goto bit_rotate;
8176 case BIT_AND_EXPR:
8177 if (integer_all_onesp (arg1))
8178 return non_lvalue (fold_convert (type, arg0));
8179 if (integer_zerop (arg1))
8180 return omit_one_operand (type, arg1, arg0);
8181 if (operand_equal_p (arg0, arg1, 0))
8182 return non_lvalue (fold_convert (type, arg0));
8184 /* ~X & X is always zero. */
8185 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8186 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8187 return omit_one_operand (type, integer_zero_node, arg1);
8189 /* X & ~X is always zero. */
8190 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8191 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8192 return omit_one_operand (type, integer_zero_node, arg0);
8194 t1 = distribute_bit_expr (code, type, arg0, arg1);
8195 if (t1 != NULL_TREE)
8196 return t1;
8197 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8198 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8199 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8201 unsigned int prec
8202 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8204 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8205 && (~TREE_INT_CST_LOW (arg1)
8206 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8207 return fold_convert (type, TREE_OPERAND (arg0, 0));
8210 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8212 This results in more efficient code for machines without a NOR
8213 instruction. Combine will canonicalize to the first form
8214 which will allow use of NOR instructions provided by the
8215 backend if they exist. */
8216 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8217 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8219 return fold_build1 (BIT_NOT_EXPR, type,
8220 build2 (BIT_IOR_EXPR, type,
8221 TREE_OPERAND (arg0, 0),
8222 TREE_OPERAND (arg1, 0)));
8225 goto associate;
8227 case RDIV_EXPR:
8228 /* Don't touch a floating-point divide by zero unless the mode
8229 of the constant can represent infinity. */
8230 if (TREE_CODE (arg1) == REAL_CST
8231 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8232 && real_zerop (arg1))
8233 return NULL_TREE;
8235 /* (-A) / (-B) -> A / B */
8236 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8237 return fold_build2 (RDIV_EXPR, type,
8238 TREE_OPERAND (arg0, 0),
8239 negate_expr (arg1));
8240 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8241 return fold_build2 (RDIV_EXPR, type,
8242 negate_expr (arg0),
8243 TREE_OPERAND (arg1, 0));
8245 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8246 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8247 && real_onep (arg1))
8248 return non_lvalue (fold_convert (type, arg0));
8250 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8251 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8252 && real_minus_onep (arg1))
8253 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8255 /* If ARG1 is a constant, we can convert this to a multiply by the
8256 reciprocal. This does not have the same rounding properties,
8257 so only do this if -funsafe-math-optimizations. We can actually
8258 always safely do it if ARG1 is a power of two, but it's hard to
8259 tell if it is or not in a portable manner. */
8260 if (TREE_CODE (arg1) == REAL_CST)
8262 if (flag_unsafe_math_optimizations
8263 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8264 arg1, 0)))
8265 return fold_build2 (MULT_EXPR, type, arg0, tem);
8266 /* Find the reciprocal if optimizing and the result is exact. */
8267 if (optimize)
8269 REAL_VALUE_TYPE r;
8270 r = TREE_REAL_CST (arg1);
8271 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8273 tem = build_real (type, r);
8274 return fold_build2 (MULT_EXPR, type, arg0, tem);
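/* Editorial illustration (an added sketch, not part of the original
   source): for double x,  x / 4.0  becomes  x * 0.25  whenever
   optimizing, since 0.25 is an exact reciprocal of a power of two,
   while  x / 3.0  becomes  x * (1.0 / 3.0)  only under
   -funsafe-math-optimizations, because that reciprocal rounds.  */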
8278 /* Convert A/B/C to A/(B*C). */
8279 if (flag_unsafe_math_optimizations
8280 && TREE_CODE (arg0) == RDIV_EXPR)
8281 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8282 fold_build2 (MULT_EXPR, type,
8283 TREE_OPERAND (arg0, 1), arg1));
8285 /* Convert A/(B/C) to (A/B)*C. */
8286 if (flag_unsafe_math_optimizations
8287 && TREE_CODE (arg1) == RDIV_EXPR)
8288 return fold_build2 (MULT_EXPR, type,
8289 fold_build2 (RDIV_EXPR, type, arg0,
8290 TREE_OPERAND (arg1, 0)),
8291 TREE_OPERAND (arg1, 1));
8293 /* Convert C1/(X*C2) into (C1/C2)/X. */
8294 if (flag_unsafe_math_optimizations
8295 && TREE_CODE (arg1) == MULT_EXPR
8296 && TREE_CODE (arg0) == REAL_CST
8297 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8299 tree tem = const_binop (RDIV_EXPR, arg0,
8300 TREE_OPERAND (arg1, 1), 0);
8301 if (tem)
8302 return fold_build2 (RDIV_EXPR, type, tem,
8303 TREE_OPERAND (arg1, 0));
8306 if (TREE_CODE (type) == COMPLEX_TYPE)
8308 tem = fold_complex_div (type, arg0, arg1, code);
8309 if (tem)
8310 return tem;
8313 if (flag_unsafe_math_optimizations)
8315 enum built_in_function fcode = builtin_mathfn_code (arg1);
8316 /* Optimize x/expN(y) into x*expN(-y). */
8317 if (BUILTIN_EXPONENT_P (fcode))
8319 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8320 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8321 tree arglist = build_tree_list (NULL_TREE,
8322 fold_convert (type, arg));
8323 arg1 = build_function_call_expr (expfn, arglist);
8324 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8327 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8328 if (fcode == BUILT_IN_POW
8329 || fcode == BUILT_IN_POWF
8330 || fcode == BUILT_IN_POWL)
8332 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8333 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8334 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8335 tree neg11 = fold_convert (type, negate_expr (arg11));
8336 tree arglist = tree_cons(NULL_TREE, arg10,
8337 build_tree_list (NULL_TREE, neg11));
8338 arg1 = build_function_call_expr (powfn, arglist);
8339 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8343 if (flag_unsafe_math_optimizations)
8345 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8346 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8348 /* Optimize sin(x)/cos(x) as tan(x). */
8349 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8350 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8351 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8352 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8353 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8355 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8357 if (tanfn != NULL_TREE)
8358 return build_function_call_expr (tanfn,
8359 TREE_OPERAND (arg0, 1));
8362 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8363 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8364 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8365 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8366 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8367 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8369 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8371 if (tanfn != NULL_TREE)
8373 tree tmp = TREE_OPERAND (arg0, 1);
8374 tmp = build_function_call_expr (tanfn, tmp);
8375 return fold_build2 (RDIV_EXPR, type,
8376 build_real (type, dconst1), tmp);
8380 /* Optimize pow(x,c)/x as pow(x,c-1). */
8381 if (fcode0 == BUILT_IN_POW
8382 || fcode0 == BUILT_IN_POWF
8383 || fcode0 == BUILT_IN_POWL)
8385 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8386 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8387 if (TREE_CODE (arg01) == REAL_CST
8388 && ! TREE_CONSTANT_OVERFLOW (arg01)
8389 && operand_equal_p (arg1, arg00, 0))
8391 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8392 REAL_VALUE_TYPE c;
8393 tree arg, arglist;
8395 c = TREE_REAL_CST (arg01);
8396 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8397 arg = build_real (type, c);
8398 arglist = build_tree_list (NULL_TREE, arg);
8399 arglist = tree_cons (NULL_TREE, arg1, arglist);
8400 return build_function_call_expr (powfn, arglist);
8404 goto binary;
8406 case TRUNC_DIV_EXPR:
8407 case ROUND_DIV_EXPR:
8408 case FLOOR_DIV_EXPR:
8409 case CEIL_DIV_EXPR:
8410 case EXACT_DIV_EXPR:
8411 if (integer_onep (arg1))
8412 return non_lvalue (fold_convert (type, arg0));
8413 if (integer_zerop (arg1))
8414 return NULL_TREE;
8415 /* X / -1 is -X. */
8416 if (!TYPE_UNSIGNED (type)
8417 && TREE_CODE (arg1) == INTEGER_CST
8418 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8419 && TREE_INT_CST_HIGH (arg1) == -1)
8420 return fold_convert (type, negate_expr (arg0));
8422 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8423 operation, EXACT_DIV_EXPR.
8425 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8426 At one time others generated faster code; it's not clear if they do
8427 after the last round of changes to the DIV code in expmed.c. */
8428 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8429 && multiple_of_p (type, arg0, arg1))
8430 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8432 if (TREE_CODE (arg1) == INTEGER_CST
8433 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8434 return fold_convert (type, tem);
8436 if (TREE_CODE (type) == COMPLEX_TYPE)
8438 tem = fold_complex_div (type, arg0, arg1, code);
8439 if (tem)
8440 return tem;
8442 goto binary;
8444 case CEIL_MOD_EXPR:
8445 case FLOOR_MOD_EXPR:
8446 case ROUND_MOD_EXPR:
8447 case TRUNC_MOD_EXPR:
8448 /* X % 1 is always zero, but be sure to preserve any side
8449 effects in X. */
8450 if (integer_onep (arg1))
8451 return omit_one_operand (type, integer_zero_node, arg0);
8453 /* X % 0, return X % 0 unchanged so that we can get the
8454 proper warnings and errors. */
8455 if (integer_zerop (arg1))
8456 return NULL_TREE;
8458 /* 0 % X is always zero, but be sure to preserve any side
8459 effects in X. Place this after checking for X == 0. */
8460 if (integer_zerop (arg0))
8461 return omit_one_operand (type, integer_zero_node, arg1);
8463 /* X % -1 is zero. */
8464 if (!TYPE_UNSIGNED (type)
8465 && TREE_CODE (arg1) == INTEGER_CST
8466 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8467 && TREE_INT_CST_HIGH (arg1) == -1)
8468 return omit_one_operand (type, integer_zero_node, arg0);
8470 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8471 BIT_AND_EXPR, i.e. "X % C" into "X & (C - 1)". */
8472 if (code == TRUNC_MOD_EXPR
8473 && TYPE_UNSIGNED (type)
8474 && integer_pow2p (arg1))
8476 unsigned HOST_WIDE_INT high, low;
8477 tree mask;
8478 int l;
8480 l = tree_log2 (arg1);
8481 if (l >= HOST_BITS_PER_WIDE_INT)
8483 high = ((unsigned HOST_WIDE_INT) 1
8484 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8485 low = -1;
8487 else
8489 high = 0;
8490 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8493 mask = build_int_cst_wide (type, low, high);
8494 return fold_build2 (BIT_AND_EXPR, type,
8495 fold_convert (type, arg0), mask);
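/* Editorial illustration (an added sketch, not part of the original
   source): for unsigned x,  x % 8  becomes  x & 7; the high/low pair
   above builds the mask 2**l - 1, including when the mask is wider
   than one HOST_WIDE_INT.  */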
8498 /* X % -C is the same as X % C. */
8499 if (code == TRUNC_MOD_EXPR
8500 && !TYPE_UNSIGNED (type)
8501 && TREE_CODE (arg1) == INTEGER_CST
8502 && TREE_INT_CST_HIGH (arg1) < 0
8503 && !flag_trapv
8504 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8505 && !sign_bit_p (arg1, arg1))
8506 return fold_build2 (code, type, fold_convert (type, arg0),
8507 fold_convert (type, negate_expr (arg1)));
8509 /* X % -Y is the same as X % Y. */
8510 if (code == TRUNC_MOD_EXPR
8511 && !TYPE_UNSIGNED (type)
8512 && TREE_CODE (arg1) == NEGATE_EXPR
8513 && !flag_trapv)
8514 return fold_build2 (code, type, fold_convert (type, arg0),
8515 fold_convert (type, TREE_OPERAND (arg1, 0)));
8517 if (TREE_CODE (arg1) == INTEGER_CST
8518 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8519 return fold_convert (type, tem);
8521 goto binary;
8523 case LROTATE_EXPR:
8524 case RROTATE_EXPR:
8525 if (integer_all_onesp (arg0))
8526 return omit_one_operand (type, arg0, arg1);
8527 goto shift;
8529 case RSHIFT_EXPR:
8530 /* Optimize -1 >> x for arithmetic right shifts. */
8531 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8532 return omit_one_operand (type, arg0, arg1);
8533 /* ... fall through ... */
8535 case LSHIFT_EXPR:
8536 shift:
8537 if (integer_zerop (arg1))
8538 return non_lvalue (fold_convert (type, arg0));
8539 if (integer_zerop (arg0))
8540 return omit_one_operand (type, arg0, arg1);
8542 /* Since a negative shift count is not well-defined,
8543 don't try to compute it in the compiler. */
8544 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8545 return NULL_TREE;
8546 /* Rewrite an LROTATE_EXPR by a constant into an
8547 RROTATE_EXPR by a new constant. */
8548 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8550 tree tem = build_int_cst (NULL_TREE,
8551 GET_MODE_BITSIZE (TYPE_MODE (type)));
8552 tem = fold_convert (TREE_TYPE (arg1), tem);
8553 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8554 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
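/* Editorial note (added, not part of the original source): e.g. in a
   32-bit type a rotate left by 3 becomes a rotate right by 29, so
   later folds only need to match RROTATE_EXPR by a constant.  */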
8557 /* If we have a rotate of a bit operation with the rotate count and
8558 the second operand of the bit operation both constant,
8559 permute the two operations. */
8560 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8561 && (TREE_CODE (arg0) == BIT_AND_EXPR
8562 || TREE_CODE (arg0) == BIT_IOR_EXPR
8563 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8565 return fold_build2 (TREE_CODE (arg0), type,
8566 fold_build2 (code, type,
8567 TREE_OPERAND (arg0, 0), arg1),
8568 fold_build2 (code, type,
8569 TREE_OPERAND (arg0, 1), arg1));
8571 /* Two consecutive rotates adding up to the width of the mode can
8572 be ignored. */
8573 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8574 && TREE_CODE (arg0) == RROTATE_EXPR
8575 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8576 && TREE_INT_CST_HIGH (arg1) == 0
8577 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8578 && ((TREE_INT_CST_LOW (arg1)
8579 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8580 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8581 return TREE_OPERAND (arg0, 0);
8583 goto binary;
8585 case MIN_EXPR:
8586 if (operand_equal_p (arg0, arg1, 0))
8587 return omit_one_operand (type, arg0, arg1);
8588 if (INTEGRAL_TYPE_P (type)
8589 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8590 return omit_one_operand (type, arg1, arg0);
8591 goto associate;
8593 case MAX_EXPR:
8594 if (operand_equal_p (arg0, arg1, 0))
8595 return omit_one_operand (type, arg0, arg1);
8596 if (INTEGRAL_TYPE_P (type)
8597 && TYPE_MAX_VALUE (type)
8598 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8599 return omit_one_operand (type, arg1, arg0);
8600 goto associate;
8602 case TRUTH_ANDIF_EXPR:
8603 /* Note that the operands of this must be ints
8604 and their values must be 0 or 1.
8605 ("true" is a fixed value perhaps depending on the language.) */
8606 /* If first arg is constant zero, return it. */
8607 if (integer_zerop (arg0))
8608 return fold_convert (type, arg0);
8609 case TRUTH_AND_EXPR:
8610 /* If either arg is constant true, drop it. */
8611 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8612 return non_lvalue (fold_convert (type, arg1));
8613 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8614 /* Preserve sequence points. */
8615 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8616 return non_lvalue (fold_convert (type, arg0));
8617 /* If second arg is constant zero, result is zero, but first arg
8618 must be evaluated. */
8619 if (integer_zerop (arg1))
8620 return omit_one_operand (type, arg1, arg0);
8621 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8622 case will be handled here. */
8623 if (integer_zerop (arg0))
8624 return omit_one_operand (type, arg0, arg1);
8626 /* !X && X is always false. */
8627 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8628 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8629 return omit_one_operand (type, integer_zero_node, arg1);
8630 /* X && !X is always false. */
8631 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8632 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8633 return omit_one_operand (type, integer_zero_node, arg0);
8635 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8636 means A >= Y && A != MAX, but in this case we know that
8637 A < X <= MAX. */
8639 if (!TREE_SIDE_EFFECTS (arg0)
8640 && !TREE_SIDE_EFFECTS (arg1))
8642 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8643 if (tem)
8644 return fold_build2 (code, type, tem, arg1);
8646 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8647 if (tem)
8648 return fold_build2 (code, type, arg0, tem);
8651 truth_andor:
8652 /* We only do these simplifications if we are optimizing. */
8653 if (!optimize)
8654 return NULL_TREE;
8656 /* Check for things like (A || B) && (A || C). We can convert this
8657 to A || (B && C). Note that either operator can be any of the four
8658 truth and/or operations and the transformation will still be
8659 valid. Also note that we only care about order for the
8660 ANDIF and ORIF operators. If B contains side effects, this
8661 might change the truth-value of A. */
8662 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8663 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8664 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8665 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8666 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8667 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8669 tree a00 = TREE_OPERAND (arg0, 0);
8670 tree a01 = TREE_OPERAND (arg0, 1);
8671 tree a10 = TREE_OPERAND (arg1, 0);
8672 tree a11 = TREE_OPERAND (arg1, 1);
8673 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8674 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8675 && (code == TRUTH_AND_EXPR
8676 || code == TRUTH_OR_EXPR));
8678 if (operand_equal_p (a00, a10, 0))
8679 return fold_build2 (TREE_CODE (arg0), type, a00,
8680 fold_build2 (code, type, a01, a11));
8681 else if (commutative && operand_equal_p (a00, a11, 0))
8682 return fold_build2 (TREE_CODE (arg0), type, a00,
8683 fold_build2 (code, type, a01, a10));
8684 else if (commutative && operand_equal_p (a01, a10, 0))
8685 return fold_build2 (TREE_CODE (arg0), type, a01,
8686 fold_build2 (code, type, a00, a11));
8688 /* This case is tricky because we must either have commutative
8689 operators or else A10 must not have side-effects. */
8691 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8692 && operand_equal_p (a01, a11, 0))
8693 return fold_build2 (TREE_CODE (arg0), type,
8694 fold_build2 (code, type, a00, a10),
8695 a01);
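/* Editorial illustration (an added sketch, not part of the original
   source): the four branches above factor out a shared operand, e.g.

     (a || b) && (a || c)   folds to   a || (b && c)

   with the side-effect tests making sure the short-circuit forms
   still evaluate their operands in a permissible order.  */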
8698 /* See if we can build a range comparison. */
8699 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8700 return tem;
8702 /* Check for the possibility of merging component references. If our
8703 lhs is another similar operation, try to merge its rhs with our
8704 rhs. Then try to merge our lhs and rhs. */
8705 if (TREE_CODE (arg0) == code
8706 && 0 != (tem = fold_truthop (code, type,
8707 TREE_OPERAND (arg0, 1), arg1)))
8708 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8710 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8711 return tem;
8713 return NULL_TREE;
8715 case TRUTH_ORIF_EXPR:
8716 /* Note that the operands of this must be ints
8717 and their values must be 0 or 1.
8718 ("true" is a fixed value perhaps depending on the language.) */
8719 /* If first arg is constant true, return it. */
8720 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8721 return fold_convert (type, arg0);
8722 case TRUTH_OR_EXPR:
8723 /* If either arg is constant zero, drop it. */
8724 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8725 return non_lvalue (fold_convert (type, arg1));
8726 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8727 /* Preserve sequence points. */
8728 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8729 return non_lvalue (fold_convert (type, arg0));
8730 /* If second arg is constant true, result is true, but we must
8731 evaluate first arg. */
8732 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8733 return omit_one_operand (type, arg1, arg0);
8734 /* Likewise for first arg, but note this only occurs here for
8735 TRUTH_OR_EXPR. */
8736 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8737 return omit_one_operand (type, arg0, arg1);
8739 /* !X || X is always true. */
8740 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8741 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8742 return omit_one_operand (type, integer_one_node, arg1);
8743 /* X || !X is always true. */
8744 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8745 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8746 return omit_one_operand (type, integer_one_node, arg0);
8748 goto truth_andor;
8750 case TRUTH_XOR_EXPR:
8751 /* If the second arg is constant zero, drop it. */
8752 if (integer_zerop (arg1))
8753 return non_lvalue (fold_convert (type, arg0));
8754 /* If the second arg is constant true, this is a logical inversion. */
8755 if (integer_onep (arg1))
8757 /* Only call invert_truthvalue if operand is a truth value. */
8758 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8759 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8760 else
8761 tem = invert_truthvalue (arg0);
8762 return non_lvalue (fold_convert (type, tem));
8764 /* Identical arguments cancel to zero. */
8765 if (operand_equal_p (arg0, arg1, 0))
8766 return omit_one_operand (type, integer_zero_node, arg0);
8768 /* !X ^ X is always true. */
8769 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8770 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8771 return omit_one_operand (type, integer_one_node, arg1);
8773 /* X ^ !X is always true. */
8774 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8775 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8776 return omit_one_operand (type, integer_one_node, arg0);
8778 return NULL_TREE;
8780 case EQ_EXPR:
8781 case NE_EXPR:
8782 case LT_EXPR:
8783 case GT_EXPR:
8784 case LE_EXPR:
8785 case GE_EXPR:
8786 /* If one arg is a real or integer constant, put it last. */
8787 if (tree_swap_operands_p (arg0, arg1, true))
8788 return fold_build2 (swap_tree_comparison (code), type, arg1, arg0);
8790 /* If this is an equality comparison of the address of a non-weak
8791 object against zero, then we know the result. */
8792 if ((code == EQ_EXPR || code == NE_EXPR)
8793 && TREE_CODE (arg0) == ADDR_EXPR
8794 && DECL_P (TREE_OPERAND (arg0, 0))
8795 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8796 && integer_zerop (arg1))
8797 return constant_boolean_node (code != EQ_EXPR, type);
8799 /* If this is an equality comparison of the address of two non-weak,
8800 unaliased symbols neither of which are extern (since we do not
8801 have access to attributes for externs), then we know the result. */
8802 if ((code == EQ_EXPR || code == NE_EXPR)
8803 && TREE_CODE (arg0) == ADDR_EXPR
8804 && DECL_P (TREE_OPERAND (arg0, 0))
8805 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8806 && ! lookup_attribute ("alias",
8807 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8808 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8809 && TREE_CODE (arg1) == ADDR_EXPR
8810 && DECL_P (TREE_OPERAND (arg1, 0))
8811 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8812 && ! lookup_attribute ("alias",
8813 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8814 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8815 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8816 ? code == EQ_EXPR : code != EQ_EXPR,
8817 type);
8819 /* If this is a comparison of two exprs that look like an
8820 ARRAY_REF of the same object, then we can fold this to a
8821 comparison of the two offsets. */
8822 if (TREE_CODE_CLASS (code) == tcc_comparison)
8824 tree base0, offset0, base1, offset1;
8826 if (extract_array_ref (arg0, &base0, &offset0)
8827 && extract_array_ref (arg1, &base1, &offset1)
8828 && operand_equal_p (base0, base1, 0))
8830 if (offset0 == NULL_TREE
8831 && offset1 == NULL_TREE)
8833 offset0 = integer_zero_node;
8834 offset1 = integer_zero_node;
8836 else if (offset0 == NULL_TREE)
8837 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8838 else if (offset1 == NULL_TREE)
8839 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8841 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8842 return fold_build2 (code, type, offset0, offset1);
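/* Editorial illustration (an added sketch, not part of the original
   source): when both sides are addresses into the same array, e.g.

     &a[i] == &a[j]

   the comparison reduces to comparing the two extracted offsets,
   effectively  i == j.  */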
8846 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8848 tree targ0 = strip_float_extensions (arg0);
8849 tree targ1 = strip_float_extensions (arg1);
8850 tree newtype = TREE_TYPE (targ0);
8852 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8853 newtype = TREE_TYPE (targ1);
8855 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8856 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8857 return fold_build2 (code, type, fold_convert (newtype, targ0),
8858 fold_convert (newtype, targ1));
8860 /* (-a) CMP (-b) -> b CMP a */
8861 if (TREE_CODE (arg0) == NEGATE_EXPR
8862 && TREE_CODE (arg1) == NEGATE_EXPR)
8863 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8864 TREE_OPERAND (arg0, 0));
8866 if (TREE_CODE (arg1) == REAL_CST)
8868 REAL_VALUE_TYPE cst;
8869 cst = TREE_REAL_CST (arg1);
8871 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8872 if (TREE_CODE (arg0) == NEGATE_EXPR)
8873 return
8874 fold_build2 (swap_tree_comparison (code), type,
8875 TREE_OPERAND (arg0, 0),
8876 build_real (TREE_TYPE (arg1),
8877 REAL_VALUE_NEGATE (cst)));
8879 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8880 /* a CMP (-0) -> a CMP 0 */
8881 if (REAL_VALUE_MINUS_ZERO (cst))
8882 return fold_build2 (code, type, arg0,
8883 build_real (TREE_TYPE (arg1), dconst0));
8885 /* x != NaN is always true, other ops are always false. */
8886 if (REAL_VALUE_ISNAN (cst)
8887 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8889 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8890 return omit_one_operand (type, tem, arg0);
8893 /* Fold comparisons against infinity. */
8894 if (REAL_VALUE_ISINF (cst))
8896 tem = fold_inf_compare (code, type, arg0, arg1);
8897 if (tem != NULL_TREE)
8898 return tem;
8902 /* If this is a comparison of a real constant with a PLUS_EXPR
8903 or a MINUS_EXPR of a real constant, we can convert it into a
8904 comparison with a revised real constant as long as no overflow
8905 occurs when unsafe_math_optimizations are enabled. */
8906 if (flag_unsafe_math_optimizations
8907 && TREE_CODE (arg1) == REAL_CST
8908 && (TREE_CODE (arg0) == PLUS_EXPR
8909 || TREE_CODE (arg0) == MINUS_EXPR)
8910 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8911 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8912 ? MINUS_EXPR : PLUS_EXPR,
8913 arg1, TREE_OPERAND (arg0, 1), 0))
8914 && ! TREE_CONSTANT_OVERFLOW (tem))
8915 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8917 /* Likewise, we can simplify a comparison of a real constant with
8918 a MINUS_EXPR whose first operand is also a real constant, i.e.
8919 (c1 - x) < c2 becomes x > c1-c2. */
8920 if (flag_unsafe_math_optimizations
8921 && TREE_CODE (arg1) == REAL_CST
8922 && TREE_CODE (arg0) == MINUS_EXPR
8923 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8924 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8925 arg1, 0))
8926 && ! TREE_CONSTANT_OVERFLOW (tem))
8927 return fold_build2 (swap_tree_comparison (code), type,
8928 TREE_OPERAND (arg0, 1), tem);
8930 /* Fold comparisons against built-in math functions. */
8931 if (TREE_CODE (arg1) == REAL_CST
8932 && flag_unsafe_math_optimizations
8933 && ! flag_errno_math)
8935 enum built_in_function fcode = builtin_mathfn_code (arg0);
8937 if (fcode != END_BUILTINS)
8939 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8940 if (tem != NULL_TREE)
8941 return tem;
8946 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8947 if (TREE_CONSTANT (arg1)
8948 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8949 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8950 /* This optimization is invalid for ordered comparisons
8951 if CONST+INCR overflows or if foo+incr might overflow.
8952 This optimization is invalid for floating point due to rounding.
8953 For pointer types we assume overflow doesn't happen. */
8954 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8955 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8956 && (code == EQ_EXPR || code == NE_EXPR))))
8958 tree varop, newconst;
8960 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8962 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8963 arg1, TREE_OPERAND (arg0, 1));
8964 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8965 TREE_OPERAND (arg0, 0),
8966 TREE_OPERAND (arg0, 1));
8968 else
8970 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8971 arg1, TREE_OPERAND (arg0, 1));
8972 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8973 TREE_OPERAND (arg0, 0),
8974 TREE_OPERAND (arg0, 1));
8978 /* If VAROP is a reference to a bitfield, we must mask
8979 the constant by the width of the field. */
8980 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8981 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8982 && host_integerp (DECL_SIZE (TREE_OPERAND
8983 (TREE_OPERAND (varop, 0), 1)), 1))
8985 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8986 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8987 tree folded_compare, shift;
8989 /* First check whether the comparison would come out
8990 always the same. If we don't do that we would
8991 change the meaning with the masking. */
8992 folded_compare = fold_build2 (code, type,
8993 TREE_OPERAND (varop, 0), arg1);
8994 if (integer_zerop (folded_compare)
8995 || integer_onep (folded_compare))
8996 return omit_one_operand (type, folded_compare, varop);
8998 shift = build_int_cst (NULL_TREE,
8999 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9000 shift = fold_convert (TREE_TYPE (varop), shift);
9001 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9002 newconst, shift);
9003 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9004 newconst, shift);
9007 return fold_build2 (code, type, varop, newconst);
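/* Editor's illustrative note (not part of the original source):
   "i++ == 5" becomes "++i == 6" for integral i; the pre-increment
   form exposes the updated value to later folds. As the comment
   above states, this is only safe for EQ/NE on integers, or for
   pointers, where overflow is assumed not to happen. */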
9010 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9011 This transformation affects the cases which are handled in later
9012 optimizations involving comparisons with non-negative constants. */
9013 if (TREE_CODE (arg1) == INTEGER_CST
9014 && TREE_CODE (arg0) != INTEGER_CST
9015 && tree_int_cst_sgn (arg1) > 0)
9017 switch (code)
9019 case GE_EXPR:
9020 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9021 return fold_build2 (GT_EXPR, type, arg0, arg1);
9023 case LT_EXPR:
9024 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9025 return fold_build2 (LE_EXPR, type, arg0, arg1);
9027 default:
9028 break;
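/* Editor's illustrative note (not part of the original source):
   for C > 0, "x >= 3" is canonicalized to "x > 2" and "x < 3" to
   "x <= 2", which feeds the highest/lowest-value checks below. */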
9032 /* Comparisons with the highest or lowest possible integer of
9033 the specified size will have known values.
9035 This is quite similar to fold_relational_hi_lo; however,
9036 attempts to share the code have been nothing but trouble. */
9038 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9040 if (TREE_CODE (arg1) == INTEGER_CST
9041 && ! TREE_CONSTANT_OVERFLOW (arg1)
9042 && width <= 2 * HOST_BITS_PER_WIDE_INT
9043 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9044 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9046 HOST_WIDE_INT signed_max_hi;
9047 unsigned HOST_WIDE_INT signed_max_lo;
9048 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9050 if (width <= HOST_BITS_PER_WIDE_INT)
9052 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9053 - 1;
9054 signed_max_hi = 0;
9055 max_hi = 0;
9057 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9059 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9060 min_lo = 0;
9061 min_hi = 0;
9063 else
9065 max_lo = signed_max_lo;
9066 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9067 min_hi = -1;
9070 else
9072 width -= HOST_BITS_PER_WIDE_INT;
9073 signed_max_lo = -1;
9074 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9075 - 1;
9076 max_lo = -1;
9077 min_lo = 0;
9079 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9081 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9082 min_hi = 0;
9084 else
9086 max_hi = signed_max_hi;
9087 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9091 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9092 && TREE_INT_CST_LOW (arg1) == max_lo)
9093 switch (code)
9095 case GT_EXPR:
9096 return omit_one_operand (type, integer_zero_node, arg0);
9098 case GE_EXPR:
9099 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9101 case LE_EXPR:
9102 return omit_one_operand (type, integer_one_node, arg0);
9104 case LT_EXPR:
9105 return fold_build2 (NE_EXPR, type, arg0, arg1);
9107 /* The GE_EXPR and LT_EXPR cases above are not normally
9108 reached because of previous transformations. */
9110 default:
9111 break;
9113 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9114 == max_hi
9115 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9116 switch (code)
9118 case GT_EXPR:
9119 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9120 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9121 case LE_EXPR:
9122 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9123 return fold_build2 (NE_EXPR, type, arg0, arg1);
9124 default:
9125 break;
9127 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9128 == min_hi
9129 && TREE_INT_CST_LOW (arg1) == min_lo)
9130 switch (code)
9132 case LT_EXPR:
9133 return omit_one_operand (type, integer_zero_node, arg0);
9135 case LE_EXPR:
9136 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9138 case GE_EXPR:
9139 return omit_one_operand (type, integer_one_node, arg0);
9141 case GT_EXPR:
9142 return fold_build2 (NE_EXPR, type, arg0, arg1);
9144 default:
9145 break;
9147 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9148 == min_hi
9149 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9150 switch (code)
9152 case GE_EXPR:
9153 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9154 return fold_build2 (NE_EXPR, type, arg0, arg1);
9155 case LT_EXPR:
9156 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9157 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9158 default:
9159 break;
9162 else if (!in_gimple_form
9163 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9164 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9165 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9166 /* signed_type does not work on pointer types. */
9167 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9169 /* The following case also applies to X < signed_max+1
9170 and X >= signed_max+1 because of previous transformations. */
9171 if (code == LE_EXPR || code == GT_EXPR)
9173 tree st0, st1;
9174 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9175 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9176 return fold
9177 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9178 type, fold_convert (st0, arg0),
9179 fold_convert (st1, integer_zero_node)));
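/* Editor's illustrative note (not part of the original source):
   on a 32-bit target, "(unsigned) x <= 2147483647" is equivalent
   to "(int) x >= 0", i.e. a test of the sign bit in the
   corresponding signed type. */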
9185 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9186 a MINUS_EXPR of a constant, we can convert it into a comparison with
9187 a revised constant as long as no overflow occurs. */
9188 if ((code == EQ_EXPR || code == NE_EXPR)
9189 && TREE_CODE (arg1) == INTEGER_CST
9190 && (TREE_CODE (arg0) == PLUS_EXPR
9191 || TREE_CODE (arg0) == MINUS_EXPR)
9192 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9193 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9194 ? MINUS_EXPR : PLUS_EXPR,
9195 arg1, TREE_OPERAND (arg0, 1), 0))
9196 && ! TREE_CONSTANT_OVERFLOW (tem))
9197 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9199 /* Similarly for a NEGATE_EXPR. */
9200 else if ((code == EQ_EXPR || code == NE_EXPR)
9201 && TREE_CODE (arg0) == NEGATE_EXPR
9202 && TREE_CODE (arg1) == INTEGER_CST
9203 && 0 != (tem = negate_expr (arg1))
9204 && TREE_CODE (tem) == INTEGER_CST
9205 && ! TREE_CONSTANT_OVERFLOW (tem))
9206 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9208 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9209 for !=. Don't do this for ordered comparisons due to overflow. */
9210 else if ((code == NE_EXPR || code == EQ_EXPR)
9211 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9212 return fold_build2 (code, type,
9213 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9215 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9216 && (TREE_CODE (arg0) == NOP_EXPR
9217 || TREE_CODE (arg0) == CONVERT_EXPR))
9219 /* If we are widening one operand of an integer comparison,
9220 see if the other operand is similarly being widened. Perhaps we
9221 can do the comparison in the narrower type. */
9222 tem = fold_widened_comparison (code, type, arg0, arg1);
9223 if (tem)
9224 return tem;
9226 /* Or if we are changing signedness. */
9227 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9228 if (tem)
9229 return tem;
9232 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9233 constant, we can simplify it. */
9234 else if (TREE_CODE (arg1) == INTEGER_CST
9235 && (TREE_CODE (arg0) == MIN_EXPR
9236 || TREE_CODE (arg0) == MAX_EXPR)
9237 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9239 tem = optimize_minmax_comparison (code, type, op0, op1);
9240 if (tem)
9241 return tem;
9243 return NULL_TREE;
9246 /* If we are comparing an ABS_EXPR with a constant, we can
9247 convert all the cases into explicit comparisons, but they may
9248 well not be faster than doing the ABS and one comparison.
9249 But ABS (X) <= C is a range comparison, which becomes a subtraction
9250 and a comparison, and is probably faster. */
9251 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9252 && TREE_CODE (arg0) == ABS_EXPR
9253 && ! TREE_SIDE_EFFECTS (arg0)
9254 && (0 != (tem = negate_expr (arg1)))
9255 && TREE_CODE (tem) == INTEGER_CST
9256 && ! TREE_CONSTANT_OVERFLOW (tem))
9257 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9258 build2 (GE_EXPR, type,
9259 TREE_OPERAND (arg0, 0), tem),
9260 build2 (LE_EXPR, type,
9261 TREE_OPERAND (arg0, 0), arg1));
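/* Editor's illustrative note (not part of the original source):
   "abs (x) <= 5" expands to "x >= -5 && x <= 5", a range test that
   needs only a subtraction and one unsigned comparison once later
   folds have processed it. */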
9263 /* Convert ABS_EXPR<x> >= 0 to true. */
9264 else if (code == GE_EXPR
9265 && tree_expr_nonnegative_p (arg0)
9266 && (integer_zerop (arg1)
9267 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9268 && real_zerop (arg1))))
9269 return omit_one_operand (type, integer_one_node, arg0);
9271 /* Convert ABS_EXPR<x> < 0 to false. */
9272 else if (code == LT_EXPR
9273 && tree_expr_nonnegative_p (arg0)
9274 && (integer_zerop (arg1) || real_zerop (arg1)))
9275 return omit_one_operand (type, integer_zero_node, arg0);
9277 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9278 else if ((code == EQ_EXPR || code == NE_EXPR)
9279 && TREE_CODE (arg0) == ABS_EXPR
9280 && (integer_zerop (arg1) || real_zerop (arg1)))
9281 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9283 /* If this is an EQ or NE comparison with zero and ARG0 is
9284 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9285 two operations, but the latter can be done in one less insn
9286 on machines that have only two-operand insns or on which a
9287 constant cannot be the first operand. */
9288 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9289 && TREE_CODE (arg0) == BIT_AND_EXPR)
9291 tree arg00 = TREE_OPERAND (arg0, 0);
9292 tree arg01 = TREE_OPERAND (arg0, 1);
9293 if (TREE_CODE (arg00) == LSHIFT_EXPR
9294 && integer_onep (TREE_OPERAND (arg00, 0)))
9295 return
9296 fold_build2 (code, type,
9297 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9298 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9299 arg01, TREE_OPERAND (arg00, 1)),
9300 fold_convert (TREE_TYPE (arg0),
9301 integer_one_node)),
9302 arg1);
9303 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9304 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9305 return
9306 fold_build2 (code, type,
9307 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9308 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9309 arg00, TREE_OPERAND (arg01, 1)),
9310 fold_convert (TREE_TYPE (arg0),
9311 integer_one_node)),
9312 arg1);
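/* Editor's illustrative note (not part of the original source):
   "((1 << n) & flags) == 0" becomes "((flags >> n) & 1) == 0";
   both need two operations, but the second avoids placing a
   constant in the first operand of the shift. */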
9315 /* If this is an NE or EQ comparison of zero against the result of a
9316 signed MOD operation whose second operand is a power of 2, make
9317 the MOD operation unsigned since it is simpler and equivalent. */
9318 if ((code == NE_EXPR || code == EQ_EXPR)
9319 && integer_zerop (arg1)
9320 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9321 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9322 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9323 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9324 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9325 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9327 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9328 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9329 fold_convert (newtype,
9330 TREE_OPERAND (arg0, 0)),
9331 fold_convert (newtype,
9332 TREE_OPERAND (arg0, 1)));
9334 return fold_build2 (code, type, newmod,
9335 fold_convert (newtype, arg1));
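/* Editor's illustrative note (not part of the original source):
   for "int x", the test "x % 4 == 0" holds exactly when
   "(unsigned) x % 4 == 0", and the unsigned modulus by a power of
   two reduces to a simple mask. */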
9338 /* If this is an NE comparison of zero with an AND of one, remove the
9339 comparison since the AND will give the correct value. */
9340 if (code == NE_EXPR && integer_zerop (arg1)
9341 && TREE_CODE (arg0) == BIT_AND_EXPR
9342 && integer_onep (TREE_OPERAND (arg0, 1)))
9343 return fold_convert (type, arg0);
9345 /* If we have (A & C) == C where C is a power of 2, convert this into
9346 (A & C) != 0. Similarly for NE_EXPR. */
9347 if ((code == EQ_EXPR || code == NE_EXPR)
9348 && TREE_CODE (arg0) == BIT_AND_EXPR
9349 && integer_pow2p (TREE_OPERAND (arg0, 1))
9350 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9351 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9352 arg0, fold_convert (TREE_TYPE (arg0),
9353 integer_zero_node));
9355 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9356 2, then fold the expression into shifts and logical operations. */
9357 tem = fold_single_bit_test (code, arg0, arg1, type);
9358 if (tem)
9359 return tem;
9361 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9362 Similarly for NE_EXPR. */
9363 if ((code == EQ_EXPR || code == NE_EXPR)
9364 && TREE_CODE (arg0) == BIT_AND_EXPR
9365 && TREE_CODE (arg1) == INTEGER_CST
9366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9368 tree notc = fold_build1 (BIT_NOT_EXPR,
9369 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9370 TREE_OPERAND (arg0, 1));
9371 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9372 arg1, notc);
9373 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9374 if (integer_nonzerop (dandnotc))
9375 return omit_one_operand (type, rslt, arg0);
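/* Editor's illustrative note (not part of the original source):
   "(x & 3) == 4" can never hold, since 4 & ~3 is nonzero; the
   comparison folds to 0, and "(x & 3) != 4" folds to 1. */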
9378 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9379 Similarly for NE_EXPR. */
9380 if ((code == EQ_EXPR || code == NE_EXPR)
9381 && TREE_CODE (arg0) == BIT_IOR_EXPR
9382 && TREE_CODE (arg1) == INTEGER_CST
9383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9385 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9386 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9387 TREE_OPERAND (arg0, 1), notd);
9388 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9389 if (integer_nonzerop (candnotd))
9390 return omit_one_operand (type, rslt, arg0);
9393 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9394 and similarly for >= into !=. */
9395 if ((code == LT_EXPR || code == GE_EXPR)
9396 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9397 && TREE_CODE (arg1) == LSHIFT_EXPR
9398 && integer_onep (TREE_OPERAND (arg1, 0)))
9399 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9400 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9401 TREE_OPERAND (arg1, 1)),
9402 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9404 else if ((code == LT_EXPR || code == GE_EXPR)
9405 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9406 && (TREE_CODE (arg1) == NOP_EXPR
9407 || TREE_CODE (arg1) == CONVERT_EXPR)
9408 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9409 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9410 return
9411 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9412 fold_convert (TREE_TYPE (arg0),
9413 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9414 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9415 1))),
9416 fold_convert (TREE_TYPE (arg0), integer_zero_node));
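/* Editor's illustrative note (not part of the original source):
   for unsigned x, "x < (1 << y)" holds exactly when every bit of x
   at or above position y is clear, i.e. "(x >> y) == 0". */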
9418 /* Simplify comparison of something with itself. (For IEEE
9419 floating-point, we can only do some of these simplifications.) */
9420 if (operand_equal_p (arg0, arg1, 0))
9422 switch (code)
9424 case EQ_EXPR:
9425 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9426 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9427 return constant_boolean_node (1, type);
9428 break;
9430 case GE_EXPR:
9431 case LE_EXPR:
9432 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9433 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9434 return constant_boolean_node (1, type);
9435 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9437 case NE_EXPR:
9438 /* For NE, we can only do this simplification if the type is integer,
9439 or we don't honor IEEE floating point NaNs. */
9440 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9441 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9442 break;
9443 /* ... fall through ... */
9444 case GT_EXPR:
9445 case LT_EXPR:
9446 return constant_boolean_node (0, type);
9447 default:
9448 gcc_unreachable ();
9452 /* If we are comparing an expression that just has comparisons
9453 of two integer values, arithmetic expressions of those comparisons,
9454 and constants, we can simplify it. There are only three cases
9455 to check: the two values can either be equal, the first can be
9456 greater, or the second can be greater. Fold the expression for
9457 those three values. Since each value must be 0 or 1, we have
9458 eight possibilities, each of which corresponds to the constant 0
9459 or 1 or one of the six possible comparisons.
9461 This handles common cases like (a > b) == 0 but also handles
9462 expressions like ((x > y) - (y > x)) > 0, which supposedly
9463 occur in macroized code. */
9465 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9467 tree cval1 = 0, cval2 = 0;
9468 int save_p = 0;
9470 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9471 /* Don't handle degenerate cases here; they should already
9472 have been handled anyway. */
9473 && cval1 != 0 && cval2 != 0
9474 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9475 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9476 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9477 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9478 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9479 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9480 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9482 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9483 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9485 /* We can't just pass T to eval_subst in case cval1 or cval2
9486 was the same as ARG1. */
9488 tree high_result
9489 = fold_build2 (code, type,
9490 eval_subst (arg0, cval1, maxval,
9491 cval2, minval),
9492 arg1);
9493 tree equal_result
9494 = fold_build2 (code, type,
9495 eval_subst (arg0, cval1, maxval,
9496 cval2, maxval),
9497 arg1);
9498 tree low_result
9499 = fold_build2 (code, type,
9500 eval_subst (arg0, cval1, minval,
9501 cval2, maxval),
9502 arg1);
9504 /* All three of these results should be 0 or 1. Confirm they
9505 are. Then use those values to select the proper code
9506 to use. */
9508 if ((integer_zerop (high_result)
9509 || integer_onep (high_result))
9510 && (integer_zerop (equal_result)
9511 || integer_onep (equal_result))
9512 && (integer_zerop (low_result)
9513 || integer_onep (low_result)))
9515 /* Make a 3-bit mask with the high-order bit being the
9516 value for `>', the next for '=', and the low for '<'. */
9517 switch ((integer_onep (high_result) * 4)
9518 + (integer_onep (equal_result) * 2)
9519 + integer_onep (low_result))
9521 case 0:
9522 /* Always false. */
9523 return omit_one_operand (type, integer_zero_node, arg0);
9524 case 1:
9525 code = LT_EXPR;
9526 break;
9527 case 2:
9528 code = EQ_EXPR;
9529 break;
9530 case 3:
9531 code = LE_EXPR;
9532 break;
9533 case 4:
9534 code = GT_EXPR;
9535 break;
9536 case 5:
9537 code = NE_EXPR;
9538 break;
9539 case 6:
9540 code = GE_EXPR;
9541 break;
9542 case 7:
9543 /* Always true. */
9544 return omit_one_operand (type, integer_one_node, arg0);
9547 tem = build2 (code, type, cval1, cval2);
9548 if (save_p)
9549 return save_expr (tem);
9550 else
9551 return fold (tem);
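/* Editor's illustrative note (not part of the original source):
   for "((a > b) - (b > a)) > 0" the three trial evaluations give
   high_result = 1, equal_result = 0 and low_result = 0, so the mask
   is 4 and the whole expression folds to "a > b". */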
9556 /* If this is a comparison of a field, we may be able to simplify it. */
9557 if (((TREE_CODE (arg0) == COMPONENT_REF
9558 && lang_hooks.can_use_bit_fields_p ())
9559 || TREE_CODE (arg0) == BIT_FIELD_REF)
9560 && (code == EQ_EXPR || code == NE_EXPR)
9561 /* Handle the constant case even without -O
9562 to make sure the warnings are given. */
9563 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9565 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9566 if (t1)
9567 return t1;
9570 /* If this is a comparison of complex values and either or both sides
9571 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9572 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9573 This may prevent needless evaluations. */
9574 if ((code == EQ_EXPR || code == NE_EXPR)
9575 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9576 && (TREE_CODE (arg0) == COMPLEX_EXPR
9577 || TREE_CODE (arg1) == COMPLEX_EXPR
9578 || TREE_CODE (arg0) == COMPLEX_CST
9579 || TREE_CODE (arg1) == COMPLEX_CST))
9581 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9582 tree real0, imag0, real1, imag1;
9584 arg0 = save_expr (arg0);
9585 arg1 = save_expr (arg1);
9586 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9587 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9588 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9589 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9591 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9592 : TRUTH_ORIF_EXPR),
9593 type,
9594 fold_build2 (code, type, real0, real1),
9595 fold_build2 (code, type, imag0, imag1));
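/* Editor's illustrative note (not part of the original source):
   "z == w" for complex z and w becomes a TRUTH_ANDIF_EXPR of the
   real-part and imaginary-part comparisons; "z != w" joins them
   with TRUTH_ORIF_EXPR instead. */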
9598 /* Optimize comparisons of strlen vs zero to a compare of the
9599 first character of the string vs zero. To wit,
9600 strlen(ptr) == 0 => *ptr == 0
9601 strlen(ptr) != 0 => *ptr != 0
9602 Other cases should reduce to one of these two (or a constant)
9603 due to the return value of strlen being unsigned. */
9604 if ((code == EQ_EXPR || code == NE_EXPR)
9605 && integer_zerop (arg1)
9606 && TREE_CODE (arg0) == CALL_EXPR)
9608 tree fndecl = get_callee_fndecl (arg0);
9609 tree arglist;
9611 if (fndecl
9612 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9613 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9614 && (arglist = TREE_OPERAND (arg0, 1))
9615 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9616 && ! TREE_CHAIN (arglist))
9617 return fold_build2 (code, type,
9618 build1 (INDIRECT_REF, char_type_node,
9619 TREE_VALUE (arglist)),
9620 fold_convert (char_type_node,
9621 integer_zero_node));
9624 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9625 into a single range test. */
9626 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9627 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9628 && TREE_CODE (arg1) == INTEGER_CST
9629 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9630 && !integer_zerop (TREE_OPERAND (arg0, 1))
9631 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9632 && !TREE_OVERFLOW (arg1))
9634 t1 = fold_div_compare (code, type, arg0, arg1);
9635 if (t1 != NULL_TREE)
9636 return t1;
9639 if ((code == EQ_EXPR || code == NE_EXPR)
9640 && !TREE_SIDE_EFFECTS (arg0)
9641 && integer_zerop (arg1)
9642 && tree_expr_nonzero_p (arg0))
9643 return constant_boolean_node (code == NE_EXPR, type);
9645 t1 = fold_relational_const (code, type, arg0, arg1);
9646 return t1 == NULL_TREE ? NULL_TREE : t1;
9648 case UNORDERED_EXPR:
9649 case ORDERED_EXPR:
9650 case UNLT_EXPR:
9651 case UNLE_EXPR:
9652 case UNGT_EXPR:
9653 case UNGE_EXPR:
9654 case UNEQ_EXPR:
9655 case LTGT_EXPR:
9656 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9658 t1 = fold_relational_const (code, type, arg0, arg1);
9659 if (t1 != NULL_TREE)
9660 return t1;
9663 /* If the first operand is NaN, the result is constant. */
9664 if (TREE_CODE (arg0) == REAL_CST
9665 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9666 && (code != LTGT_EXPR || ! flag_trapping_math))
9668 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9669 ? integer_zero_node
9670 : integer_one_node;
9671 return omit_one_operand (type, t1, arg1);
9674 /* If the second operand is NaN, the result is constant. */
9675 if (TREE_CODE (arg1) == REAL_CST
9676 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9677 && (code != LTGT_EXPR || ! flag_trapping_math))
9679 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9680 ? integer_zero_node
9681 : integer_one_node;
9682 return omit_one_operand (type, t1, arg0);
9685 /* Simplify unordered comparison of something with itself. */
9686 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9687 && operand_equal_p (arg0, arg1, 0))
9688 return constant_boolean_node (1, type);
9690 if (code == LTGT_EXPR
9691 && !flag_trapping_math
9692 && operand_equal_p (arg0, arg1, 0))
9693 return constant_boolean_node (0, type);
9695 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9697 tree targ0 = strip_float_extensions (arg0);
9698 tree targ1 = strip_float_extensions (arg1);
9699 tree newtype = TREE_TYPE (targ0);
9701 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9702 newtype = TREE_TYPE (targ1);
9704 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9705 return fold_build2 (code, type, fold_convert (newtype, targ0),
9706 fold_convert (newtype, targ1));
9709 return NULL_TREE;
9711 case COMPOUND_EXPR:
9712 /* When pedantic, a compound expression can be neither an lvalue
9713 nor an integer constant expression. */
9714 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9715 return NULL_TREE;
9716 /* Don't let (0, 0) be null pointer constant. */
9717 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9718 : fold_convert (type, arg1);
9719 return pedantic_non_lvalue (tem);
9721 case COMPLEX_EXPR:
9722 if (wins)
9723 return build_complex (type, arg0, arg1);
9724 return NULL_TREE;
9726 default:
9727 return NULL_TREE;
9728 } /* switch (code) */
9731 /* Fold a ternary expression of code CODE and type TYPE with operands
9732 OP0, OP1, and OP2. Return the folded expression if folding is
9733 successful. Otherwise, return NULL_TREE. */
9735 static tree
9736 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9738 tree tem;
9739 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9740 enum tree_code_class kind = TREE_CODE_CLASS (code);
9742 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9743 && TREE_CODE_LENGTH (code) == 3);
9745 /* Strip any conversions that don't change the mode. This is safe
9746 for every expression, except for a comparison expression because
9747 its signedness is derived from its operands. So, in the latter
9748 case, only strip conversions that don't change the signedness.
9750 Note that this is done as an internal manipulation within the
9751 constant folder, in order to find the simplest representation of
9752 the arguments so that their form can be studied. In any case,
9753 the appropriate type conversions should be put back in the tree
9754 that will get out of the constant folder. */
9755 if (op0)
9757 arg0 = op0;
9758 STRIP_NOPS (arg0);
9761 if (op1)
9763 arg1 = op1;
9764 STRIP_NOPS (arg1);
9767 switch (code)
9769 case COMPONENT_REF:
9770 if (TREE_CODE (arg0) == CONSTRUCTOR
9771 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9773 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9774 if (m)
9775 return TREE_VALUE (m);
9777 return NULL_TREE;
9779 case COND_EXPR:
9780 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9781 so all simple results must be passed through pedantic_non_lvalue. */
9782 if (TREE_CODE (arg0) == INTEGER_CST)
9784 tem = integer_zerop (arg0) ? op2 : op1;
9785 /* Only optimize constant conditions when the selected branch
9786 has the same type as the COND_EXPR. This avoids optimizing
9787 away "c ? x : throw", where the throw has a void type. */
9788 if (! VOID_TYPE_P (TREE_TYPE (tem))
9789 || VOID_TYPE_P (type))
9790 return pedantic_non_lvalue (tem);
9791 return NULL_TREE;
9793 if (operand_equal_p (arg1, op2, 0))
9794 return pedantic_omit_one_operand (type, arg1, arg0);
9796 /* If we have A op B ? A : C, we may be able to convert this to a
9797 simpler expression, depending on the operation and the values
9798 of B and C. Signed zeros prevent all of these transformations,
9799 for reasons given above each one.
9801 Also try swapping the arguments and inverting the conditional. */
9802 if (COMPARISON_CLASS_P (arg0)
9803 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9804 arg1, TREE_OPERAND (arg0, 1))
9805 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9807 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9808 if (tem)
9809 return tem;
9812 if (COMPARISON_CLASS_P (arg0)
9813 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9814 op2,
9815 TREE_OPERAND (arg0, 1))
9816 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9818 tem = invert_truthvalue (arg0);
9819 if (COMPARISON_CLASS_P (tem))
9821 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9822 if (tem)
9823 return tem;
9827 /* If the second operand is simpler than the third, swap them
9828 since that produces better jump optimization results. */
9829 if (tree_swap_operands_p (op1, op2, false))
9831 /* See if this can be inverted. If it can't, possibly because
9832 it was a floating-point inequality comparison, don't do
9833 anything. */
9834 tem = invert_truthvalue (arg0);
9836 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9837 return fold_build3 (code, type, tem, op2, op1);
9840 /* Convert A ? 1 : 0 to simply A. */
9841 if (integer_onep (op1)
9842 && integer_zerop (op2)
9843 /* If we try to convert OP0 to our type, the
9844 call to fold will try to move the conversion inside
9845 a COND, which will recurse. In that case, the COND_EXPR
9846 is probably the best choice, so leave it alone. */
9847 && type == TREE_TYPE (arg0))
9848 return pedantic_non_lvalue (arg0);
9850 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9851 over COND_EXPR in cases such as floating point comparisons. */
9852 if (integer_zerop (op1)
9853 && integer_onep (op2)
9854 && truth_value_p (TREE_CODE (arg0)))
9855 return pedantic_non_lvalue (fold_convert (type,
9856 invert_truthvalue (arg0)));
9858 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9859 if (TREE_CODE (arg0) == LT_EXPR
9860 && integer_zerop (TREE_OPERAND (arg0, 1))
9861 && integer_zerop (op2)
9862 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9863 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9864 TREE_TYPE (tem), tem, arg1));
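/* Editor's illustrative note (not part of the original source):
   for 32-bit int x, "x < 0 ? 0x80000000 : 0" is simply
   "x & 0x80000000", since the selected constant is exactly the
   sign-bit mask of x's type. */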
9866 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9867 already handled above. */
9868 if (TREE_CODE (arg0) == BIT_AND_EXPR
9869 && integer_onep (TREE_OPERAND (arg0, 1))
9870 && integer_zerop (op2)
9871 && integer_pow2p (arg1))
9873 tree tem = TREE_OPERAND (arg0, 0);
9874 STRIP_NOPS (tem);
9875 if (TREE_CODE (tem) == RSHIFT_EXPR
9876 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9877 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9878 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9879 return fold_build2 (BIT_AND_EXPR, type,
9880 TREE_OPERAND (tem, 0), arg1);
9883 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9884 is probably obsolete because the first operand should be a
9885 truth value (that's why we have the two cases above), but let's
9886 leave it in until we can confirm this for all front-ends. */
9887 if (integer_zerop (op2)
9888 && TREE_CODE (arg0) == NE_EXPR
9889 && integer_zerop (TREE_OPERAND (arg0, 1))
9890 && integer_pow2p (arg1)
9891 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9892 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9893 arg1, OEP_ONLY_CONST))
9894 return pedantic_non_lvalue (fold_convert (type,
9895 TREE_OPERAND (arg0, 0)));
9897 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9898 if (integer_zerop (op2)
9899 && truth_value_p (TREE_CODE (arg0))
9900 && truth_value_p (TREE_CODE (arg1)))
9901 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
9903 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9904 if (integer_onep (op2)
9905 && truth_value_p (TREE_CODE (arg0))
9906 && truth_value_p (TREE_CODE (arg1)))
9908 /* Only perform transformation if ARG0 is easily inverted. */
9909 tem = invert_truthvalue (arg0);
9910 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9911 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
9914 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9915 if (integer_zerop (arg1)
9916 && truth_value_p (TREE_CODE (arg0))
9917 && truth_value_p (TREE_CODE (op2)))
9919 /* Only perform transformation if ARG0 is easily inverted. */
9920 tem = invert_truthvalue (arg0);
9921 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9922 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
9925 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9926 if (integer_onep (arg1)
9927 && truth_value_p (TREE_CODE (arg0))
9928 && truth_value_p (TREE_CODE (op2)))
9929 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
9931 return NULL_TREE;
9933 case CALL_EXPR:
9934 /* Check for a built-in function. */
9935 if (TREE_CODE (op0) == ADDR_EXPR
9936 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
9937 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
9939 tree fndecl = TREE_OPERAND (op0, 0);
9940 tree arglist = op1;
9941 tree tmp = fold_builtin (fndecl, arglist, false);
9942 if (tmp)
9943 return tmp;
9945 return NULL_TREE;
9947 default:
9948 return NULL_TREE;
9949 } /* switch (code) */
9952 /* Perform constant folding and related simplification of EXPR.
9953 The related simplifications include x*1 => x, x*0 => 0, etc.,
9954 and application of the associative law.
9955 NOP_EXPR conversions may be removed freely (as long as we
9956 are careful not to change the type of the overall expression).
9957 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
9958 but we can constant-fold them if they have constant operands. */
9960 #ifdef ENABLE_FOLD_CHECKING
9961 # define fold(x) fold_1 (x)
9962 static tree fold_1 (tree);
9963 static
9964 #endif
9965 tree
9966 fold (tree expr)
9968 const tree t = expr;
9969 enum tree_code code = TREE_CODE (t);
9970 enum tree_code_class kind = TREE_CODE_CLASS (code);
9971 tree tem;
9973 /* Return right away if a constant. */
9974 if (kind == tcc_constant)
9975 return t;
9977 if (IS_EXPR_CODE_CLASS (kind))
9979 tree type = TREE_TYPE (t);
9980 tree op0, op1, op2;
9982 switch (TREE_CODE_LENGTH (code))
9984 case 1:
9985 op0 = TREE_OPERAND (t, 0);
9986 tem = fold_unary (code, type, op0);
9987 return tem ? tem : expr;
9988 case 2:
9989 op0 = TREE_OPERAND (t, 0);
9990 op1 = TREE_OPERAND (t, 1);
9991 tem = fold_binary (code, type, op0, op1);
9992 return tem ? tem : expr;
9993 case 3:
9994 op0 = TREE_OPERAND (t, 0);
9995 op1 = TREE_OPERAND (t, 1);
9996 op2 = TREE_OPERAND (t, 2);
9997 tem = fold_ternary (code, type, op0, op1, op2);
9998 return tem ? tem : expr;
9999 default:
10000 break;
10004 switch (code)
10006 case CONST_DECL:
10007 return fold (DECL_INITIAL (t));
10009 default:
10010 return t;
10011 } /* switch (code) */
10014 #ifdef ENABLE_FOLD_CHECKING
10015 #undef fold
10017 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10018 static void fold_check_failed (tree, tree);
10019 void print_fold_checksum (tree);
10021 /* When --enable-checking=fold, compute a digest of expr before
10022 and after the actual fold call to verify that fold did not
10023 accidentally change the original expr. */
10025 tree
10026 fold (tree expr)
10028 tree ret;
10029 struct md5_ctx ctx;
10030 unsigned char checksum_before[16], checksum_after[16];
10031 htab_t ht;
10033 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10034 md5_init_ctx (&ctx);
10035 fold_checksum_tree (expr, &ctx, ht);
10036 md5_finish_ctx (&ctx, checksum_before);
10037 htab_empty (ht);
10039 ret = fold_1 (expr);
10041 md5_init_ctx (&ctx);
10042 fold_checksum_tree (expr, &ctx, ht);
10043 md5_finish_ctx (&ctx, checksum_after);
10044 htab_delete (ht);
10046 if (memcmp (checksum_before, checksum_after, 16))
10047 fold_check_failed (expr, ret);
10049 return ret;
10052 void
10053 print_fold_checksum (tree expr)
10055 struct md5_ctx ctx;
10056 unsigned char checksum[16], cnt;
10057 htab_t ht;
10059 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10060 md5_init_ctx (&ctx);
10061 fold_checksum_tree (expr, &ctx, ht);
10062 md5_finish_ctx (&ctx, checksum);
10063 htab_delete (ht);
10064 for (cnt = 0; cnt < 16; ++cnt)
10065 fprintf (stderr, "%02x", checksum[cnt]);
10066 putc ('\n', stderr);
10069 static void
10070 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10072 internal_error ("fold check: original tree changed by fold");
10075 static void
10076 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10078 void **slot;
10079 enum tree_code code;
10080 char buf[sizeof (struct tree_decl)];
10081 int i, len;
10083 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10084 <= sizeof (struct tree_decl))
10085 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10086 if (expr == NULL)
10087 return;
10088 slot = htab_find_slot (ht, expr, INSERT);
10089 if (*slot != NULL)
10090 return;
10091 *slot = expr;
10092 code = TREE_CODE (expr);
10093 if (TREE_CODE_CLASS (code) == tcc_declaration
10094 && DECL_ASSEMBLER_NAME_SET_P (expr))
10096 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10097 memcpy (buf, expr, tree_size (expr));
10098 expr = (tree) buf;
10099 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10101 else if (TREE_CODE_CLASS (code) == tcc_type
10102 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10103 || TYPE_CACHED_VALUES_P (expr)))
10105 /* Allow these fields to be modified. */
10106 memcpy (buf, expr, tree_size (expr));
10107 expr = (tree) buf;
10108 TYPE_POINTER_TO (expr) = NULL;
10109 TYPE_REFERENCE_TO (expr) = NULL;
10110 if (TYPE_CACHED_VALUES_P (expr))
10112 TYPE_CACHED_VALUES_P (expr) = 0;
10113 TYPE_CACHED_VALUES (expr) = NULL;
10116 md5_process_bytes (expr, tree_size (expr), ctx);
10117 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10118 if (TREE_CODE_CLASS (code) != tcc_type
10119 && TREE_CODE_CLASS (code) != tcc_declaration)
10120 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10121 switch (TREE_CODE_CLASS (code))
10123 case tcc_constant:
10124 switch (code)
10126 case STRING_CST:
10127 md5_process_bytes (TREE_STRING_POINTER (expr),
10128 TREE_STRING_LENGTH (expr), ctx);
10129 break;
10130 case COMPLEX_CST:
10131 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10132 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10133 break;
10134 case VECTOR_CST:
10135 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10136 break;
10137 default:
10138 break;
10140 break;
10141 case tcc_exceptional:
10142 switch (code)
10144 case TREE_LIST:
10145 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10146 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10147 break;
10148 case TREE_VEC:
10149 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10150 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10151 break;
10152 default:
10153 break;
10155 break;
10156 case tcc_expression:
10157 case tcc_reference:
10158 case tcc_comparison:
10159 case tcc_unary:
10160 case tcc_binary:
10161 case tcc_statement:
10162 len = TREE_CODE_LENGTH (code);
10163 for (i = 0; i < len; ++i)
10164 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10165 break;
10166 case tcc_declaration:
10167 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10168 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10169 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10170 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10171 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10172 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10173 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10174 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10175 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10176 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10177 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10178 break;
10179 case tcc_type:
10180 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10181 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10182 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10183 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10184 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10185 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10186 if (INTEGRAL_TYPE_P (expr)
10187 || SCALAR_FLOAT_TYPE_P (expr))
10189 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10190 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10192 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10193 if (TREE_CODE (expr) == RECORD_TYPE
10194 || TREE_CODE (expr) == UNION_TYPE
10195 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10196 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10197 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10198 break;
10199 default:
10200 break;
10204 #endif
10206 /* Fold a unary tree expression with code CODE of type TYPE with an
10207 operand OP0. Return a folded expression if successful. Otherwise,
10208 return a tree expression with code CODE of type TYPE with an
10209 operand OP0. */
10211 tree
10212 fold_build1 (enum tree_code code, tree type, tree op0)
10214 tree tem = fold_unary (code, type, op0);
10215 if (tem)
10216 return tem;
10218 return build1 (code, type, op0);
10221 /* Fold a binary tree expression with code CODE of type TYPE with
10222 operands OP0 and OP1. Return a folded expression if successful.
10223 Otherwise, return a tree expression with code CODE of type TYPE
10224 with operands OP0 and OP1. */
10226 tree
10227 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10229 tree tem = fold_binary (code, type, op0, op1);
10230 if (tem)
10231 return tem;
10233 return build2 (code, type, op0, op1);
10236 /* Fold a ternary tree expression with code CODE of type TYPE with
10237 operands OP0, OP1, and OP2. Return a folded expression if
10238 successful. Otherwise, return a tree expression with code CODE of
10239 type TYPE with operands OP0, OP1, and OP2. */
10241 tree
10242 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10244 tree tem = fold_ternary (code, type, op0, op1, op2);
10245 if (tem)
10246 return tem;
10248 return build3 (code, type, op0, op1, op2);
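/* Editor's usage note (not part of the original source): callers
   use these helpers instead of build[123] followed by fold. For
   example, a hypothetical call

     tem = fold_build2 (PLUS_EXPR, integer_type_node, x,
                        integer_zero_node);

   yields X itself when the addition folds away, and a fresh
   PLUS_EXPR node otherwise. */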
10251 /* Perform constant folding and related simplification of initializer
10252 expression EXPR. This behaves identically to "fold" but ignores
10253 potential run-time traps and exceptions that fold must preserve. */
10255 tree
10256 fold_initializer (tree expr)
10258 int saved_signaling_nans = flag_signaling_nans;
10259 int saved_trapping_math = flag_trapping_math;
10260 int saved_rounding_math = flag_rounding_math;
10261 int saved_trapv = flag_trapv;
10262 tree result;
10264 flag_signaling_nans = 0;
10265 flag_trapping_math = 0;
10266 flag_rounding_math = 0;
10267 flag_trapv = 0;
10269 result = fold (expr);
10271 flag_signaling_nans = saved_signaling_nans;
10272 flag_trapping_math = saved_trapping_math;
10273 flag_rounding_math = saved_rounding_math;
10274 flag_trapv = saved_trapv;
10276 return result;
10279 /* Determine if the first argument is a multiple of the second argument.
10280 Return 0 if it is not, or if we cannot easily determine it to be.
10282 An example of the sort of thing we care about (at this point; this routine
10283 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10284 fold cases do now) is discovering that
10286 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10288 is a multiple of
10290 SAVE_EXPR (J * 8)
10292 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10294 This code also handles discovering that
10296 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10298 is a multiple of 8 so we don't have to worry about dealing with a
10299 possible remainder.
10301 Note that we *look* inside a SAVE_EXPR only to determine how it was
10302 calculated; it is not safe for fold to do much of anything else with the
10303 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10304 at run time. For example, the latter example above *cannot* be implemented
10305 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10306 evaluation time of the original SAVE_EXPR is not necessarily the same at
10307 the time the new expression is evaluated. The only optimization of this
10308 sort that would be valid is changing
10310 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10312 divided by 8 to
10314 SAVE_EXPR (I) * SAVE_EXPR (J)
10316 (where the same SAVE_EXPR (J) is used in the original and the
10317 transformed version). */
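/* Editor's illustrative note (not part of the original source):
   given the example above, multiple_of_p, asked whether
   "SAVE_EXPR (I) * SAVE_EXPR (J * 8)" is a multiple of 8, answers
   yes through the MULT_EXPR case, since one factor is itself a
   multiple of 8; whenever it cannot prove the property it simply
   returns 0. */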
10319 static int
10320 multiple_of_p (tree type, tree top, tree bottom)
10322 if (operand_equal_p (top, bottom, 0))
10323 return 1;
10325 if (TREE_CODE (type) != INTEGER_TYPE)
10326 return 0;
10328 switch (TREE_CODE (top))
10330 case BIT_AND_EXPR:
10331 /* Bitwise and provides a power of two multiple. If the mask is
10332 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10333 if (!integer_pow2p (bottom))
10334 return 0;
10335 /* FALLTHRU */
10337 case MULT_EXPR:
10338 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10339 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10341 case PLUS_EXPR:
10342 case MINUS_EXPR:
10343 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10344 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10346 case LSHIFT_EXPR:
10347 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10349 tree op1, t1;
10351 op1 = TREE_OPERAND (top, 1);
10352 /* const_binop may not detect overflow correctly,
10353 so check for it explicitly here. */
10354 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10355 > TREE_INT_CST_LOW (op1)
10356 && TREE_INT_CST_HIGH (op1) == 0
10357 && 0 != (t1 = fold_convert (type,
10358 const_binop (LSHIFT_EXPR,
10359 size_one_node,
10360 op1, 0)))
10361 && ! TREE_OVERFLOW (t1))
10362 return multiple_of_p (type, t1, bottom);
10364 return 0;
10366 case NOP_EXPR:
10367 /* Can't handle conversions from non-integral or wider integral type. */
10368 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10369 || (TYPE_PRECISION (type)
10370 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10371 return 0;
10373 /* ... fall through ... */
10375 case SAVE_EXPR:
10376 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10378 case INTEGER_CST:
10379 if (TREE_CODE (bottom) != INTEGER_CST
10380 || (TYPE_UNSIGNED (type)
10381 && (tree_int_cst_sgn (top) < 0
10382 || tree_int_cst_sgn (bottom) < 0)))
10383 return 0;
10384 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10385 top, bottom, 0));
10387 default:
10388 return 0;
10392 /* Return true if `t' is known to be non-negative. */
10394 int
10395 tree_expr_nonnegative_p (tree t)
10397 switch (TREE_CODE (t))
10399 case ABS_EXPR:
10400 return 1;
10402 case INTEGER_CST:
10403 return tree_int_cst_sgn (t) >= 0;
10405 case REAL_CST:
10406 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10408 case PLUS_EXPR:
10409 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10410 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10411 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10413 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10414 both unsigned and at least 2 bits shorter than the result. */
10415 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10416 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10417 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10419 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10420 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10421 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10422 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10424 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10425 TYPE_PRECISION (inner2)) + 1;
10426 return prec < TYPE_PRECISION (TREE_TYPE (t));
10429 break;
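/* Editor's illustrative note (not part of the original source):
   two unsigned 8-bit values zero-extended to a 32-bit int sum to at
   most 255 + 255 = 510, which needs only 9 bits; since 9 < 32 the
   sign bit of the result stays clear. */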
10431 case MULT_EXPR:
10432 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10434 /* x * x for floating point x is always non-negative. */
10435 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10436 return 1;
10437 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10438 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10441 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10442 both unsigned and their total bits is shorter than the result. */
10443 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10444 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10445 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10447 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10448 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10449 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10450 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10451 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10452 < TYPE_PRECISION (TREE_TYPE (t));
10454 return 0;
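/* Editor's illustrative note (not part of the original source):
   likewise 255 * 255 = 65025 fits in 8 + 8 = 16 bits, so the
   product of two zero-extended 8-bit values cannot reach the sign
   bit of a 32-bit result. */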
10456 case TRUNC_DIV_EXPR:
10457 case CEIL_DIV_EXPR:
10458 case FLOOR_DIV_EXPR:
10459 case ROUND_DIV_EXPR:
10460 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10461 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10463 case TRUNC_MOD_EXPR:
10464 case CEIL_MOD_EXPR:
10465 case FLOOR_MOD_EXPR:
10466 case ROUND_MOD_EXPR:
10467 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10469 case RDIV_EXPR:
10470 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10471 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10473 case BIT_AND_EXPR:
10474 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10475 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10476 case BIT_IOR_EXPR:
10477 case BIT_XOR_EXPR:
10478 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10479 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10481 case NOP_EXPR:
10483 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10484 tree outer_type = TREE_TYPE (t);
10486 if (TREE_CODE (outer_type) == REAL_TYPE)
10488 if (TREE_CODE (inner_type) == REAL_TYPE)
10489 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10490 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10492 if (TYPE_UNSIGNED (inner_type))
10493 return 1;
10494 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10497 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10499 if (TREE_CODE (inner_type) == REAL_TYPE)
10500 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10501 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10502 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10503 && TYPE_UNSIGNED (inner_type);
10506 break;
10508 case COND_EXPR:
10509 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10510 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10511 case COMPOUND_EXPR:
10512 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10513 case MIN_EXPR:
10514 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10515 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10516 case MAX_EXPR:
10517 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10518 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10519 case MODIFY_EXPR:
10520 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10521 case BIND_EXPR:
10522 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10523 case SAVE_EXPR:
10524 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10525 case NON_LVALUE_EXPR:
10526 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10527 case FLOAT_EXPR:
10528 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10530 case TARGET_EXPR:
10532 tree temp = TARGET_EXPR_SLOT (t);
10533 t = TARGET_EXPR_INITIAL (t);
10535 /* If the initializer is non-void, then it's a normal expression
10536 that will be assigned to the slot. */
10537 if (!VOID_TYPE_P (t))
10538 return tree_expr_nonnegative_p (t);
10540 /* Otherwise, the initializer sets the slot in some way. One common
10541 way is an assignment statement at the end of the initializer. */
10542 while (1)
10544 if (TREE_CODE (t) == BIND_EXPR)
10545 t = expr_last (BIND_EXPR_BODY (t));
10546 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10547 || TREE_CODE (t) == TRY_CATCH_EXPR)
10548 t = expr_last (TREE_OPERAND (t, 0));
10549 else if (TREE_CODE (t) == STATEMENT_LIST)
10550 t = expr_last (t);
10551 else
10552 break;
10554 if (TREE_CODE (t) == MODIFY_EXPR
10555 && TREE_OPERAND (t, 0) == temp)
10556 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10558 return 0;
10561 case CALL_EXPR:
10563 tree fndecl = get_callee_fndecl (t);
10564 tree arglist = TREE_OPERAND (t, 1);
10565 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10566 switch (DECL_FUNCTION_CODE (fndecl))
10568 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10569 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10570 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10571 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10573 CASE_BUILTIN_F (BUILT_IN_ACOS)
10574 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10575 CASE_BUILTIN_F (BUILT_IN_CABS)
10576 CASE_BUILTIN_F (BUILT_IN_COSH)
10577 CASE_BUILTIN_F (BUILT_IN_ERFC)
10578 CASE_BUILTIN_F (BUILT_IN_EXP)
10579 CASE_BUILTIN_F (BUILT_IN_EXP10)
10580 CASE_BUILTIN_F (BUILT_IN_EXP2)
10581 CASE_BUILTIN_F (BUILT_IN_FABS)
10582 CASE_BUILTIN_F (BUILT_IN_FDIM)
10583 CASE_BUILTIN_F (BUILT_IN_FREXP)
10584 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10585 CASE_BUILTIN_F (BUILT_IN_POW10)
10586 CASE_BUILTIN_I (BUILT_IN_FFS)
10587 CASE_BUILTIN_I (BUILT_IN_PARITY)
10588 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10589 /* Always true. */
10590 return 1;
10592 CASE_BUILTIN_F (BUILT_IN_SQRT)
10593 /* sqrt(-0.0) is -0.0. */
10594 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10595 return 1;
10596 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10598 CASE_BUILTIN_F (BUILT_IN_ASINH)
10599 CASE_BUILTIN_F (BUILT_IN_ATAN)
10600 CASE_BUILTIN_F (BUILT_IN_ATANH)
10601 CASE_BUILTIN_F (BUILT_IN_CBRT)
10602 CASE_BUILTIN_F (BUILT_IN_CEIL)
10603 CASE_BUILTIN_F (BUILT_IN_ERF)
10604 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10605 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10606 CASE_BUILTIN_F (BUILT_IN_FMOD)
10607 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10608 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10609 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10610 CASE_BUILTIN_F (BUILT_IN_LRINT)
10611 CASE_BUILTIN_F (BUILT_IN_LROUND)
10612 CASE_BUILTIN_F (BUILT_IN_MODF)
10613 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10614 CASE_BUILTIN_F (BUILT_IN_POW)
10615 CASE_BUILTIN_F (BUILT_IN_RINT)
10616 CASE_BUILTIN_F (BUILT_IN_ROUND)
10617 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10618 CASE_BUILTIN_F (BUILT_IN_SINH)
10619 CASE_BUILTIN_F (BUILT_IN_TANH)
10620 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10621 /* True if the 1st argument is nonnegative. */
10622 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10624 CASE_BUILTIN_F (BUILT_IN_FMAX)
10625 /* True if the 1st OR 2nd arguments are nonnegative. */
10626 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10627 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10629 CASE_BUILTIN_F (BUILT_IN_FMIN)
10630 /* True if the 1st AND 2nd arguments are nonnegative. */
10631 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10632 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10634 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10635 /* True if the 2nd argument is nonnegative. */
10636 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10638 default:
10639 break;
10640 #undef CASE_BUILTIN_F
10641 #undef CASE_BUILTIN_I
10645 /* ... fall through ... */
10647 default:
10648 if (truth_value_p (TREE_CODE (t)))
10649 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10650 return 1;
10653 /* We don't know the sign of `t', so be conservative and return false. */
10654 return 0;
10657 /* Return true when T is an address and is known to be nonzero.
10658 For floating point we further ensure that T is not denormal.
10659 Similar logic is present in nonzero_address in rtlanal.h. */
10661 static bool
10662 tree_expr_nonzero_p (tree t)
10664 tree type = TREE_TYPE (t);
10666 /* Doing something useful for floating point would need more work. */
10667 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10668 return false;
10670 switch (TREE_CODE (t))
10672 case ABS_EXPR:
10673 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10674 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10676 case INTEGER_CST:
10677 /* We used to test for !integer_zerop here. This does not work correctly
10678 if TREE_CONSTANT_OVERFLOW (t). */
10679 return (TREE_INT_CST_LOW (t) != 0
10680 || TREE_INT_CST_HIGH (t) != 0);
10682 case PLUS_EXPR:
10683 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10685 /* In the presence of negative values it is hard
10686 to say anything definite. */
10687 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10688 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10689 return false;
10690 /* One of the operands must be positive and the other nonnegative. */
10691 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10692 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10694 break;
10696 case MULT_EXPR:
10697 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10699 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10700 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10702 break;
10704 case NOP_EXPR:
10706 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10707 tree outer_type = TREE_TYPE (t);
10709 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
10710 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10712 break;
10714 case ADDR_EXPR:
10716 tree base = get_base_address (TREE_OPERAND (t, 0));
10718 if (!base)
10719 return false;
10721 /* Weak declarations may link to NULL. */
10722 if (DECL_P (base))
10723 return !DECL_WEAK (base);
10725 /* Constants are never weak. */
10726 if (CONSTANT_CLASS_P (base))
10727 return true;
10729 return false;
10732 case COND_EXPR:
10733 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10734 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10736 case MIN_EXPR:
10737 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10738 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10740 case MAX_EXPR:
10741 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10743 /* When both operands are nonzero, then MAX must be too. */
10744 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10745 return true;
10747 /* MAX where operand 0 is positive is positive. */
10748 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10750 /* MAX where operand 1 is positive is positive. */
10751 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10752 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10753 return true;
10754 break;
10756 case COMPOUND_EXPR:
10757 case MODIFY_EXPR:
10758 case BIND_EXPR:
10759 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10761 case SAVE_EXPR:
10762 case NON_LVALUE_EXPR:
10763 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10765 case BIT_IOR_EXPR:
10766 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10767 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10769 default:
10770 break;
10772 return false;
10775 /* See if we are applying CODE, a relational operator, to the highest
10776 or lowest possible integer of TYPE. If so, then the result is a
10777 compile-time constant. */
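/* For example (illustrative), with an unsigned char operand X whose
   maximum value is 255:

     X > 255    folds to constant 0
     X <= 255   folds to constant 1
     X >= 255   is rewritten as X == 255
     X < 255    is rewritten as X != 255  */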
10779 static tree
10780 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10781 tree *op1_p)
10783 tree op0 = *op0_p;
10784 tree op1 = *op1_p;
10785 enum tree_code code = *code_p;
10786 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10788 if (TREE_CODE (op1) == INTEGER_CST
10789 && ! TREE_CONSTANT_OVERFLOW (op1)
10790 && width <= HOST_BITS_PER_WIDE_INT
10791 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10792 || POINTER_TYPE_P (TREE_TYPE (op1))))
10794 unsigned HOST_WIDE_INT signed_max;
10795 unsigned HOST_WIDE_INT max, min;
10797 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10799 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10801 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10802 min = 0;
10804 else
10806 max = signed_max;
10807 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10810 if (TREE_INT_CST_HIGH (op1) == 0
10811 && TREE_INT_CST_LOW (op1) == max)
10812 switch (code)
10814 case GT_EXPR:
10815 return omit_one_operand (type, integer_zero_node, op0);
10817 case GE_EXPR:
10818 *code_p = EQ_EXPR;
10819 break;
10820 case LE_EXPR:
10821 return omit_one_operand (type, integer_one_node, op0);
10823 case LT_EXPR:
10824 *code_p = NE_EXPR;
10825 break;
10827 /* The GE_EXPR and LT_EXPR cases above are not normally
10828 reached because of previous transformations. */
10830 default:
10831 break;
10833 else if (TREE_INT_CST_HIGH (op1) == 0
10834 && TREE_INT_CST_LOW (op1) == max - 1)
10835 switch (code)
10837 case GT_EXPR:
10838 *code_p = EQ_EXPR;
10839 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10840 break;
10841 case LE_EXPR:
10842 *code_p = NE_EXPR;
10843 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10844 break;
10845 default:
10846 break;
10848 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10849 && TREE_INT_CST_LOW (op1) == min)
10850 switch (code)
10852 case LT_EXPR:
10853 return omit_one_operand (type, integer_zero_node, op0);
10855 case LE_EXPR:
10856 *code_p = EQ_EXPR;
10857 break;
10859 case GE_EXPR:
10860 return omit_one_operand (type, integer_one_node, op0);
10862 case GT_EXPR:
10863 *code_p = NE_EXPR;
10864 break;
10866 default:
10867 break;
10869 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10870 && TREE_INT_CST_LOW (op1) == min + 1)
10871 switch (code)
10873 case GE_EXPR:
10874 *code_p = NE_EXPR;
10875 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10876 break;
10877 case LT_EXPR:
10878 *code_p = EQ_EXPR;
10879 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10880 break;
10881 default:
10882 break;
10885 else if (TREE_INT_CST_HIGH (op1) == 0
10886 && TREE_INT_CST_LOW (op1) == signed_max
10887 && TYPE_UNSIGNED (TREE_TYPE (op1))
10888 /* signed_type does not work on pointer types. */
10889 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10891 /* The following case also applies to X < signed_max+1
10892 and X >= signed_max+1 because of previous transformations. */
10893 if (code == LE_EXPR || code == GT_EXPR)
10895 tree st0, st1, exp, retval;
10896 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10897 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10899 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10900 type,
10901 fold_convert (st0, op0),
10902 fold_convert (st1, integer_zero_node));
10904 retval = fold_binary_to_constant (TREE_CODE (exp),
10905 TREE_TYPE (exp),
10906 TREE_OPERAND (exp, 0),
10907 TREE_OPERAND (exp, 1));
10909 /* If we are in gimple form, then returning EXP would create
10910 non-gimple expressions. Clearing it is safe and ensures
10911 we do not allow a non-gimple expression to escape. */
10912 if (in_gimple_form)
10913 exp = NULL;
10915 return (retval ? retval : exp);
10920 return NULL_TREE;
10924 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10925 attempt to fold the expression to a constant without modifying TYPE,
10926 OP0 or OP1.
10928 If the expression could be simplified to a constant, then return
10929 the constant. If the expression would not be simplified to a
10930 constant, then return NULL_TREE.
10932 Note this is primarily designed to be called after gimplification
10933 of the tree structures and when at least one operand is a constant.
10934 As a result of those simplifying assumptions this routine is far
10935 simpler than the generic fold routine. */
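/* A sketch of the intended use, with hypothetical operands:

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                              build_int_cst (NULL_TREE, 3),
                              build_int_cst (NULL_TREE, 4))

   returns the INTEGER_CST 7, whereas the same call with a VAR_DECL
   as one operand returns NULL_TREE, since the result would not be
   constant.  */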
10937 tree
10938 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10940 int wins = 1;
10941 tree subop0;
10942 tree subop1;
10943 tree tem;
10945 /* If this is a commutative operation, and ARG0 is a constant, move it
10946 to ARG1 to reduce the number of tests below. */
10947 if (commutative_tree_code (code)
10948 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10950 tem = op0;
10951 op0 = op1;
10952 op1 = tem;
10955 /* If either operand is a complex type, extract its real component. */
10956 if (TREE_CODE (op0) == COMPLEX_CST)
10957 subop0 = TREE_REALPART (op0);
10958 else
10959 subop0 = op0;
10961 if (TREE_CODE (op1) == COMPLEX_CST)
10962 subop1 = TREE_REALPART (op1);
10963 else
10964 subop1 = op1;
10966 /* Note if either argument is not a real or integer constant.
10967 With a few exceptions, simplification is limited to cases
10968 where both arguments are constants. */
10969 if ((TREE_CODE (subop0) != INTEGER_CST
10970 && TREE_CODE (subop0) != REAL_CST)
10971 || (TREE_CODE (subop1) != INTEGER_CST
10972 && TREE_CODE (subop1) != REAL_CST))
10973 wins = 0;
10975 switch (code)
10977 case PLUS_EXPR:
10978 /* (plus (address) (const_int)) is a constant. */
10979 if (TREE_CODE (op0) == PLUS_EXPR
10980 && TREE_CODE (op1) == INTEGER_CST
10981 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10982 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10983 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10984 == ADDR_EXPR)))
10985 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10987 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10988 const_binop (PLUS_EXPR, op1,
10989 TREE_OPERAND (op0, 1), 0));
10991 case BIT_XOR_EXPR:
10993 binary:
10994 if (!wins)
10995 return NULL_TREE;
10997 /* Both arguments are constants. Simplify. */
10998 tem = const_binop (code, op0, op1, 0);
10999 if (tem != NULL_TREE)
11001 /* The return value should always have the same type as
11002 the original expression. */
11003 if (TREE_TYPE (tem) != type)
11004 tem = fold_convert (type, tem);
11006 return tem;
11008 return NULL_TREE;
11010 case MINUS_EXPR:
11011 /* Fold &x - &x. This can happen from &x.foo - &x.
11012 This is unsafe for certain floats even in non-IEEE formats.
11013 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11014 Also note that operand_equal_p is always false if an
11015 operand is volatile. */
11016 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
11017 return fold_convert (type, integer_zero_node);
11019 goto binary;
11021 case MULT_EXPR:
11022 case BIT_AND_EXPR:
11023 /* Special case multiplication or bitwise AND where one argument
11024 is zero. */
11025 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
11026 return omit_one_operand (type, op1, op0);
11027 else
11028 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
11029 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
11030 && real_zerop (op1))
11031 return omit_one_operand (type, op1, op0);
11033 goto binary;
11035 case BIT_IOR_EXPR:
11036 /* Special case when we know the result will be all ones. */
11037 if (integer_all_onesp (op1))
11038 return omit_one_operand (type, op1, op0);
11040 goto binary;
11042 case TRUNC_DIV_EXPR:
11043 case ROUND_DIV_EXPR:
11044 case FLOOR_DIV_EXPR:
11045 case CEIL_DIV_EXPR:
11046 case EXACT_DIV_EXPR:
11047 case TRUNC_MOD_EXPR:
11048 case ROUND_MOD_EXPR:
11049 case FLOOR_MOD_EXPR:
11050 case CEIL_MOD_EXPR:
11051 case RDIV_EXPR:
11052 /* Division by zero is undefined. */
11053 if (integer_zerop (op1))
11054 return NULL_TREE;
11056 if (TREE_CODE (op1) == REAL_CST
11057 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
11058 && real_zerop (op1))
11059 return NULL_TREE;
11061 goto binary;
11063 case MIN_EXPR:
11064 if (INTEGRAL_TYPE_P (type)
11065 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11066 return omit_one_operand (type, op1, op0);
11068 goto binary;
11070 case MAX_EXPR:
11071 if (INTEGRAL_TYPE_P (type)
11072 && TYPE_MAX_VALUE (type)
11073 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11074 return omit_one_operand (type, op1, op0);
11076 goto binary;
11078 case RSHIFT_EXPR:
11079 /* Optimize -1 >> x for arithmetic right shifts. */
11080 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
11081 return omit_one_operand (type, op0, op1);
11082 /* ... fall through ... */
11084 case LSHIFT_EXPR:
11085 if (integer_zerop (op0))
11086 return omit_one_operand (type, op0, op1);
11088 /* Since a negative shift count is not well-defined, don't
11089 try to compute it in the compiler. */
11090 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
11091 return NULL_TREE;
11093 goto binary;
11095 case LROTATE_EXPR:
11096 case RROTATE_EXPR:
11097 /* -1 rotated either direction by any amount is still -1. */
11098 if (integer_all_onesp (op0))
11099 return omit_one_operand (type, op0, op1);
11101 /* 0 rotated either direction by any amount is still zero. */
11102 if (integer_zerop (op0))
11103 return omit_one_operand (type, op0, op1);
11105 goto binary;
11107 case COMPLEX_EXPR:
11108 if (wins)
11109 return build_complex (type, op0, op1);
11110 return NULL_TREE;
11112 case LT_EXPR:
11113 case LE_EXPR:
11114 case GT_EXPR:
11115 case GE_EXPR:
11116 case EQ_EXPR:
11117 case NE_EXPR:
11118 /* If one arg is a real or integer constant, put it last. */
11119 if ((TREE_CODE (op0) == INTEGER_CST
11120 && TREE_CODE (op1) != INTEGER_CST)
11121 || (TREE_CODE (op0) == REAL_CST
11122 && TREE_CODE (op1) != REAL_CST))
11124 tree temp;
11126 temp = op0;
11127 op0 = op1;
11128 op1 = temp;
11129 code = swap_tree_comparison (code);
11132 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11133 This transformation affects the cases which are handled in later
11134 optimizations involving comparisons with non-negative constants. */
11135 if (TREE_CODE (op1) == INTEGER_CST
11136 && TREE_CODE (op0) != INTEGER_CST
11137 && tree_int_cst_sgn (op1) > 0)
11139 switch (code)
11141 case GE_EXPR:
11142 code = GT_EXPR;
11143 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11144 break;
11146 case LT_EXPR:
11147 code = LE_EXPR;
11148 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11149 break;
11151 default:
11152 break;
11156 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
11157 if (tem)
11158 return tem;
11160 /* Fall through. */
11162 case ORDERED_EXPR:
11163 case UNORDERED_EXPR:
11164 case UNLT_EXPR:
11165 case UNLE_EXPR:
11166 case UNGT_EXPR:
11167 case UNGE_EXPR:
11168 case UNEQ_EXPR:
11169 case LTGT_EXPR:
11170 if (!wins)
11171 return NULL_TREE;
11173 return fold_relational_const (code, type, op0, op1);
11175 case RANGE_EXPR:
11176 /* This could probably be handled. */
11177 return NULL_TREE;
11179 case TRUTH_AND_EXPR:
11180 /* If second arg is constant zero, result is zero, but first arg
11181 must be evaluated. */
11182 if (integer_zerop (op1))
11183 return omit_one_operand (type, op1, op0);
11184 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11185 case will be handled here. */
11186 if (integer_zerop (op0))
11187 return omit_one_operand (type, op0, op1);
11188 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11189 return constant_boolean_node (true, type);
11190 return NULL_TREE;
11192 case TRUTH_OR_EXPR:
11193 /* If second arg is constant true, result is true, but we must
11194 evaluate first arg. */
11195 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
11196 return omit_one_operand (type, op1, op0);
11197 /* Likewise for first arg, but note this only occurs here for
11198 TRUTH_OR_EXPR. */
11199 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
11200 return omit_one_operand (type, op0, op1);
11201 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11202 return constant_boolean_node (false, type);
11203 return NULL_TREE;
11205 case TRUTH_XOR_EXPR:
11206 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11208 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
11209 return constant_boolean_node (x, type);
11211 return NULL_TREE;
11213 default:
11214 return NULL_TREE;
11218 /* Given the components of a unary expression CODE, TYPE and OP0,
11219 attempt to fold the expression to a constant without modifying
11220 TYPE or OP0.
11222 If the expression could be simplified to a constant, then return
11223 the constant. If the expression would not be simplified to a
11224 constant, then return NULL_TREE.
11226 Note this is primarily designed to be called after gimplification
11227 of the tree structures and when op0 is a constant. As a result
11228 of those simplifying assumptions this routine is far simpler than
11229 the generic fold routine. */
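/* A sketch of the intended use, with a hypothetical operand:

     fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                             build_int_cst (NULL_TREE, 5))

   returns the INTEGER_CST -5, whereas the same call with a VAR_DECL
   operand returns NULL_TREE.  */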
11231 tree
11232 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11234 /* Make sure we have a suitable constant argument. */
11235 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
11237 tree subop;
11239 if (TREE_CODE (op0) == COMPLEX_CST)
11240 subop = TREE_REALPART (op0);
11241 else
11242 subop = op0;
11244 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
11245 return NULL_TREE;
11248 switch (code)
11250 case NOP_EXPR:
11251 case FLOAT_EXPR:
11252 case CONVERT_EXPR:
11253 case FIX_TRUNC_EXPR:
11254 case FIX_FLOOR_EXPR:
11255 case FIX_CEIL_EXPR:
11256 case FIX_ROUND_EXPR:
11257 return fold_convert_const (code, type, op0);
11259 case NEGATE_EXPR:
11260 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11261 return fold_negate_const (op0, type);
11262 else
11263 return NULL_TREE;
11265 case ABS_EXPR:
11266 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11267 return fold_abs_const (op0, type);
11268 else
11269 return NULL_TREE;
11271 case BIT_NOT_EXPR:
11272 if (TREE_CODE (op0) == INTEGER_CST)
11273 return fold_not_const (op0, type);
11274 else
11275 return NULL_TREE;
11277 case REALPART_EXPR:
11278 if (TREE_CODE (op0) == COMPLEX_CST)
11279 return TREE_REALPART (op0);
11280 else
11281 return NULL_TREE;
11283 case IMAGPART_EXPR:
11284 if (TREE_CODE (op0) == COMPLEX_CST)
11285 return TREE_IMAGPART (op0);
11286 else
11287 return NULL_TREE;
11289 case CONJ_EXPR:
11290 if (TREE_CODE (op0) == COMPLEX_CST
11291 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11292 return build_complex (type, TREE_REALPART (op0),
11293 negate_expr (TREE_IMAGPART (op0)));
11294 return NULL_TREE;
11296 default:
11297 return NULL_TREE;
11301 /* If EXP represents referencing an element in a constant string
11302 (either via pointer arithmetic or array indexing), return the
11303 tree representing the value accessed, otherwise return NULL. */
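/* For instance (illustrative), folding the ARRAY_REF "hello"[1]
   yields the character constant 'e', provided the index is a
   constant within the string bounds and the element type has a
   single-byte integer mode.  */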
11305 tree
11306 fold_read_from_constant_string (tree exp)
11308 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11310 tree exp1 = TREE_OPERAND (exp, 0);
11311 tree index;
11312 tree string;
11314 if (TREE_CODE (exp) == INDIRECT_REF)
11315 string = string_constant (exp1, &index);
11316 else
11318 tree low_bound = array_ref_low_bound (exp);
11319 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11321 /* Optimize the special-case of a zero lower bound.
11323 We convert the low_bound to sizetype to avoid some problems
11324 with constant folding. (E.g. suppose the lower bound is 1,
11325 and its mode is QI. Without the conversion, (ARRAY
11326 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11327 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11328 if (! integer_zerop (low_bound))
11329 index = size_diffop (index, fold_convert (sizetype, low_bound));
11331 string = exp1;
11334 if (string
11335 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11336 && TREE_CODE (string) == STRING_CST
11337 && TREE_CODE (index) == INTEGER_CST
11338 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11339 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11340 == MODE_INT)
11341 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11342 return fold_convert (TREE_TYPE (exp),
11343 build_int_cst (NULL_TREE,
11344 (TREE_STRING_POINTER (string)
11345 [TREE_INT_CST_LOW (index)])));
11347 return NULL;
11350 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11351 an integer constant or real constant.
11353 TYPE is the type of the result. */
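/* Worked example (illustrative): in a 32-bit signed type, negating
   the most negative value -2147483648 wraps back to itself;
   neg_double reports the overflow and force_fit_type then marks the
   result with TREE_OVERFLOW.  */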
11355 static tree
11356 fold_negate_const (tree arg0, tree type)
11358 tree t = NULL_TREE;
11360 switch (TREE_CODE (arg0))
11362 case INTEGER_CST:
11364 unsigned HOST_WIDE_INT low;
11365 HOST_WIDE_INT high;
11366 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11367 TREE_INT_CST_HIGH (arg0),
11368 &low, &high);
11369 t = build_int_cst_wide (type, low, high);
11370 t = force_fit_type (t, 1,
11371 (overflow | TREE_OVERFLOW (arg0))
11372 && !TYPE_UNSIGNED (type),
11373 TREE_CONSTANT_OVERFLOW (arg0));
11374 break;
11377 case REAL_CST:
11378 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11379 break;
11381 default:
11382 gcc_unreachable ();
11385 return t;
11388 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11389 an integer constant or real constant.
11391 TYPE is the type of the result. */
11393 tree
11394 fold_abs_const (tree arg0, tree type)
11396 tree t = NULL_TREE;
11398 switch (TREE_CODE (arg0))
11400 case INTEGER_CST:
11401 /* If the value is unsigned, then the absolute value is
11402 the same as the ordinary value. */
11403 if (TYPE_UNSIGNED (type))
11404 t = arg0;
11405 /* Similarly, if the value is non-negative. */
11406 else if (INT_CST_LT (integer_minus_one_node, arg0))
11407 t = arg0;
11408 /* If the value is negative, then the absolute value is
11409 its negation. */
11410 else
11412 unsigned HOST_WIDE_INT low;
11413 HOST_WIDE_INT high;
11414 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11415 TREE_INT_CST_HIGH (arg0),
11416 &low, &high);
11417 t = build_int_cst_wide (type, low, high);
11418 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11419 TREE_CONSTANT_OVERFLOW (arg0));
11421 break;
11423 case REAL_CST:
11424 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11425 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11426 else
11427 t = arg0;
11428 break;
11430 default:
11431 gcc_unreachable ();
11434 return t;
11437 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11438 constant. TYPE is the type of the result. */
11440 static tree
11441 fold_not_const (tree arg0, tree type)
11443 tree t = NULL_TREE;
11445 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11447 t = build_int_cst_wide (type,
11448 ~ TREE_INT_CST_LOW (arg0),
11449 ~ TREE_INT_CST_HIGH (arg0));
11450 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11451 TREE_CONSTANT_OVERFLOW (arg0));
11453 return t;
11456 /* Given CODE, a relational operator, the target type, TYPE and two
11457 constant operands OP0 and OP1, return the result of the
11458 relational operation. If the result is not a compile time
11459 constant, then return NULL_TREE. */
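/* For example (illustrative), when either operand is a NaN,
   "x == NaN" folds to 0 and "x != NaN" folds to 1, while an ordered
   comparison such as "x < NaN" folds to 0 only if -ftrapping-math is
   off; otherwise NULL_TREE is returned, because the comparison may
   raise an invalid operation exception.  */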
11461 static tree
11462 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11464 int result, invert;
11466 /* From here on, the only cases we handle are when the result is
11467 known to be a constant. */
11469 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11471 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11472 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11474 /* Handle the cases where either operand is a NaN. */
11475 if (real_isnan (c0) || real_isnan (c1))
11477 switch (code)
11479 case EQ_EXPR:
11480 case ORDERED_EXPR:
11481 result = 0;
11482 break;
11484 case NE_EXPR:
11485 case UNORDERED_EXPR:
11486 case UNLT_EXPR:
11487 case UNLE_EXPR:
11488 case UNGT_EXPR:
11489 case UNGE_EXPR:
11490 case UNEQ_EXPR:
11491 result = 1;
11492 break;
11494 case LT_EXPR:
11495 case LE_EXPR:
11496 case GT_EXPR:
11497 case GE_EXPR:
11498 case LTGT_EXPR:
11499 if (flag_trapping_math)
11500 return NULL_TREE;
11501 result = 0;
11502 break;
11504 default:
11505 gcc_unreachable ();
11508 return constant_boolean_node (result, type);
11511 return constant_boolean_node (real_compare (code, c0, c1), type);
11514 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11516 To compute GT, swap the arguments and do LT.
11517 To compute GE, do LT and invert the result.
11518 To compute LE, swap the arguments, do LT and invert the result.
11519 To compute NE, do EQ and invert the result.
11521 Therefore, the code below must handle only EQ and LT. */
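/* Worked example (illustrative): to fold 3 >= 5, GE is inverted
   into LT, 3 < 5 yields 1, and inverting that result gives 0.  */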
11523 if (code == LE_EXPR || code == GT_EXPR)
11525 tree tem = op0;
11526 op0 = op1;
11527 op1 = tem;
11528 code = swap_tree_comparison (code);
11531 /* Note that it is safe to invert for real values here because we
11532 have already handled the one case where it matters. */
11534 invert = 0;
11535 if (code == NE_EXPR || code == GE_EXPR)
11537 invert = 1;
11538 code = invert_tree_comparison (code, false);
11541 /* Compute a result for LT or EQ if args permit;
11542 otherwise return NULL_TREE. */
11543 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11545 if (code == EQ_EXPR)
11546 result = tree_int_cst_equal (op0, op1);
11547 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11548 result = INT_CST_LT_UNSIGNED (op0, op1);
11549 else
11550 result = INT_CST_LT (op0, op1);
11552 else
11553 return NULL_TREE;
11555 if (invert)
11556 result ^= 1;
11557 return constant_boolean_node (result, type);
11560 /* Build an expression for a cleanup point containing EXPR, with type
11561 TYPE. Don't build a cleanup point expression for an EXPR which
11562 doesn't have side effects. */
11564 tree
11565 fold_build_cleanup_point_expr (tree type, tree expr)
11567 /* If the expression does not have side effects then we don't have to wrap
11568 it with a cleanup point expression. */
11569 if (!TREE_SIDE_EFFECTS (expr))
11570 return expr;
11572 /* If the expression is a return, check whether the expression inside
11573 the return, or the right-hand side of the MODIFY_EXPR inside the
11574 return, has side effects. If either does not, we don't need to wrap
11575 EXPR in a cleanup point expression. Note we don't check the left-hand
11576 side of the MODIFY_EXPR because it should always be the return decl. */
11577 if (TREE_CODE (expr) == RETURN_EXPR)
11579 tree op = TREE_OPERAND (expr, 0);
11580 if (!op || !TREE_SIDE_EFFECTS (op))
11581 return expr;
11582 op = TREE_OPERAND (op, 1);
11583 if (!TREE_SIDE_EFFECTS (op))
11584 return expr;
11587 return build1 (CLEANUP_POINT_EXPR, type, expr);
11590 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11591 avoid confusing the gimplify process. */
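/* For instance (a sketch): given a pointer P, building the address
   of the INDIRECT_REF *P simplifies &*P back to P, wrapped in a
   NOP_EXPR when P's type differs from the requested pointer type.  */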
11593 tree
11594 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11596 /* The size of the object is not relevant when talking about its address. */
11597 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11598 t = TREE_OPERAND (t, 0);
11600 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11601 if (TREE_CODE (t) == INDIRECT_REF
11602 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11604 t = TREE_OPERAND (t, 0);
11605 if (TREE_TYPE (t) != ptrtype)
11606 t = build1 (NOP_EXPR, ptrtype, t);
11608 else
11610 tree base = t;
11612 while (handled_component_p (base))
11613 base = TREE_OPERAND (base, 0);
11614 if (DECL_P (base))
11615 TREE_ADDRESSABLE (base) = 1;
11617 t = build1 (ADDR_EXPR, ptrtype, t);
11620 return t;
11623 tree
11624 build_fold_addr_expr (tree t)
11626 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11629 /* Given a pointer value T, return a simplified version of an indirection
11630 through T, or NULL_TREE if no simplification is possible. */
11632 static tree
11633 fold_indirect_ref_1 (tree t)
11635 tree type = TREE_TYPE (TREE_TYPE (t));
11636 tree sub = t;
11637 tree subtype;
11639 STRIP_NOPS (sub);
11640 subtype = TREE_TYPE (sub);
11641 if (!POINTER_TYPE_P (subtype))
11642 return NULL_TREE;
11644 if (TREE_CODE (sub) == ADDR_EXPR)
11646 tree op = TREE_OPERAND (sub, 0);
11647 tree optype = TREE_TYPE (op);
11648 /* *&p => p */
11649 if (lang_hooks.types_compatible_p (type, optype))
11650 return op;
11651 /* *(foo *)&fooarray => fooarray[0] */
11652 else if (TREE_CODE (optype) == ARRAY_TYPE
11653 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11655 tree type_domain = TYPE_DOMAIN (optype);
11656 tree min_val = size_zero_node;
11657 if (type_domain && TYPE_MIN_VALUE (type_domain))
11658 min_val = TYPE_MIN_VALUE (type_domain);
11659 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11663 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11664 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11665 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11667 tree type_domain;
11668 tree min_val = size_zero_node;
11669 sub = build_fold_indirect_ref (sub);
11670 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11671 if (type_domain && TYPE_MIN_VALUE (type_domain))
11672 min_val = TYPE_MIN_VALUE (type_domain);
11673 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11676 return NULL_TREE;
11679 /* Builds an expression for an indirection through T, simplifying some
11680 cases. */
11682 tree
11683 build_fold_indirect_ref (tree t)
11685 tree sub = fold_indirect_ref_1 (t);
11687 if (sub)
11688 return sub;
11689 else
11690 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11693 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11695 tree
11696 fold_indirect_ref (tree t)
11698 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11700 if (sub)
11701 return sub;
11702 else
11703 return t;
11706 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11707 whose result is ignored. The type of the returned tree need not be
11708 the same as the original expression. */
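/* For example (illustrative): if T is "f () + 3" and its result is
   ignored, the addition is dropped and "f ()" is returned; if T has
   no side effects at all, integer_zero_node is returned instead.  */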
11710 tree
11711 fold_ignored_result (tree t)
11713 if (!TREE_SIDE_EFFECTS (t))
11714 return integer_zero_node;
11716 for (;;)
11717 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11719 case tcc_unary:
11720 t = TREE_OPERAND (t, 0);
11721 break;
11723 case tcc_binary:
11724 case tcc_comparison:
11725 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11726 t = TREE_OPERAND (t, 0);
11727 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11728 t = TREE_OPERAND (t, 1);
11729 else
11730 return t;
11731 break;
11733 case tcc_expression:
11734 switch (TREE_CODE (t))
11736 case COMPOUND_EXPR:
11737 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11738 return t;
11739 t = TREE_OPERAND (t, 0);
11740 break;
11742 case COND_EXPR:
11743 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11744 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11745 return t;
11746 t = TREE_OPERAND (t, 0);
11747 break;
11749 default:
11750 return t;
11752 break;
11754 default:
11755 return t;
11759 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11760 This can only be applied to objects of a sizetype. */
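/* Worked example (illustrative): round_up (37, 8) takes the
   power-of-two path and computes (37 + 7) & -8 = 44 & ~7 = 40.  */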
11762 tree
11763 round_up (tree value, int divisor)
11765 tree div = NULL_TREE;
11767 gcc_assert (divisor > 0);
11768 if (divisor == 1)
11769 return value;
11771 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11772 have to do anything. Only do this when we are not given a
11773 constant, because in that case, this check is more expensive
11774 than just doing the rounding. */
11775 if (TREE_CODE (value) != INTEGER_CST)
11777 div = build_int_cst (TREE_TYPE (value), divisor);
11779 if (multiple_of_p (TREE_TYPE (value), value, div))
11780 return value;
11783 /* If divisor is a power of two, simplify this to bit manipulation. */
11784 if (divisor == (divisor & -divisor))
11786 tree t;
11788 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11789 value = size_binop (PLUS_EXPR, value, t);
11790 t = build_int_cst (TREE_TYPE (value), -divisor);
11791 value = size_binop (BIT_AND_EXPR, value, t);
11793 else
11795 if (!div)
11796 div = build_int_cst (TREE_TYPE (value), divisor);
11797 value = size_binop (CEIL_DIV_EXPR, value, div);
11798 value = size_binop (MULT_EXPR, value, div);
11801 return value;
11804 /* Likewise, but round down. */
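/* Worked example (illustrative): round_down (37, 8) computes
   37 & -8 = 32 via the power-of-two path.  */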
11806 tree
11807 round_down (tree value, int divisor)
11809 tree div = NULL_TREE;
11811 gcc_assert (divisor > 0);
11812 if (divisor == 1)
11813 return value;
11815 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11816 have to do anything. Only do this when we are not given a
11817 constant, because in that case, this check is more expensive
11818 than just doing the rounding. */
11819 if (TREE_CODE (value) != INTEGER_CST)
11821 div = build_int_cst (TREE_TYPE (value), divisor);
11823 if (multiple_of_p (TREE_TYPE (value), value, div))
11824 return value;
11827 /* If divisor is a power of two, simplify this to bit manipulation. */
11828 if (divisor == (divisor & -divisor))
11830 tree t;
11832 t = build_int_cst (TREE_TYPE (value), -divisor);
11833 value = size_binop (BIT_AND_EXPR, value, t);
11835 else
11837 if (!div)
11838 div = build_int_cst (TREE_TYPE (value), divisor);
11839 value = size_binop (FLOOR_DIV_EXPR, value, div);
11840 value = size_binop (MULT_EXPR, value, div);
11843 return value;
11846 /* Returns the pointer to the base of the object addressed by EXP and
11847 extracts the information about the offset of the access, storing it
11848 in *PBITPOS and *POFFSET. */
11850 static tree
11851 split_address_to_core_and_offset (tree exp,
11852 HOST_WIDE_INT *pbitpos, tree *poffset)
11854 tree core;
11855 enum machine_mode mode;
11856 int unsignedp, volatilep;
11857 HOST_WIDE_INT bitsize;
11859 if (TREE_CODE (exp) == ADDR_EXPR)
11861 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11862 poffset, &mode, &unsignedp, &volatilep,
11863 false);
11865 if (TREE_CODE (core) == INDIRECT_REF)
11866 core = TREE_OPERAND (core, 0);
11868 else
11870 core = exp;
11871 *pbitpos = 0;
11872 *poffset = NULL_TREE;
11875 return core;
11878 /* Returns true if the addresses of E1 and E2 differ by a constant,
11879 false otherwise. If they do, E1 - E2 is stored in *DIFF. */
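/* For instance (a sketch): with char a[10], e1 = &a[3] and e2 = &a[0]
   share the core a, with bit positions 3 * BITS_PER_UNIT and 0, so
   *DIFF is set to 3 and true is returned.  */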
11881 bool
11882 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11884 tree core1, core2;
11885 HOST_WIDE_INT bitpos1, bitpos2;
11886 tree toffset1, toffset2, tdiff, type;
11888 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11889 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11891 if (bitpos1 % BITS_PER_UNIT != 0
11892 || bitpos2 % BITS_PER_UNIT != 0
11893 || !operand_equal_p (core1, core2, 0))
11894 return false;
11896 if (toffset1 && toffset2)
11898 type = TREE_TYPE (toffset1);
11899 if (type != TREE_TYPE (toffset2))
11900 toffset2 = fold_convert (type, toffset2);
11902 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11903 if (!host_integerp (tdiff, 0))
11904 return false;
11906 *diff = tree_low_cst (tdiff, 0);
11908 else if (toffset1 || toffset2)
11910 /* If only one of the offsets is non-constant, the difference cannot
11911 be a constant. */
11912 return false;
11914 else
11915 *diff = 0;
11917 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11918 return true;
11921 /* Simplify the floating point expression EXP when the sign of the
11922 result is not significant. Return NULL_TREE if no simplification
11923 is possible. */
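/* For example (illustrative): given EXP = (-x) * y, the NEGATE_EXPR
   is stripped and x * y is rebuilt; for EXP = x + y nothing can be
   stripped, so NULL_TREE is returned.  */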
11925 tree
11926 fold_strip_sign_ops (tree exp)
11928 tree arg0, arg1;
11930 switch (TREE_CODE (exp))
11932 case ABS_EXPR:
11933 case NEGATE_EXPR:
11934 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11935 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11937 case MULT_EXPR:
11938 case RDIV_EXPR:
11939 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11940 return NULL_TREE;
11941 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11942 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11943 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11944 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11945 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11946 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11947 break;
11949 default:
11950 break;
11952 return NULL_TREE;