gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary-precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
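
/* For illustration (with 8-bit values for brevity; the macro itself
   operates on HOST_WIDE_INTs): 0x70 + 0x70 wraps to 0xE0.  The operands
   agree in sign, so ~(a ^ b) has the sign bit set, and the operand and
   the sum disagree, so (a ^ sum) has it set too; the AND is negative
   and overflow is reported.  For 0x70 + 0x90 the operands already
   differ in sign, so no signed overflow is possible and the macro
   yields 0.  */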
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each original `HOST_WIDE_INT' piece is
   LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
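
/* For example, assuming HOST_BITS_PER_WIDE_INT == 32: BASE is 0x10000,
   LOWPART (0x12345678) is 0x5678 and HIGHPART (0x12345678) is 0x1234,
   so LOWPART (x) + HIGHPART (x) * BASE reassembles x.  */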
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
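
/* Round-trip example, assuming HOST_BITS_PER_WIDE_INT == 32:
   encode (words, 0x89ABCDEF, 0x01234567) produces the half-word digits
   {0xCDEF, 0x89AB, 0x4567, 0x0123}, and decode reconstructs
   *LOW = 0xCDEF + 0x89AB * BASE = 0x89ABCDEF, and likewise *HI.  */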
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
        OVERFLOWED_CONST is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
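
/* Example: for a signed 8-bit type and a constant with LOW == 0x1ff,
   the bits beyond the precision are cleared (leaving 0xff) and then
   sign extended, so the returned node has the value -1.  Because the
   value changed, a fresh node is built, and with OVERFLOWABLE > 0
   (signed overflow is of interest) TREE_OVERFLOW and
   TREE_CONSTANT_OVERFLOW are set on a copy of it.  */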
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
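
/* Example, assuming HOST_BITS_PER_WIDE_INT == 32: adding the doubleword
   (l1 = 0xffffffff, h1 = 0) to (l2 = 1, h2 = 0) wraps the low word to
   0, so (l < l1) supplies the carry into the high word, giving (0, 1).
   OVERFLOW_SUM_SIGN (0, 0, 1) is 0: no signed overflow, since 2^32
   fits in a signed doubleword.  */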
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
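
/* The lone overflow case is the most negative doubleword, which is its
   own negation: with l1 == 0 and h1 carrying only the sign bit, -h1
   equals h1, so (*hv & h1) keeps the sign bit set and the < 0 test
   reports the overflow.  */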
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
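
/* Example, assuming HOST_BITS_PER_WIDE_INT == 32: squaring BASE
   (l1 = l2 = 0x10000, h1 = h2 = 0) yields *LV = 0, *HV = 1, i.e. 2^32.
   The upper digits prod[4..7] are all zero, which agrees with the
   non-negative low half, so no overflow is reported.  */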
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
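
/* A rotate is composed from the two shift helpers: the left shift by
   COUNT is OR'd with a logical right shift by PREC - COUNT.  For
   example, with prec == 16, rotating l1 == 0x8001 left by 1 gives
   0x0002 | 0x0001 == 0x0003.  */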
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
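
/* Example of the rounding modes, dividing -7 by 2 (signed): the trial
   quotient is -3 with remainder -1.  TRUNC keeps -3 rem -1; FLOOR
   adjusts to -4 rem 1; CEIL leaves -3 rem -1 (a negative quotient is
   already the ceiling); ROUND adjusts to -4 rem 1, since
   2 * |rem| >= |den| means -3.5 rounds away from zero.  */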
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
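
/* Example, assuming HOST_BITS_PER_WIDE_INT == 64: for a signed 32-bit
   type, VAL is masked to the low 32 bits and compared against 1 << 31,
   so only INT_MIN (0x80000000) fails the test; every other constant of
   the type can be safely negated.  */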
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
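
/* Example: splitting IN = X + 5 with CODE == PLUS_EXPR returns X as
   the variable part and stores 5 in *LITP.  For IN = X - 5 the literal
   is subtracted, so it is stored in *MINUS_LITP instead.  */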
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make a new constant with the new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
1879 /* Convert expression ARG to type TYPE. Used by the middle-end for
1880 simple conversions in preference to calling the front-end's convert. */
1882 tree
1883 fold_convert (tree type, tree arg)
1885 tree orig = TREE_TYPE (arg);
1886 tree tem;
1888 if (type == orig)
1889 return arg;
1891 if (TREE_CODE (arg) == ERROR_MARK
1892 || TREE_CODE (type) == ERROR_MARK
1893 || TREE_CODE (orig) == ERROR_MARK)
1894 return error_mark_node;
1896 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1897 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1898 TYPE_MAIN_VARIANT (orig)))
1899 return fold_build1 (NOP_EXPR, type, arg);
1901 switch (TREE_CODE (type))
1903 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 if (TREE_CODE (arg) == INTEGER_CST)
1908 tem = fold_convert_const (NOP_EXPR, type, arg);
1909 if (tem != NULL_TREE)
1910 return tem;
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1913 || TREE_CODE (orig) == OFFSET_TYPE)
1914 return fold_build1 (NOP_EXPR, type, arg);
1915 if (TREE_CODE (orig) == COMPLEX_TYPE)
1917 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1918 return fold_convert (type, tem);
1920 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922 return fold_build1 (NOP_EXPR, type, arg);
1924 case REAL_TYPE:
1925 if (TREE_CODE (arg) == INTEGER_CST)
1927 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1931 else if (TREE_CODE (arg) == REAL_CST)
1933 tem = fold_convert_const (NOP_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1935 return tem;
1938 switch (TREE_CODE (orig))
1940 case INTEGER_TYPE: case CHAR_TYPE:
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold_build1 (FLOAT_EXPR, type, arg);
1945 case REAL_TYPE:
1946 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1947 type, arg);
1949 case COMPLEX_TYPE:
1950 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1951 return fold_convert (type, tem);
1953 default:
1954 gcc_unreachable ();
1957 case COMPLEX_TYPE:
1958 switch (TREE_CODE (orig))
1960 case INTEGER_TYPE: case CHAR_TYPE:
1961 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1962 case POINTER_TYPE: case REFERENCE_TYPE:
1963 case REAL_TYPE:
1964 return build2 (COMPLEX_EXPR, type,
1965 fold_convert (TREE_TYPE (type), arg),
1966 fold_convert (TREE_TYPE (type), integer_zero_node));
1967 case COMPLEX_TYPE:
1969 tree rpart, ipart;
1971 if (TREE_CODE (arg) == COMPLEX_EXPR)
1973 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1974 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1975 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1978 arg = save_expr (arg);
1979 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1980 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
1981 rpart = fold_convert (TREE_TYPE (type), rpart);
1982 ipart = fold_convert (TREE_TYPE (type), ipart);
1983 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
1986 default:
1987 gcc_unreachable ();
1990 case VECTOR_TYPE:
1991 if (integer_zerop (arg))
1992 return build_zero_vector (type);
1993 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1994 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1995 || TREE_CODE (orig) == VECTOR_TYPE);
1996 return fold_build1 (NOP_EXPR, type, arg);
1998 case VOID_TYPE:
1999 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2001 default:
2002 gcc_unreachable ();
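/* A few typical results (illustrative sketch, not from the original
   sources), using the usual global type nodes:

     fold_convert (double_type_node, integer_one_node)
       --> REAL_CST 1.0			(INTEGER_CST via FLOAT_EXPR)
     fold_convert (complex_double_type_node, integer_one_node)
       --> COMPLEX_EXPR <1.0, 0.0>	(zero imaginary part added)
     fold_convert (void_type_node, x)
       --> CONVERT_EXPR around fold_ignored_result (x).  */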
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
2008 tree
2009 non_lvalue (tree x)
2011 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2012 us. */
2013 if (in_gimple_form)
2014 return x;
2016 /* We only need to wrap lvalue tree codes. */
2017 switch (TREE_CODE (x))
2019 case VAR_DECL:
2020 case PARM_DECL:
2021 case RESULT_DECL:
2022 case LABEL_DECL:
2023 case FUNCTION_DECL:
2024 case SSA_NAME:
2026 case COMPONENT_REF:
2027 case INDIRECT_REF:
2028 case ALIGN_INDIRECT_REF:
2029 case MISALIGNED_INDIRECT_REF:
2030 case ARRAY_REF:
2031 case ARRAY_RANGE_REF:
2032 case BIT_FIELD_REF:
2033 case OBJ_TYPE_REF:
2035 case REALPART_EXPR:
2036 case IMAGPART_EXPR:
2037 case PREINCREMENT_EXPR:
2038 case PREDECREMENT_EXPR:
2039 case SAVE_EXPR:
2040 case TRY_CATCH_EXPR:
2041 case WITH_CLEANUP_EXPR:
2042 case COMPOUND_EXPR:
2043 case MODIFY_EXPR:
2044 case TARGET_EXPR:
2045 case COND_EXPR:
2046 case BIND_EXPR:
2047 case MIN_EXPR:
2048 case MAX_EXPR:
2049 break;
2051 default:
2052 /* Assume the worst for front-end tree codes. */
2053 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2054 break;
2055 return x;
2057 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2060 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2061 Zero means allow extended lvalues. */
2063 int pedantic_lvalues;
2065 /* When pedantic, return an expr equal to X but certainly not valid as a
2066 pedantic lvalue. Otherwise, return X. */
2068 static tree
2069 pedantic_non_lvalue (tree x)
2071 if (pedantic_lvalues)
2072 return non_lvalue (x);
2073 else
2074 return x;
2077 /* Given a tree comparison code, return the code that is the logical inverse
2078 of the given code. It is not safe to do this for floating-point
2079 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2080 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2082 static enum tree_code
2083 invert_tree_comparison (enum tree_code code, bool honor_nans)
2085 if (honor_nans && flag_trapping_math)
2086 return ERROR_MARK;
2088 switch (code)
2090 case EQ_EXPR:
2091 return NE_EXPR;
2092 case NE_EXPR:
2093 return EQ_EXPR;
2094 case GT_EXPR:
2095 return honor_nans ? UNLE_EXPR : LE_EXPR;
2096 case GE_EXPR:
2097 return honor_nans ? UNLT_EXPR : LT_EXPR;
2098 case LT_EXPR:
2099 return honor_nans ? UNGE_EXPR : GE_EXPR;
2100 case LE_EXPR:
2101 return honor_nans ? UNGT_EXPR : GT_EXPR;
2102 case LTGT_EXPR:
2103 return UNEQ_EXPR;
2104 case UNEQ_EXPR:
2105 return LTGT_EXPR;
2106 case UNGT_EXPR:
2107 return LE_EXPR;
2108 case UNGE_EXPR:
2109 return LT_EXPR;
2110 case UNLT_EXPR:
2111 return GE_EXPR;
2112 case UNLE_EXPR:
2113 return GT_EXPR;
2114 case ORDERED_EXPR:
2115 return UNORDERED_EXPR;
2116 case UNORDERED_EXPR:
2117 return ORDERED_EXPR;
2118 default:
2119 gcc_unreachable ();
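/* Examples (illustrative, not in the original sources):

     invert_tree_comparison (LT_EXPR, false)  --> GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   --> UNGE_EXPR, since with
       NaNs !(x < y) means "x unordered, greater than, or equal to y"
     any code with HONOR_NANS and flag_trapping_math both set
       --> ERROR_MARK, because the inverted form would change which
       operands trap.  */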
2123 /* Similar, but return the comparison that results if the operands are
2124 swapped. This is safe for floating-point. */
2126 enum tree_code
2127 swap_tree_comparison (enum tree_code code)
2129 switch (code)
2131 case EQ_EXPR:
2132 case NE_EXPR:
2133 return code;
2134 case GT_EXPR:
2135 return LT_EXPR;
2136 case GE_EXPR:
2137 return LE_EXPR;
2138 case LT_EXPR:
2139 return GT_EXPR;
2140 case LE_EXPR:
2141 return GE_EXPR;
2142 default:
2143 gcc_unreachable ();
2148 /* Convert a comparison tree code from an enum tree_code representation
2149 into a compcode bit-based encoding. This function is the inverse of
2150 compcode_to_comparison. */
2152 static enum comparison_code
2153 comparison_to_compcode (enum tree_code code)
2155 switch (code)
2157 case LT_EXPR:
2158 return COMPCODE_LT;
2159 case EQ_EXPR:
2160 return COMPCODE_EQ;
2161 case LE_EXPR:
2162 return COMPCODE_LE;
2163 case GT_EXPR:
2164 return COMPCODE_GT;
2165 case NE_EXPR:
2166 return COMPCODE_NE;
2167 case GE_EXPR:
2168 return COMPCODE_GE;
2169 case ORDERED_EXPR:
2170 return COMPCODE_ORD;
2171 case UNORDERED_EXPR:
2172 return COMPCODE_UNORD;
2173 case UNLT_EXPR:
2174 return COMPCODE_UNLT;
2175 case UNEQ_EXPR:
2176 return COMPCODE_UNEQ;
2177 case UNLE_EXPR:
2178 return COMPCODE_UNLE;
2179 case UNGT_EXPR:
2180 return COMPCODE_UNGT;
2181 case LTGT_EXPR:
2182 return COMPCODE_LTGT;
2183 case UNGE_EXPR:
2184 return COMPCODE_UNGE;
2185 default:
2186 gcc_unreachable ();
2190 /* Convert a compcode bit-based encoding of a comparison operator back
2191 to GCC's enum tree_code representation. This function is the
2192 inverse of comparison_to_compcode. */
2194 static enum tree_code
2195 compcode_to_comparison (enum comparison_code code)
2197 switch (code)
2199 case COMPCODE_LT:
2200 return LT_EXPR;
2201 case COMPCODE_EQ:
2202 return EQ_EXPR;
2203 case COMPCODE_LE:
2204 return LE_EXPR;
2205 case COMPCODE_GT:
2206 return GT_EXPR;
2207 case COMPCODE_NE:
2208 return NE_EXPR;
2209 case COMPCODE_GE:
2210 return GE_EXPR;
2211 case COMPCODE_ORD:
2212 return ORDERED_EXPR;
2213 case COMPCODE_UNORD:
2214 return UNORDERED_EXPR;
2215 case COMPCODE_UNLT:
2216 return UNLT_EXPR;
2217 case COMPCODE_UNEQ:
2218 return UNEQ_EXPR;
2219 case COMPCODE_UNLE:
2220 return UNLE_EXPR;
2221 case COMPCODE_UNGT:
2222 return UNGT_EXPR;
2223 case COMPCODE_LTGT:
2224 return LTGT_EXPR;
2225 case COMPCODE_UNGE:
2226 return UNGE_EXPR;
2227 default:
2228 gcc_unreachable ();
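/* Why the bit encoding matters (explanatory note, not from the original
   sources): bits LT (1), EQ (2), GT (4) and UNORD (8) each stand for
   one of the four mutually exclusive comparison outcomes, so AND/OR of
   two codes is set intersection/union of outcomes:

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == COMPCODE_LE   (3)
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == COMPCODE_EQ   (2)
     COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD == COMPCODE_NE  (13,
       matching IEEE "!=", which is true for unordered operands)

   combine_comparisons below relies on exactly this property.  */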
2232 /* Return a tree for the comparison which is the combination of
2233 doing the AND or OR (depending on CODE) of the two operations LCODE
2234 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2235 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2236 if this makes the transformation invalid. */
2238 tree
2239 combine_comparisons (enum tree_code code, enum tree_code lcode,
2240 enum tree_code rcode, tree truth_type,
2241 tree ll_arg, tree lr_arg)
2243 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2244 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2245 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2246 enum comparison_code compcode;
2248 switch (code)
2250 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2251 compcode = lcompcode & rcompcode;
2252 break;
2254 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2255 compcode = lcompcode | rcompcode;
2256 break;
2258 default:
2259 return NULL_TREE;
2262 if (!honor_nans)
2264 /* Eliminate unordered comparisons, as well as LTGT and ORD
2265 which are not used unless the mode has NaNs. */
2266 compcode &= ~COMPCODE_UNORD;
2267 if (compcode == COMPCODE_LTGT)
2268 compcode = COMPCODE_NE;
2269 else if (compcode == COMPCODE_ORD)
2270 compcode = COMPCODE_TRUE;
2272 else if (flag_trapping_math)
2274 /* Check that the original operation and the optimized ones will trap
2275 under the same condition. */
2276 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2277 && (lcompcode != COMPCODE_EQ)
2278 && (lcompcode != COMPCODE_ORD);
2279 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2280 && (rcompcode != COMPCODE_EQ)
2281 && (rcompcode != COMPCODE_ORD);
2282 bool trap = (compcode & COMPCODE_UNORD) == 0
2283 && (compcode != COMPCODE_EQ)
2284 && (compcode != COMPCODE_ORD);
2286 /* In a short-circuited boolean expression the LHS might be
2287 such that the RHS, if evaluated, will never trap. For
2288 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2289 if neither x nor y is NaN. (This is a mixed blessing: for
2290 example, the expression above will never trap, hence
2291 optimizing it to x < y would be invalid). */
2292 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2293 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2294 rtrap = false;
2296 /* If the comparison was short-circuited, and only the RHS
2297 trapped, we may now generate a spurious trap. */
2298 if (rtrap && !ltrap
2299 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2300 return NULL_TREE;
2302 /* If we changed the conditions that cause a trap, we lose. */
2303 if ((ltrap || rtrap) != trap)
2304 return NULL_TREE;
2307 if (compcode == COMPCODE_TRUE)
2308 return constant_boolean_node (true, truth_type);
2309 else if (compcode == COMPCODE_FALSE)
2310 return constant_boolean_node (false, truth_type);
2311 else
2312 return fold_build2 (compcode_to_comparison (compcode),
2313 truth_type, ll_arg, lr_arg);
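/* Worked example (illustrative sketch, not from the original sources):
   for "a < b || a == b" on integer operands,

     compcode = COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE

   so

     combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			  boolean_type_node, a, b)

   returns the tree for "a <= b".  With trapping float math the result
   still stands: LT and LE both trap on unordered operands while EQ
   never does, so (ltrap || rtrap) == trap above.  */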
2316 /* Return nonzero if CODE is a tree code that represents a truth value. */
2318 static int
2319 truth_value_p (enum tree_code code)
2321 return (TREE_CODE_CLASS (code) == tcc_comparison
2322 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2323 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2324 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2327 /* Return nonzero if two operands (typically of the same tree node)
2328 are necessarily equal. If either argument has side-effects this
2329 function returns zero. FLAGS modifies behavior as follows:
2331 If OEP_ONLY_CONST is set, only return nonzero for constants.
2332 This function tests whether the operands are indistinguishable;
2333 it does not test whether they are equal using C's == operation.
2334 The distinction is important for IEEE floating point, because
2335 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2336 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2338 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2339 even though it may hold multiple values during a function.
2340 This is because a GCC tree node guarantees that nothing else is
2341 executed between the evaluation of its "operands" (which may often
2342 be evaluated in arbitrary order). Hence if the operands themselves
2343 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2344 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2345 unset means assuming isochronic (or instantaneous) tree equivalence.
2346 Unless comparing arbitrary expression trees, such as from different
2347 statements, this flag can usually be left unset.
2349 If OEP_PURE_SAME is set, then pure functions with identical arguments
2350 are considered the same. It is used when the caller has other ways
2351 to ensure that global memory is unchanged in between. */
2353 int
2354 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2356 /* If either is ERROR_MARK, they aren't equal. */
2357 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2358 return 0;
2360 /* If both types don't have the same signedness, then we can't consider
2361 them equal. We must check this before the STRIP_NOPS calls
2362 because they may change the signedness of the arguments. */
2363 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2364 return 0;
2366 STRIP_NOPS (arg0);
2367 STRIP_NOPS (arg1);
2369 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2370 /* This is needed for conversions and for COMPONENT_REF.
2371 Might as well play it safe and always test this. */
2372 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2373 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2374 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2375 return 0;
2377 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2378 We don't care about side effects in that case because the SAVE_EXPR
2379 takes care of that for us. In all other cases, two expressions are
2380 equal if they have no side effects. If we have two identical
2381 expressions with side effects that should be treated the same due
2382 to the only side effects being identical SAVE_EXPR's, that will
2383 be detected in the recursive calls below. */
2384 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2385 && (TREE_CODE (arg0) == SAVE_EXPR
2386 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2387 return 1;
2389 /* Next handle constant cases, those for which we can return 1 even
2390 if ONLY_CONST is set. */
2391 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2392 switch (TREE_CODE (arg0))
2394 case INTEGER_CST:
2395 return (! TREE_CONSTANT_OVERFLOW (arg0)
2396 && ! TREE_CONSTANT_OVERFLOW (arg1)
2397 && tree_int_cst_equal (arg0, arg1));
2399 case REAL_CST:
2400 return (! TREE_CONSTANT_OVERFLOW (arg0)
2401 && ! TREE_CONSTANT_OVERFLOW (arg1)
2402 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2403 TREE_REAL_CST (arg1)));
2405 case VECTOR_CST:
2407 tree v1, v2;
2409 if (TREE_CONSTANT_OVERFLOW (arg0)
2410 || TREE_CONSTANT_OVERFLOW (arg1))
2411 return 0;
2413 v1 = TREE_VECTOR_CST_ELTS (arg0);
2414 v2 = TREE_VECTOR_CST_ELTS (arg1);
2415 while (v1 && v2)
2417 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2418 flags))
2419 return 0;
2420 v1 = TREE_CHAIN (v1);
2421 v2 = TREE_CHAIN (v2);
2424 return 1;
2427 case COMPLEX_CST:
2428 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2429 flags)
2430 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2431 flags));
2433 case STRING_CST:
2434 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2435 && ! memcmp (TREE_STRING_POINTER (arg0),
2436 TREE_STRING_POINTER (arg1),
2437 TREE_STRING_LENGTH (arg0)));
2439 case ADDR_EXPR:
2440 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2441 0);
2442 default:
2443 break;
2446 if (flags & OEP_ONLY_CONST)
2447 return 0;
2449 /* Define macros to test an operand from arg0 and arg1 for equality and a
2450 variant that allows null and views null as being different from any
2451 non-null value. In the latter case, if either is null, then both
2452 must be; otherwise, do the normal comparison. */
2453 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2454 TREE_OPERAND (arg1, N), flags)
2456 #define OP_SAME_WITH_NULL(N) \
2457 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2458 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2460 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2462 case tcc_unary:
2463 /* Two conversions are equal only if signedness and modes match. */
2464 switch (TREE_CODE (arg0))
2466 case NOP_EXPR:
2467 case CONVERT_EXPR:
2468 case FIX_CEIL_EXPR:
2469 case FIX_TRUNC_EXPR:
2470 case FIX_FLOOR_EXPR:
2471 case FIX_ROUND_EXPR:
2472 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2473 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2474 return 0;
2475 break;
2476 default:
2477 break;
2480 return OP_SAME (0);
2483 case tcc_comparison:
2484 case tcc_binary:
2485 if (OP_SAME (0) && OP_SAME (1))
2486 return 1;
2488 /* For commutative ops, allow the other order. */
2489 return (commutative_tree_code (TREE_CODE (arg0))
2490 && operand_equal_p (TREE_OPERAND (arg0, 0),
2491 TREE_OPERAND (arg1, 1), flags)
2492 && operand_equal_p (TREE_OPERAND (arg0, 1),
2493 TREE_OPERAND (arg1, 0), flags));
2495 case tcc_reference:
2496 /* If either of the pointer (or reference) expressions we are
2497 dereferencing contain a side effect, these cannot be equal. */
2498 if (TREE_SIDE_EFFECTS (arg0)
2499 || TREE_SIDE_EFFECTS (arg1))
2500 return 0;
2502 switch (TREE_CODE (arg0))
2504 case INDIRECT_REF:
2505 case ALIGN_INDIRECT_REF:
2506 case MISALIGNED_INDIRECT_REF:
2507 case REALPART_EXPR:
2508 case IMAGPART_EXPR:
2509 return OP_SAME (0);
2511 case ARRAY_REF:
2512 case ARRAY_RANGE_REF:
2513 /* Operands 2 and 3 may be null. */
2514 return (OP_SAME (0)
2515 && OP_SAME (1)
2516 && OP_SAME_WITH_NULL (2)
2517 && OP_SAME_WITH_NULL (3));
2519 case COMPONENT_REF:
2520 /* Handle operand 2 the same as for ARRAY_REF. */
2521 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2523 case BIT_FIELD_REF:
2524 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2526 default:
2527 return 0;
2530 case tcc_expression:
2531 switch (TREE_CODE (arg0))
2533 case ADDR_EXPR:
2534 case TRUTH_NOT_EXPR:
2535 return OP_SAME (0);
2537 case TRUTH_ANDIF_EXPR:
2538 case TRUTH_ORIF_EXPR:
2539 return OP_SAME (0) && OP_SAME (1);
2541 case TRUTH_AND_EXPR:
2542 case TRUTH_OR_EXPR:
2543 case TRUTH_XOR_EXPR:
2544 if (OP_SAME (0) && OP_SAME (1))
2545 return 1;
2547 /* Otherwise take into account this is a commutative operation. */
2548 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2549 TREE_OPERAND (arg1, 1), flags)
2550 && operand_equal_p (TREE_OPERAND (arg0, 1),
2551 TREE_OPERAND (arg1, 0), flags));
2553 case CALL_EXPR:
2554 /* If the CALL_EXPRs call different functions, then they
2555 clearly cannot be equal. */
2556 if (!OP_SAME (0))
2557 return 0;
2560 unsigned int cef = call_expr_flags (arg0);
2561 if (flags & OEP_PURE_SAME)
2562 cef &= ECF_CONST | ECF_PURE;
2563 else
2564 cef &= ECF_CONST;
2565 if (!cef)
2566 return 0;
2569 /* Now see if all the arguments are the same. operand_equal_p
2570 does not handle TREE_LIST, so we walk the operands here
2571 feeding them to operand_equal_p. */
2572 arg0 = TREE_OPERAND (arg0, 1);
2573 arg1 = TREE_OPERAND (arg1, 1);
2574 while (arg0 && arg1)
2576 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2577 flags))
2578 return 0;
2580 arg0 = TREE_CHAIN (arg0);
2581 arg1 = TREE_CHAIN (arg1);
2584 /* If we get here and both argument lists are exhausted
2585 then the CALL_EXPRs are equal. */
2586 return ! (arg0 || arg1);
2588 default:
2589 return 0;
2592 case tcc_declaration:
2593 /* Consider __builtin_sqrt equal to sqrt. */
2594 return (TREE_CODE (arg0) == FUNCTION_DECL
2595 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2596 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2597 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2599 default:
2600 return 0;
2603 #undef OP_SAME
2604 #undef OP_SAME_WITH_NULL
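/* Examples (illustrative, not from the original sources), with A and B
   side-effect-free local VAR_DECLs of type int:

     operand_equal_p (a + b, b + a, 0)	--> 1 (PLUS_EXPR is commutative)
     operand_equal_p (a - b, b - a, 0)	--> 0 (MINUS_EXPR is not)
     operand_equal_p (a, a, OEP_ONLY_CONST)  --> 0 (not a constant)
     operand_equal_p (f (a), f (a), OEP_PURE_SAME)
       --> 1 only if F is declared const or pure.  */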
2607 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2608 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2610 When in doubt, return 0. */
2612 static int
2613 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2615 int unsignedp1, unsignedpo;
2616 tree primarg0, primarg1, primother;
2617 unsigned int correct_width;
2619 if (operand_equal_p (arg0, arg1, 0))
2620 return 1;
2622 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2623 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2624 return 0;
2626 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2627 and see if the inner values are the same. This removes any
2628 signedness comparison, which doesn't matter here. */
2629 primarg0 = arg0, primarg1 = arg1;
2630 STRIP_NOPS (primarg0);
2631 STRIP_NOPS (primarg1);
2632 if (operand_equal_p (primarg0, primarg1, 0))
2633 return 1;
2635 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2636 actual comparison operand, ARG0.
2638 First throw away any conversions to wider types
2639 already present in the operands. */
2641 primarg1 = get_narrower (arg1, &unsignedp1);
2642 primother = get_narrower (other, &unsignedpo);
2644 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2645 if (unsignedp1 == unsignedpo
2646 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2647 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2649 tree type = TREE_TYPE (arg0);
2651 /* Make sure shorter operand is extended the right way
2652 to match the longer operand. */
2653 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2654 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2656 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2657 return 1;
2660 return 0;
2663 /* See if ARG is an expression that is either a comparison or is performing
2664 arithmetic on comparisons. The comparisons must only be comparing
2665 two different values, which will be stored in *CVAL1 and *CVAL2; if
2666 they are nonzero it means that some operands have already been found.
2667 No variables may be used anywhere else in the expression except in the
2668 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2669 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2671 If this is true, return 1. Otherwise, return zero. */
2673 static int
2674 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2676 enum tree_code code = TREE_CODE (arg);
2677 enum tree_code_class class = TREE_CODE_CLASS (code);
2679 /* We can handle some of the tcc_expression cases here. */
2680 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2681 class = tcc_unary;
2682 else if (class == tcc_expression
2683 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2684 || code == COMPOUND_EXPR))
2685 class = tcc_binary;
2687 else if (class == tcc_expression && code == SAVE_EXPR
2688 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2690 /* If we've already found a CVAL1 or CVAL2, this expression is
2691 too complex to handle. */
2692 if (*cval1 || *cval2)
2693 return 0;
2695 class = tcc_unary;
2696 *save_p = 1;
2699 switch (class)
2701 case tcc_unary:
2702 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2704 case tcc_binary:
2705 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2706 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2707 cval1, cval2, save_p));
2709 case tcc_constant:
2710 return 1;
2712 case tcc_expression:
2713 if (code == COND_EXPR)
2714 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2715 cval1, cval2, save_p)
2716 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2717 cval1, cval2, save_p)
2718 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2719 cval1, cval2, save_p));
2720 return 0;
2722 case tcc_comparison:
2723 /* First see if we can handle the first operand, then the second. For
2724 the second operand, we know *CVAL1 can't be zero. It must be that
2725 one side of the comparison is each of the values; test for the
2726 case where this isn't true by failing if the two operands
2727 are the same. */
2729 if (operand_equal_p (TREE_OPERAND (arg, 0),
2730 TREE_OPERAND (arg, 1), 0))
2731 return 0;
2733 if (*cval1 == 0)
2734 *cval1 = TREE_OPERAND (arg, 0);
2735 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2736 ;
2737 else if (*cval2 == 0)
2738 *cval2 = TREE_OPERAND (arg, 0);
2739 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2740 ;
2741 else
2742 return 0;
2744 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2745 ;
2746 else if (*cval2 == 0)
2747 *cval2 = TREE_OPERAND (arg, 1);
2748 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2749 ;
2750 else
2751 return 0;
2753 return 1;
2755 default:
2756 return 0;
2760 /* ARG is a tree that is known to contain just arithmetic operations and
2761 comparisons. Evaluate the operations in the tree substituting NEW0 for
2762 any occurrence of OLD0 as an operand of a comparison and likewise for
2763 NEW1 and OLD1. */
2765 static tree
2766 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2768 tree type = TREE_TYPE (arg);
2769 enum tree_code code = TREE_CODE (arg);
2770 enum tree_code_class class = TREE_CODE_CLASS (code);
2772 /* We can handle some of the tcc_expression cases here. */
2773 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2774 class = tcc_unary;
2775 else if (class == tcc_expression
2776 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2777 class = tcc_binary;
2779 switch (class)
2781 case tcc_unary:
2782 return fold_build1 (code, type,
2783 eval_subst (TREE_OPERAND (arg, 0),
2784 old0, new0, old1, new1));
2786 case tcc_binary:
2787 return fold_build2 (code, type,
2788 eval_subst (TREE_OPERAND (arg, 0),
2789 old0, new0, old1, new1),
2790 eval_subst (TREE_OPERAND (arg, 1),
2791 old0, new0, old1, new1));
2793 case tcc_expression:
2794 switch (code)
2796 case SAVE_EXPR:
2797 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2799 case COMPOUND_EXPR:
2800 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2802 case COND_EXPR:
2803 return fold_build3 (code, type,
2804 eval_subst (TREE_OPERAND (arg, 0),
2805 old0, new0, old1, new1),
2806 eval_subst (TREE_OPERAND (arg, 1),
2807 old0, new0, old1, new1),
2808 eval_subst (TREE_OPERAND (arg, 2),
2809 old0, new0, old1, new1));
2810 default:
2811 break;
2813 /* Fall through - ??? */
2815 case tcc_comparison:
2817 tree arg0 = TREE_OPERAND (arg, 0);
2818 tree arg1 = TREE_OPERAND (arg, 1);
2820 /* We need to check both for exact equality and tree equality. The
2821 former will be true if the operand has a side-effect. In that
2822 case, we know the operand occurred exactly once. */
2824 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2825 arg0 = new0;
2826 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2827 arg0 = new1;
2829 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2830 arg1 = new0;
2831 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2832 arg1 = new1;
2834 return fold_build2 (code, type, arg0, arg1);
2837 default:
2838 return arg;
2842 /* Return a tree for the case when the result of an expression is RESULT
2843 converted to TYPE and OMITTED was previously an operand of the expression
2844 but is now not needed (e.g., we folded OMITTED * 0).
2846 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2847 the conversion of RESULT to TYPE. */
2849 tree
2850 omit_one_operand (tree type, tree result, tree omitted)
2852 tree t = fold_convert (type, result);
2854 if (TREE_SIDE_EFFECTS (omitted))
2855 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2857 return non_lvalue (t);
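/* Example (illustrative, not from the original sources): when fold
   rewrites "f () * 0" to 0, the call still has to execute, so

     omit_one_operand (integer_type_node, integer_zero_node, call)

   yields COMPOUND_EXPR <f (), 0>; with a side-effect-free OMITTED the
   result is just the converted RESULT wrapped by non_lvalue.  */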
2860 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2862 static tree
2863 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2865 tree t = fold_convert (type, result);
2867 if (TREE_SIDE_EFFECTS (omitted))
2868 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2870 return pedantic_non_lvalue (t);
2873 /* Return a tree for the case when the result of an expression is RESULT
2874 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2875 of the expression but are now not needed.
2877 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2878 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2879 evaluated before OMITTED2. Otherwise, if neither has side effects,
2880 just do the conversion of RESULT to TYPE. */
2882 tree
2883 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2885 tree t = fold_convert (type, result);
2887 if (TREE_SIDE_EFFECTS (omitted2))
2888 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2889 if (TREE_SIDE_EFFECTS (omitted1))
2890 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2892 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2896 /* Return a simplified tree node for the truth-negation of ARG. This
2897 never alters ARG itself. We assume that ARG is an operation that
2898 returns a truth value (0 or 1).
2900 FIXME: one would think we would fold the result, but it causes
2901 problems with the dominator optimizer. */
2902 tree
2903 invert_truthvalue (tree arg)
2905 tree type = TREE_TYPE (arg);
2906 enum tree_code code = TREE_CODE (arg);
2908 if (code == ERROR_MARK)
2909 return arg;
2911 /* If this is a comparison, we can simply invert it, except for
2912 floating-point non-equality comparisons, in which case we just
2913 enclose a TRUTH_NOT_EXPR around what we have. */
2915 if (TREE_CODE_CLASS (code) == tcc_comparison)
2917 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2918 if (FLOAT_TYPE_P (op_type)
2919 && flag_trapping_math
2920 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2921 && code != NE_EXPR && code != EQ_EXPR)
2922 return build1 (TRUTH_NOT_EXPR, type, arg);
2923 else
2925 code = invert_tree_comparison (code,
2926 HONOR_NANS (TYPE_MODE (op_type)));
2927 if (code == ERROR_MARK)
2928 return build1 (TRUTH_NOT_EXPR, type, arg);
2929 else
2930 return build2 (code, type,
2931 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2935 switch (code)
2937 case INTEGER_CST:
2938 return constant_boolean_node (integer_zerop (arg), type);
2940 case TRUTH_AND_EXPR:
2941 return build2 (TRUTH_OR_EXPR, type,
2942 invert_truthvalue (TREE_OPERAND (arg, 0)),
2943 invert_truthvalue (TREE_OPERAND (arg, 1)));
2945 case TRUTH_OR_EXPR:
2946 return build2 (TRUTH_AND_EXPR, type,
2947 invert_truthvalue (TREE_OPERAND (arg, 0)),
2948 invert_truthvalue (TREE_OPERAND (arg, 1)));
2950 case TRUTH_XOR_EXPR:
2951 /* Here we can invert either operand. We invert the first operand
2952 unless the second operand is a TRUTH_NOT_EXPR in which case our
2953 result is the XOR of the first operand with the inside of the
2954 negation of the second operand. */
2956 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2957 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2958 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2959 else
2960 return build2 (TRUTH_XOR_EXPR, type,
2961 invert_truthvalue (TREE_OPERAND (arg, 0)),
2962 TREE_OPERAND (arg, 1));
2964 case TRUTH_ANDIF_EXPR:
2965 return build2 (TRUTH_ORIF_EXPR, type,
2966 invert_truthvalue (TREE_OPERAND (arg, 0)),
2967 invert_truthvalue (TREE_OPERAND (arg, 1)));
2969 case TRUTH_ORIF_EXPR:
2970 return build2 (TRUTH_ANDIF_EXPR, type,
2971 invert_truthvalue (TREE_OPERAND (arg, 0)),
2972 invert_truthvalue (TREE_OPERAND (arg, 1)));
2974 case TRUTH_NOT_EXPR:
2975 return TREE_OPERAND (arg, 0);
2977 case COND_EXPR:
2978 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2979 invert_truthvalue (TREE_OPERAND (arg, 1)),
2980 invert_truthvalue (TREE_OPERAND (arg, 2)));
2982 case COMPOUND_EXPR:
2983 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2984 invert_truthvalue (TREE_OPERAND (arg, 1)));
2986 case NON_LVALUE_EXPR:
2987 return invert_truthvalue (TREE_OPERAND (arg, 0));
2989 case NOP_EXPR:
2990 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2991 break;
2993 case CONVERT_EXPR:
2994 case FLOAT_EXPR:
2995 return build1 (TREE_CODE (arg), type,
2996 invert_truthvalue (TREE_OPERAND (arg, 0)));
2998 case BIT_AND_EXPR:
2999 if (!integer_onep (TREE_OPERAND (arg, 1)))
3000 break;
3001 return build2 (EQ_EXPR, type, arg,
3002 fold_convert (type, integer_zero_node));
3004 case SAVE_EXPR:
3005 return build1 (TRUTH_NOT_EXPR, type, arg);
3007 case CLEANUP_POINT_EXPR:
3008 return build1 (CLEANUP_POINT_EXPR, type,
3009 invert_truthvalue (TREE_OPERAND (arg, 0)));
3011 default:
3012 break;
3014 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3015 return build1 (TRUTH_NOT_EXPR, type, arg);
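/* Examples (illustrative, not from the original sources):

     invert_truthvalue (a && b)  --> !a || !b	(De Morgan, the
	TRUTH_ANDIF_EXPR case above, applied recursively)
     invert_truthvalue (a < b)   --> a >= b	for integers
     invert_truthvalue (x < y)   --> TRUTH_NOT_EXPR <x < y> for
	trapping floats, since invert_tree_comparison returns
	ERROR_MARK there.  */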
3018 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3019 operands are another bit-wise operation with a common input. If so,
3020 distribute the bit operations to save an operation and possibly two if
3021 constants are involved. For example, convert
3022 (A | B) & (A | C) into A | (B & C)
3023 Further simplification will occur if B and C are constants.
3025 If this optimization cannot be done, 0 will be returned. */
3027 static tree
3028 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3030 tree common;
3031 tree left, right;
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 || TREE_CODE (arg0) == code
3035 || (TREE_CODE (arg0) != BIT_AND_EXPR
3036 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3037 return 0;
3039 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3041 common = TREE_OPERAND (arg0, 0);
3042 left = TREE_OPERAND (arg0, 1);
3043 right = TREE_OPERAND (arg1, 1);
3045 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3047 common = TREE_OPERAND (arg0, 0);
3048 left = TREE_OPERAND (arg0, 1);
3049 right = TREE_OPERAND (arg1, 0);
3051 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3053 common = TREE_OPERAND (arg0, 1);
3054 left = TREE_OPERAND (arg0, 0);
3055 right = TREE_OPERAND (arg1, 1);
3057 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3059 common = TREE_OPERAND (arg0, 1);
3060 left = TREE_OPERAND (arg0, 0);
3061 right = TREE_OPERAND (arg1, 0);
3063 else
3064 return 0;
3066 return fold_build2 (TREE_CODE (arg0), type, common,
3067 fold_build2 (code, type, left, right));
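/* Concrete instance (illustrative, not from the original sources),
   with CODE == BIT_AND_EXPR and constant inner operands:

     (x | 0xF0) & (x | 0x0F)
       --> x | (0xF0 & 0x0F)	(this function)
       --> x | 0		(inner constants folded)
       --> x			(after further folding)

   saving the AND outright and, once the constant folds to zero, the
   OR as well.  */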
3070 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3071 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3073 static tree
3074 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3075 int unsignedp)
3077 tree result;
3079 if (bitpos == 0)
3081 tree size = TYPE_SIZE (TREE_TYPE (inner));
3082 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3083 || POINTER_TYPE_P (TREE_TYPE (inner)))
3084 && host_integerp (size, 0)
3085 && tree_low_cst (size, 0) == bitsize)
3086 return fold_convert (type, inner);
3089 result = build3 (BIT_FIELD_REF, type, inner,
3090 size_int (bitsize), bitsize_int (bitpos));
3092 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3094 return result;
3097 /* Optimize a bit-field compare.
3099 There are two cases: First is a compare against a constant and the
3100 second is a comparison of two items where the fields are at the same
3101 bit position relative to the start of a chunk (byte, halfword, word)
3102 large enough to contain it. In these cases we can avoid the shift
3103 implicit in bitfield extractions.
3105 For constants, we emit a compare of the shifted constant with the
3106 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3107 compared. For two fields at the same position, we do the ANDs with the
3108 similar mask and compare the result of the ANDs.
3110 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3111 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3112 are the left and right operands of the comparison, respectively.
3114 If the optimization described above can be done, we return the resulting
3115 tree. Otherwise we return zero. */
3117 static tree
3118 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3119 tree lhs, tree rhs)
3121 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3122 tree type = TREE_TYPE (lhs);
3123 tree signed_type, unsigned_type;
3124 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3125 enum machine_mode lmode, rmode, nmode;
3126 int lunsignedp, runsignedp;
3127 int lvolatilep = 0, rvolatilep = 0;
3128 tree linner, rinner = NULL_TREE;
3129 tree mask;
3130 tree offset;
3132 /* Get all the information about the extractions being done. If the bit size
3133 is the same as the size of the underlying object, we aren't doing an
3134 extraction at all and so can do nothing. We also don't want to
3135 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3136 then will no longer be able to replace it. */
3137 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3138 &lunsignedp, &lvolatilep, false);
3139 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3140 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3141 return 0;
3143 if (!const_p)
3145 /* If this is not a constant, we can only do something if bit positions,
3146 sizes, and signedness are the same. */
3147 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3148 &runsignedp, &rvolatilep, false);
3150 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3151 || lunsignedp != runsignedp || offset != 0
3152 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3153 return 0;
3156 /* See if we can find a mode to refer to this field. We should be able to,
3157 but fail if we can't. */
3158 nmode = get_best_mode (lbitsize, lbitpos,
3159 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3160 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3161 TYPE_ALIGN (TREE_TYPE (rinner))),
3162 word_mode, lvolatilep || rvolatilep);
3163 if (nmode == VOIDmode)
3164 return 0;
3166 /* Set signed and unsigned types of the precision of this mode for the
3167 shifts below. */
3168 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3169 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3171 /* Compute the bit position and size for the new reference and our offset
3172 within it. If the new reference is the same size as the original, we
3173 won't optimize anything, so return zero. */
3174 nbitsize = GET_MODE_BITSIZE (nmode);
3175 nbitpos = lbitpos & ~ (nbitsize - 1);
3176 lbitpos -= nbitpos;
3177 if (nbitsize == lbitsize)
3178 return 0;
3180 if (BYTES_BIG_ENDIAN)
3181 lbitpos = nbitsize - lbitsize - lbitpos;
3183 /* Make the mask to be used against the extracted field. */
3184 mask = build_int_cst (unsigned_type, -1);
3185 mask = force_fit_type (mask, 0, false, false);
3186 mask = fold_convert (unsigned_type, mask);
3187 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3188 mask = const_binop (RSHIFT_EXPR, mask,
3189 size_int (nbitsize - lbitsize - lbitpos), 0);
3191 if (! const_p)
3192 /* If not comparing with constant, just rework the comparison
3193 and return. */
3194 return build2 (code, compare_type,
3195 build2 (BIT_AND_EXPR, unsigned_type,
3196 make_bit_field_ref (linner, unsigned_type,
3197 nbitsize, nbitpos, 1),
3198 mask),
3199 build2 (BIT_AND_EXPR, unsigned_type,
3200 make_bit_field_ref (rinner, unsigned_type,
3201 nbitsize, nbitpos, 1),
3202 mask));
3204 /* Otherwise, we are handling the constant case. See if the constant is too
3205 big for the field. Warn and return a tree for 0 (false) if so. We do
3206 this not only for its own sake, but to avoid having to test for this
3207 error case below. If we didn't, we might generate wrong code.
3209 For unsigned fields, the constant shifted right by the field length should
3210 be all zero. For signed fields, the high-order bits should agree with
3211 the sign bit. */
3213 if (lunsignedp)
3215 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3216 fold_convert (unsigned_type, rhs),
3217 size_int (lbitsize), 0)))
3219 warning ("comparison is always %d due to width of bit-field",
3220 code == NE_EXPR);
3221 return constant_boolean_node (code == NE_EXPR, compare_type);
3224 else
3226 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3227 size_int (lbitsize - 1), 0);
3228 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3230 warning ("comparison is always %d due to width of bit-field",
3231 code == NE_EXPR);
3232 return constant_boolean_node (code == NE_EXPR, compare_type);
3236 /* Single-bit compares should always be against zero. */
3237 if (lbitsize == 1 && ! integer_zerop (rhs))
3239 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3240 rhs = fold_convert (type, integer_zero_node);
3243 /* Make a new bitfield reference, shift the constant over the
3244 appropriate number of bits and mask it with the computed mask
3245 (in case this was a signed field). If we changed it, make a new one. */
3246 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3247 if (lvolatilep)
3249 TREE_SIDE_EFFECTS (lhs) = 1;
3250 TREE_THIS_VOLATILE (lhs) = 1;
3253 rhs = fold (const_binop (BIT_AND_EXPR,
3254 const_binop (LSHIFT_EXPR,
3255 fold_convert (unsigned_type, rhs),
3256 size_int (lbitpos), 0),
3257 mask, 0));
3259 return build2 (code, compare_type,
3260 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3261 rhs);
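/* Worked example (illustrative sketch, not from the original sources):
   for

     struct s { unsigned f : 3; } v;
     ... v.f == 5 ...

   rather than extracting V.F with a shift and mask, the whole
   containing unit is masked and the constant is shifted, giving
   roughly

     (BIT_FIELD_REF <v, nbitsize, nbitpos> & mask)
       == ((5 << lbitpos) & mask)

   where MASK holds three one bits at the field's position; the exact
   NBITSIZE, NBITPOS and LBITPOS depend on get_best_mode and on
   BYTES_BIG_ENDIAN.  */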
3264 /* Subroutine for fold_truthop: decode a field reference.
3266 If EXP is a comparison reference, we return the innermost reference.
3268 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3269 set to the starting bit number.
3271 If the innermost field can be completely contained in a mode-sized
3272 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3274 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3275 otherwise it is not changed.
3277 *PUNSIGNEDP is set to the signedness of the field.
3279 *PMASK is set to the mask used. This is either contained in a
3280 BIT_AND_EXPR or derived from the width of the field.
3282 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3284 Return 0 if this is not a component reference or is one that we can't
3285 do anything with. */
3287 static tree
3288 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3289 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3290 int *punsignedp, int *pvolatilep,
3291 tree *pmask, tree *pand_mask)
3293 tree outer_type = 0;
3294 tree and_mask = 0;
3295 tree mask, inner, offset;
3296 tree unsigned_type;
3297 unsigned int precision;
3299 /* All the optimizations using this function assume integer fields.
3300 There are problems with FP fields since the type_for_size call
3301 below can fail for, e.g., XFmode. */
3302 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3303 return 0;
3305 /* We are interested in the bare arrangement of bits, so strip everything
3306 that doesn't affect the machine mode. However, record the type of the
3307 outermost expression if it may matter below. */
3308 if (TREE_CODE (exp) == NOP_EXPR
3309 || TREE_CODE (exp) == CONVERT_EXPR
3310 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3311 outer_type = TREE_TYPE (exp);
3312 STRIP_NOPS (exp);
3314 if (TREE_CODE (exp) == BIT_AND_EXPR)
3316 and_mask = TREE_OPERAND (exp, 1);
3317 exp = TREE_OPERAND (exp, 0);
3318 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3319 if (TREE_CODE (and_mask) != INTEGER_CST)
3320 return 0;
3323 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3324 punsignedp, pvolatilep, false);
3325 if ((inner == exp && and_mask == 0)
3326 || *pbitsize < 0 || offset != 0
3327 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3328 return 0;
3330 /* If the number of bits in the reference is the same as the bitsize of
3331 the outer type, then the outer type gives the signedness. Otherwise
3332 (in case of a small bitfield) the signedness is unchanged. */
3333 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3334 *punsignedp = TYPE_UNSIGNED (outer_type);
3336 /* Compute the mask to access the bitfield. */
3337 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3338 precision = TYPE_PRECISION (unsigned_type);
3340 mask = build_int_cst (unsigned_type, -1);
3341 mask = force_fit_type (mask, 0, false, false);
3343 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3344 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3346 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3347 if (and_mask != 0)
3348 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3349 fold_convert (unsigned_type, and_mask), mask);
3351 *pmask = mask;
3352 *pand_mask = and_mask;
3353 return inner;
3356 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3357 bit positions. */
3359 static int
3360 all_ones_mask_p (tree mask, int size)
3362 tree type = TREE_TYPE (mask);
3363 unsigned int precision = TYPE_PRECISION (type);
3364 tree tmask;
3366 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3367 tmask = force_fit_type (tmask, 0, false, false);
3369 return
3370 tree_int_cst_equal (mask,
3371 const_binop (RSHIFT_EXPR,
3372 const_binop (LSHIFT_EXPR, tmask,
3373 size_int (precision - size),
3374 0),
3375 size_int (precision - size), 0));
3378 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3379 represents the sign bit of EXP's type. If EXP represents a sign
3380 or zero extension, also test VAL against the unextended type.
3381 The return value is the (sub)expression whose sign bit is VAL,
3382 or NULL_TREE otherwise. */
3384 static tree
3385 sign_bit_p (tree exp, tree val)
3387 unsigned HOST_WIDE_INT mask_lo, lo;
3388 HOST_WIDE_INT mask_hi, hi;
3389 int width;
3390 tree t;
3392 /* Tree EXP must have an integral type. */
3393 t = TREE_TYPE (exp);
3394 if (! INTEGRAL_TYPE_P (t))
3395 return NULL_TREE;
3397 /* Tree VAL must be an integer constant. */
3398 if (TREE_CODE (val) != INTEGER_CST
3399 || TREE_CONSTANT_OVERFLOW (val))
3400 return NULL_TREE;
3402 width = TYPE_PRECISION (t);
3403 if (width > HOST_BITS_PER_WIDE_INT)
3405 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3406 lo = 0;
3408 mask_hi = ((unsigned HOST_WIDE_INT) -1
3409 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3410 mask_lo = -1;
3412 else
3414 hi = 0;
3415 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3417 mask_hi = 0;
3418 mask_lo = ((unsigned HOST_WIDE_INT) -1
3419 >> (HOST_BITS_PER_WIDE_INT - width));
3422 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3423 treat VAL as if it were unsigned. */
3424 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3425 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3426 return exp;
3428 /* Handle extension from a narrower type. */
3429 if (TREE_CODE (exp) == NOP_EXPR
3430 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3431 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3433 return NULL_TREE;
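/* Examples (illustrative, not from the original sources), for a
   32-bit int X and a signed char C:

     sign_bit_p (x, <INTEGER_CST 0x80000000>)  --> x
     sign_bit_p (x, <INTEGER_CST 0x40000000>)  --> NULL_TREE
     sign_bit_p ((int) c, <INTEGER_CST 0x80>)  --> c, through the
       narrower-type recursion at the end.  */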
3436 /* Subroutine for fold_truthop: determine if an operand is simple enough
3437 to be evaluated unconditionally. */
3439 static int
3440 simple_operand_p (tree exp)
3442 /* Strip any conversions that don't change the machine mode. */
3443 STRIP_NOPS (exp);
3445 return (CONSTANT_CLASS_P (exp)
3446 || TREE_CODE (exp) == SSA_NAME
3447 || (DECL_P (exp)
3448 && ! TREE_ADDRESSABLE (exp)
3449 && ! TREE_THIS_VOLATILE (exp)
3450 && ! DECL_NONLOCAL (exp)
3451 /* Don't regard global variables as simple. They may be
3452 allocated in ways unknown to the compiler (shared memory,
3453 #pragma weak, etc). */
3454 && ! TREE_PUBLIC (exp)
3455 && ! DECL_EXTERNAL (exp)
3456 /* Loading a static variable is unduly expensive, but global
3457 registers aren't expensive. */
3458 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3461 /* The following functions are subroutines to fold_range_test and allow it to
3462 try to change a logical combination of comparisons into a range test.
3464 For example, both
3465 X == 2 || X == 3 || X == 4 || X == 5
3466 and
3467 X >= 2 && X <= 5
3468 are converted to
3469 (unsigned) (X - 2) <= 3
3471 We describe each set of comparisons as being either inside or outside
3472 a range, using a variable named like IN_P, and then describe the
3473 range with a lower and upper bound. If one of the bounds is omitted,
3474 it represents either the highest or lowest value of the type.
3476 In the comments below, we represent a range by two numbers in brackets
3477 preceded by a "+" to designate being inside that range, or a "-" to
3478 designate being outside that range, so the condition can be inverted by
3479 flipping the prefix. An omitted bound is represented by a "-". For
3480 example, "- [-, 10]" means being outside the range starting at the lowest
3481 possible value and ending at 10, in other words, being greater than 10.
3482 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3483 always false.
3485 We set up things so that the missing bounds are handled in a consistent
3486 manner so neither a missing bound nor "true" and "false" need to be
3487 handled using a special case. */
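/* A few instances in that notation (illustrative, not from the
   original sources):

     X == 2 || X == 3 || X == 4 || X == 5	is  + [2, 5]
     X >= 2 && X <= 5				is  + [2, 5]
     X < 2 || X > 5				is  - [2, 5]
     X > 10					is  - [-, 10]

   and "+ [2, 5]" is what finally becomes the single unsigned test
   (unsigned) (X - 2) <= 3.  */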
3489 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3490 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3491 and UPPER1_P are nonzero if the respective argument is an upper bound
3492 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3493 must be specified for a comparison. ARG1 will be converted to ARG0's
3494 type if both are specified. */
3496 static tree
3497 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3498 tree arg1, int upper1_p)
3500 tree tem;
3501 int result;
3502 int sgn0, sgn1;
3504 /* If neither arg represents infinity, do the normal operation.
3505 Else, if not a comparison, return infinity. Else handle the special
3506 comparison rules. Note that most of the cases below won't occur, but
3507 are handled for consistency. */
3509 if (arg0 != 0 && arg1 != 0)
3511 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3512 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3513 STRIP_NOPS (tem);
3514 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3517 if (TREE_CODE_CLASS (code) != tcc_comparison)
3518 return 0;
3520 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3521 for neither. In real maths, we cannot assume open-ended ranges are
3522 the same. But, this is computer arithmetic, where numbers are finite.
3523 We can therefore make the transformation of any unbounded range with
3524 the value Z, Z being greater than any representable number. This permits
3525 us to treat unbounded ranges as equal. */
3526 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3527 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3528 switch (code)
3530 case EQ_EXPR:
3531 result = sgn0 == sgn1;
3532 break;
3533 case NE_EXPR:
3534 result = sgn0 != sgn1;
3535 break;
3536 case LT_EXPR:
3537 result = sgn0 < sgn1;
3538 break;
3539 case LE_EXPR:
3540 result = sgn0 <= sgn1;
3541 break;
3542 case GT_EXPR:
3543 result = sgn0 > sgn1;
3544 break;
3545 case GE_EXPR:
3546 result = sgn0 >= sgn1;
3547 break;
3548 default:
3549 gcc_unreachable ();
3552 return constant_boolean_node (result, type);
3555 /* Given EXP, a logical expression, set the range it is testing into
3556 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3557 actually being tested. *PLOW and *PHIGH will be made of the same type
3558 as the returned expression. If EXP is not a comparison, we will most
3559 likely not be returning a useful value and range. */
3561 static tree
3562 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3564 enum tree_code code;
3565 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3566 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3567 int in_p, n_in_p;
3568 tree low, high, n_low, n_high;
3570 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3571 and see if we can refine the range. Some of the cases below may not
3572 happen, but it doesn't seem worth worrying about this. We "continue"
3573 the outer loop when we've changed something; otherwise we "break"
3574 the switch, which will "break" the while. */
3576 in_p = 0;
3577 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3579 while (1)
3581 code = TREE_CODE (exp);
3582 exp_type = TREE_TYPE (exp);
3584 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3586 if (TREE_CODE_LENGTH (code) > 0)
3587 arg0 = TREE_OPERAND (exp, 0);
3588 if (TREE_CODE_CLASS (code) == tcc_comparison
3589 || TREE_CODE_CLASS (code) == tcc_unary
3590 || TREE_CODE_CLASS (code) == tcc_binary)
3591 arg0_type = TREE_TYPE (arg0);
3592 if (TREE_CODE_CLASS (code) == tcc_binary
3593 || TREE_CODE_CLASS (code) == tcc_comparison
3594 || (TREE_CODE_CLASS (code) == tcc_expression
3595 && TREE_CODE_LENGTH (code) > 1))
3596 arg1 = TREE_OPERAND (exp, 1);
3599 switch (code)
3601 case TRUTH_NOT_EXPR:
3602 in_p = ! in_p, exp = arg0;
3603 continue;
3605 case EQ_EXPR: case NE_EXPR:
3606 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3607 /* We can only do something if the range is testing for zero
3608 and if the second operand is an integer constant. Note that
3609 saying something is "in" the range we make is done by
3610 complementing IN_P, because the initial state (EXP != 0) is an
3611 "out of [0, 0]" range; "out" is leaving IN_P alone.
3612 if (low == 0 || high == 0
3613 || ! integer_zerop (low) || ! integer_zerop (high)
3614 || TREE_CODE (arg1) != INTEGER_CST)
3615 break;
3617 switch (code)
3619 case NE_EXPR: /* - [c, c] */
3620 low = high = arg1;
3621 break;
3622 case EQ_EXPR: /* + [c, c] */
3623 in_p = ! in_p, low = high = arg1;
3624 break;
3625 case GT_EXPR: /* - [-, c] */
3626 low = 0, high = arg1;
3627 break;
3628 case GE_EXPR: /* + [c, -] */
3629 in_p = ! in_p, low = arg1, high = 0;
3630 break;
3631 case LT_EXPR: /* - [c, -] */
3632 low = arg1, high = 0;
3633 break;
3634 case LE_EXPR: /* + [-, c] */
3635 in_p = ! in_p, low = 0, high = arg1;
3636 break;
3637 default:
3638 gcc_unreachable ();
3641 /* If this is an unsigned comparison, we also know that EXP is
3642 greater than or equal to zero. We base the range tests we make
3643 on that fact, so we record it here so we can parse existing
3644 range tests. We test arg0_type since often the return type
3645 of, e.g. EQ_EXPR, is boolean. */
3646 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3648 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3649 in_p, low, high, 1,
3650 fold_convert (arg0_type, integer_zero_node),
3651 NULL_TREE))
3652 break;
3654 in_p = n_in_p, low = n_low, high = n_high;
3656 /* If the high bound is missing, but we have a nonzero low
3657 bound, reverse the range so it goes from zero to the low bound
3658 minus 1. */
3659 if (high == 0 && low && ! integer_zerop (low))
3661 in_p = ! in_p;
3662 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3663 integer_one_node, 0);
3664 low = fold_convert (arg0_type, integer_zero_node);
3668 exp = arg0;
3669 continue;
3671 case NEGATE_EXPR:
3672 /* (-x) IN [a,b] -> x in [-b, -a] */
3673 n_low = range_binop (MINUS_EXPR, exp_type,
3674 fold_convert (exp_type, integer_zero_node),
3675 0, high, 1);
3676 n_high = range_binop (MINUS_EXPR, exp_type,
3677 fold_convert (exp_type, integer_zero_node),
3678 0, low, 0);
3679 low = n_low, high = n_high;
3680 exp = arg0;
3681 continue;
3683 case BIT_NOT_EXPR:
3684 /* ~ X -> -X - 1 */
3685 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3686 fold_convert (exp_type, integer_one_node));
3687 continue;
3689 case PLUS_EXPR: case MINUS_EXPR:
3690 if (TREE_CODE (arg1) != INTEGER_CST)
3691 break;
3693 /* If EXP is signed, any overflow in the computation is undefined,
3694 so we don't worry about it so long as our computations on
3695 the bounds don't overflow. For unsigned, overflow is defined
3696 and this is exactly the right thing. */
3697 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3698 arg0_type, low, 0, arg1, 0);
3699 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3700 arg0_type, high, 1, arg1, 0);
3701 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3702 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3703 break;
3705 /* Check for an unsigned range which has wrapped around the maximum
3706 value thus making n_high < n_low, and normalize it. */
3707 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3709 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3710 integer_one_node, 0);
3711 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3712 integer_one_node, 0);
3714 /* If the range is of the form +/- [ x+1, x ], we won't
3715 be able to normalize it. But then, it represents the
3716 whole range or the empty set, so make it
3717 +/- [ -, - ]. */
3718 if (tree_int_cst_equal (n_low, low)
3719 && tree_int_cst_equal (n_high, high))
3720 low = high = 0;
3721 else
3722 in_p = ! in_p;
3724 else
3725 low = n_low, high = n_high;
3727 exp = arg0;
3728 continue;
3730 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3731 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3732 break;
3734 if (! INTEGRAL_TYPE_P (arg0_type)
3735 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3736 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3737 break;
3739 n_low = low, n_high = high;
3741 if (n_low != 0)
3742 n_low = fold_convert (arg0_type, n_low);
3744 if (n_high != 0)
3745 n_high = fold_convert (arg0_type, n_high);
3748 /* If we're converting arg0, which has an unsigned type, to exp's
3749 signed type, we will be doing the comparison as unsigned.
3750 The tests above have already verified that LOW and HIGH
3751 are both positive.
3753 So we have to ensure that we will handle large unsigned
3754 values the same way that the current signed bounds treat
3755 negative values. */
3757 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3759 tree high_positive;
3760 tree equiv_type = lang_hooks.types.type_for_mode
3761 (TYPE_MODE (arg0_type), 1);
3763 /* A range without an upper bound is, naturally, unbounded.
3764 Since convert would have cropped a very large value, use
3765 the max value for the destination type. */
3766 high_positive
3767 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3768 : TYPE_MAX_VALUE (arg0_type);
3770 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3771 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3772 fold_convert (arg0_type,
3773 high_positive),
3774 fold_convert (arg0_type,
3775 integer_one_node));
3777 /* If the low bound is specified, "and" the range with the
3778 range for which the original unsigned value will be
3779 positive. */
3780 if (low != 0)
3782 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3783 1, n_low, n_high, 1,
3784 fold_convert (arg0_type,
3785 integer_zero_node),
3786 high_positive))
3787 break;
3789 in_p = (n_in_p == in_p);
3791 else
3793 /* Otherwise, "or" the range with the range of the input
3794 that will be interpreted as negative. */
3795 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3796 0, n_low, n_high, 1,
3797 fold_convert (arg0_type,
3798 integer_zero_node),
3799 high_positive))
3800 break;
3802 in_p = (in_p != n_in_p);
3806 exp = arg0;
3807 low = n_low, high = n_high;
3808 continue;
3810 default:
3811 break;
3814 break;
3817 /* If EXP is a constant, we can evaluate whether this is true or false. */
3818 if (TREE_CODE (exp) == INTEGER_CST)
3820 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3821 exp, 0, low, 0))
3822 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3823 exp, 1, high, 1)));
3824 low = high = 0;
3825 exp = 0;
3828 *pin_p = in_p, *plow = low, *phigh = high;
3829 return exp;
3832 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3833 type, TYPE, return an expression to test if EXP is in (or out of, depending
3834 on IN_P) the range. Return 0 if the test couldn't be created. */
3836 static tree
3837 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3839 tree etype = TREE_TYPE (exp);
3840 tree value;
3842 if (! in_p)
3844 value = build_range_check (type, exp, 1, low, high);
3845 if (value != 0)
3846 return invert_truthvalue (value);
3848 return 0;
3851 if (low == 0 && high == 0)
3852 return fold_convert (type, integer_one_node);
3854 if (low == 0)
3855 return fold_build2 (LE_EXPR, type, exp, high);
3857 if (high == 0)
3858 return fold_build2 (GE_EXPR, type, exp, low);
3860 if (operand_equal_p (low, high, 0))
3861 return fold_build2 (EQ_EXPR, type, exp, low);
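  /* For example, a test that signed X is IN [0, 9] is handled by the
     next case: switch to the unsigned type and recurse, giving
     (unsigned) X <= 9; negative values of X wrap to large unsigned
     values and correctly fail the test.  */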
3863 if (integer_zerop (low))
3865 if (! TYPE_UNSIGNED (etype))
3867 etype = lang_hooks.types.unsigned_type (etype);
3868 high = fold_convert (etype, high);
3869 exp = fold_convert (etype, exp);
3871 return build_range_check (type, exp, 1, 0, high);
3874 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3875 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3877 unsigned HOST_WIDE_INT lo;
3878 HOST_WIDE_INT hi;
3879 int prec;
3881 prec = TYPE_PRECISION (etype);
3882 if (prec <= HOST_BITS_PER_WIDE_INT)
3884 hi = 0;
3885 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3887 else
3889 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3890 lo = (unsigned HOST_WIDE_INT) -1;
3893 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3895 if (TYPE_UNSIGNED (etype))
3897 etype = lang_hooks.types.signed_type (etype);
3898 exp = fold_convert (etype, exp);
3900 return fold_build2 (GT_EXPR, type, exp,
3901 fold_convert (etype, integer_zero_node));
3905 value = const_binop (MINUS_EXPR, high, low, 0);
3906 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3908 tree utype, minv, maxv;
3910 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3911 for the type in question, as we rely on this here. */
3912 switch (TREE_CODE (etype))
3914 case INTEGER_TYPE:
3915 case ENUMERAL_TYPE:
3916 case CHAR_TYPE:
3917 utype = lang_hooks.types.unsigned_type (etype);
3918 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3919 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3920 integer_one_node, 1);
3921 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3922 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3923 minv, 1, maxv, 1)))
3925 etype = utype;
3926 high = fold_convert (etype, high);
3927 low = fold_convert (etype, low);
3928 exp = fold_convert (etype, exp);
3929 value = const_binop (MINUS_EXPR, high, low, 0);
3931 break;
3932 default:
3933 break;
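  /* The general fallback rewrites X IN [LOW, HIGH] as
     (unsigned) (X - LOW) <= HIGH - LOW, via the recursive call below.
     For example (assuming ASCII), ch >= '0' && ch <= '9' becomes
     (unsigned) (ch - 48) <= 9.  */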
3937 if (value != 0 && ! TREE_OVERFLOW (value))
3938 return build_range_check (type,
3939 fold_build2 (MINUS_EXPR, etype, exp, low),
3940 1, fold_convert (etype, integer_zero_node),
3941 value);
3943 return 0;
3946 /* Given two ranges, see if we can merge them into one. Return 1 if we
3947 can, 0 if we can't. Set the output range into the specified parameters. */
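/* For example: merging "in [0, 9]" with "in [5, 15]" yields "in [5, 9]",
   while merging "not in [0, 9]" with "not in [10, 15]" yields the single
   test "not in [0, 15]", since the two excluded ranges are adjacent.  */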
3949 static int
3950 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3951 tree high0, int in1_p, tree low1, tree high1)
3953 int no_overlap;
3954 int subset;
3955 int temp;
3956 tree tem;
3957 int in_p;
3958 tree low, high;
3959 int lowequal = ((low0 == 0 && low1 == 0)
3960 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3961 low0, 0, low1, 0)));
3962 int highequal = ((high0 == 0 && high1 == 0)
3963 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3964 high0, 1, high1, 1)));
3966 /* Make range 0 be the range that starts first, or ends last if they
3967 start at the same value; swap them if necessary.  */
3968 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3969 low0, 0, low1, 0))
3970 || (lowequal
3971 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3972 high1, 1, high0, 1))))
3974 temp = in0_p, in0_p = in1_p, in1_p = temp;
3975 tem = low0, low0 = low1, low1 = tem;
3976 tem = high0, high0 = high1, high1 = tem;
3979 /* Now flag two cases, whether the ranges are disjoint or whether the
3980 second range is totally subsumed in the first. Note that the tests
3981 below are simplified by the ones above. */
3982 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3983 high0, 1, low1, 0));
3984 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3985 high1, 1, high0, 1));
3987 /* We now have four cases, depending on whether we are including or
3988 excluding the two ranges. */
3989 if (in0_p && in1_p)
3991 /* If they don't overlap, the result is false. If the second range
3992 is a subset it is the result. Otherwise, the range is from the start
3993 of the second to the end of the first. */
3994 if (no_overlap)
3995 in_p = 0, low = high = 0;
3996 else if (subset)
3997 in_p = 1, low = low1, high = high1;
3998 else
3999 in_p = 1, low = low1, high = high0;
4002 else if (in0_p && ! in1_p)
4004 /* If they don't overlap, the result is the first range. If they are
4005 equal, the result is false. If the second range is a subset of the
4006 first, and the ranges begin at the same place, we go from just after
4007 the end of the first range to the end of the second. If the second
4008 range is not a subset of the first, or if it is a subset and both
4009 ranges end at the same place, the range starts at the start of the
4010 first range and ends just before the second range.
4011 Otherwise, we can't describe this as a single range. */
4012 if (no_overlap)
4013 in_p = 1, low = low0, high = high0;
4014 else if (lowequal && highequal)
4015 in_p = 0, low = high = 0;
4016 else if (subset && lowequal)
4018 in_p = 1, high = high0;
4019 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4020 integer_one_node, 0);
4022 else if (! subset || highequal)
4024 in_p = 1, low = low0;
4025 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4026 integer_one_node, 0);
4028 else
4029 return 0;
4032 else if (! in0_p && in1_p)
4034 /* If they don't overlap, the result is the second range. If the second
4035 is a subset of the first, the result is false. Otherwise,
4036 the range starts just after the first range and ends at the
4037 end of the second. */
4038 if (no_overlap)
4039 in_p = 1, low = low1, high = high1;
4040 else if (subset || highequal)
4041 in_p = 0, low = high = 0;
4042 else
4044 in_p = 1, high = high1;
4045 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4046 integer_one_node, 0);
4050 else
4052 /* The case where we are excluding both ranges. Here the complex case
4053 is if they don't overlap. In that case, the only time we have a
4054 range is if they are adjacent. If the second is a subset of the
4055 first, the result is the first. Otherwise, the range to exclude
4056 starts at the beginning of the first range and ends at the end of the
4057 second. */
4058 if (no_overlap)
4060 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4061 range_binop (PLUS_EXPR, NULL_TREE,
4062 high0, 1,
4063 integer_one_node, 1),
4064 1, low1, 0)))
4065 in_p = 0, low = low0, high = high1;
4066 else
4068 /* Canonicalize - [min, x] into - [-, x]. */
4069 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4070 switch (TREE_CODE (TREE_TYPE (low0)))
4072 case ENUMERAL_TYPE:
4073 if (TYPE_PRECISION (TREE_TYPE (low0))
4074 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4075 break;
4076 /* FALLTHROUGH */
4077 case INTEGER_TYPE:
4078 case CHAR_TYPE:
4079 if (tree_int_cst_equal (low0,
4080 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4081 low0 = 0;
4082 break;
4083 case POINTER_TYPE:
4084 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4085 && integer_zerop (low0))
4086 low0 = 0;
4087 break;
4088 default:
4089 break;
4092 /* Canonicalize - [x, max] into - [x, -]. */
4093 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4094 switch (TREE_CODE (TREE_TYPE (high1)))
4096 case ENUMERAL_TYPE:
4097 if (TYPE_PRECISION (TREE_TYPE (high1))
4098 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4099 break;
4100 /* FALLTHROUGH */
4101 case INTEGER_TYPE:
4102 case CHAR_TYPE:
4103 if (tree_int_cst_equal (high1,
4104 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4105 high1 = 0;
4106 break;
4107 case POINTER_TYPE:
4108 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4109 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4110 high1, 1,
4111 integer_one_node, 1)))
4112 high1 = 0;
4113 break;
4114 default:
4115 break;
4118 /* The ranges might also be adjacent between the maximum and
4119 minimum values of the given type. For
4120 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4121 return + [x + 1, y - 1]. */
4122 if (low0 == 0 && high1 == 0)
4124 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4125 integer_one_node, 1);
4126 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4127 integer_one_node, 0);
4128 if (low == 0 || high == 0)
4129 return 0;
4131 in_p = 1;
4133 else
4134 return 0;
4137 else if (subset)
4138 in_p = 0, low = low0, high = high0;
4139 else
4140 in_p = 0, low = low0, high = high1;
4143 *pin_p = in_p, *plow = low, *phigh = high;
4144 return 1;
4148 /* Subroutine of fold, looking inside expressions of the form
4149 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4150 of the COND_EXPR. This function is being used also to optimize
4151 A op B ? C : A, by reversing the comparison first.
4153 Return a folded expression whose code is not a COND_EXPR
4154 anymore, or NULL_TREE if no folding opportunity is found. */
4156 static tree
4157 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4159 enum tree_code comp_code = TREE_CODE (arg0);
4160 tree arg00 = TREE_OPERAND (arg0, 0);
4161 tree arg01 = TREE_OPERAND (arg0, 1);
4162 tree arg1_type = TREE_TYPE (arg1);
4163 tree tem;
4165 STRIP_NOPS (arg1);
4166 STRIP_NOPS (arg2);
4168 /* If we have A op 0 ? A : -A, consider applying the following
4169 transformations:
4171 A == 0? A : -A same as -A
4172 A != 0? A : -A same as A
4173 A >= 0? A : -A same as abs (A)
4174 A > 0? A : -A same as abs (A)
4175 A <= 0? A : -A same as -abs (A)
4176 A < 0? A : -A same as -abs (A)
4178 None of these transformations work for modes with signed
4179 zeros. If A is +/-0, the first two transformations will
4180 change the sign of the result (from +0 to -0, or vice
4181 versa). The last four will fix the sign of the result,
4182 even though the original expressions could be positive or
4183 negative, depending on the sign of A.
4185 Note that all these transformations are correct if A is
4186 NaN, since the two alternatives (A and -A) are also NaNs. */
4187 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4188 ? real_zerop (arg01)
4189 : integer_zerop (arg01))
4190 && ((TREE_CODE (arg2) == NEGATE_EXPR
4191 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4192 /* In the case that A is of the form X-Y, '-A' (arg2) may
4193 have already been folded to Y-X, check for that. */
4194 || (TREE_CODE (arg1) == MINUS_EXPR
4195 && TREE_CODE (arg2) == MINUS_EXPR
4196 && operand_equal_p (TREE_OPERAND (arg1, 0),
4197 TREE_OPERAND (arg2, 1), 0)
4198 && operand_equal_p (TREE_OPERAND (arg1, 1),
4199 TREE_OPERAND (arg2, 0), 0))))
4200 switch (comp_code)
4202 case EQ_EXPR:
4203 case UNEQ_EXPR:
4204 tem = fold_convert (arg1_type, arg1);
4205 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4206 case NE_EXPR:
4207 case LTGT_EXPR:
4208 return pedantic_non_lvalue (fold_convert (type, arg1));
4209 case UNGE_EXPR:
4210 case UNGT_EXPR:
4211 if (flag_trapping_math)
4212 break;
4213 /* Fall through. */
4214 case GE_EXPR:
4215 case GT_EXPR:
4216 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4217 arg1 = fold_convert (lang_hooks.types.signed_type
4218 (TREE_TYPE (arg1)), arg1);
4219 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4220 return pedantic_non_lvalue (fold_convert (type, tem));
4221 case UNLE_EXPR:
4222 case UNLT_EXPR:
4223 if (flag_trapping_math)
4224 break;
4225 case LE_EXPR:
4226 case LT_EXPR:
4227 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4228 arg1 = fold_convert (lang_hooks.types.signed_type
4229 (TREE_TYPE (arg1)), arg1);
4230 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4231 return negate_expr (fold_convert (type, tem));
4232 default:
4233 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4234 break;
4237 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4238 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4239 both transformations are correct when A is NaN: A != 0
4240 is then true, and A == 0 is false. */
4242 if (integer_zerop (arg01) && integer_zerop (arg2))
4244 if (comp_code == NE_EXPR)
4245 return pedantic_non_lvalue (fold_convert (type, arg1));
4246 else if (comp_code == EQ_EXPR)
4247 return fold_convert (type, integer_zero_node);
4250 /* Try some transformations of A op B ? A : B.
4252 A == B? A : B same as B
4253 A != B? A : B same as A
4254 A >= B? A : B same as max (A, B)
4255 A > B? A : B same as max (B, A)
4256 A <= B? A : B same as min (A, B)
4257 A < B? A : B same as min (B, A)
4259 As above, these transformations don't work in the presence
4260 of signed zeros. For example, if A and B are zeros of
4261 opposite sign, the first two transformations will change
4262 the sign of the result. In the last four, the original
4263 expressions give different results for (A=+0, B=-0) and
4264 (A=-0, B=+0), but the transformed expressions do not.
4266 The first two transformations are correct if either A or B
4267 is a NaN. In the first transformation, the condition will
4268 be false, and B will indeed be chosen. In the case of the
4269 second transformation, the condition A != B will be true,
4270 and A will be chosen.
4272 The conversions to max() and min() are not correct if B is
4273 a number and A is not. The conditions in the original
4274 expressions will be false, so all four give B. The min()
4275 and max() versions would give a NaN instead. */
4276 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4278 tree comp_op0 = arg00;
4279 tree comp_op1 = arg01;
4280 tree comp_type = TREE_TYPE (comp_op0);
4282 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4283 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4285 comp_type = type;
4286 comp_op0 = arg1;
4287 comp_op1 = arg2;
4290 switch (comp_code)
4292 case EQ_EXPR:
4293 return pedantic_non_lvalue (fold_convert (type, arg2));
4294 case NE_EXPR:
4295 return pedantic_non_lvalue (fold_convert (type, arg1));
4296 case LE_EXPR:
4297 case LT_EXPR:
4298 case UNLE_EXPR:
4299 case UNLT_EXPR:
4300 /* In C++ a ?: expression can be an lvalue, so put the
4301 operand which will be used if they are equal first
4302 so that we can convert this back to the
4303 corresponding COND_EXPR. */
4304 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4306 comp_op0 = fold_convert (comp_type, comp_op0);
4307 comp_op1 = fold_convert (comp_type, comp_op1);
4308 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4309 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4310 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4311 return pedantic_non_lvalue (fold_convert (type, tem));
4313 break;
4314 case GE_EXPR:
4315 case GT_EXPR:
4316 case UNGE_EXPR:
4317 case UNGT_EXPR:
4318 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4320 comp_op0 = fold_convert (comp_type, comp_op0);
4321 comp_op1 = fold_convert (comp_type, comp_op1);
4322 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4323 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4324 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4325 return pedantic_non_lvalue (fold_convert (type, tem));
4327 break;
4328 case UNEQ_EXPR:
4329 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4330 return pedantic_non_lvalue (fold_convert (type, arg2));
4331 break;
4332 case LTGT_EXPR:
4333 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4334 return pedantic_non_lvalue (fold_convert (type, arg1));
4335 break;
4336 default:
4337 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4338 break;
4342 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4343 we might still be able to simplify this. For example,
4344 if C1 is one less or one more than C2, this might have started
4345 out as a MIN or MAX and been transformed by this function.
4346 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
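  /* For instance, x < 6 ? x : 5 matches the LT_EXPR case below with
     C1 == 6 and C2 == 5; since C1 is C2 + 1 it is rebuilt as
     MIN_EXPR (x, 5).  */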
4348 if (INTEGRAL_TYPE_P (type)
4349 && TREE_CODE (arg01) == INTEGER_CST
4350 && TREE_CODE (arg2) == INTEGER_CST)
4351 switch (comp_code)
4353 case EQ_EXPR:
4354 /* We can replace A with C1 in this case. */
4355 arg1 = fold_convert (type, arg01);
4356 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4358 case LT_EXPR:
4359 /* If C1 is C2 + 1, this is min(A, C2). */
4360 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4361 OEP_ONLY_CONST)
4362 && operand_equal_p (arg01,
4363 const_binop (PLUS_EXPR, arg2,
4364 integer_one_node, 0),
4365 OEP_ONLY_CONST))
4366 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4367 type, arg1, arg2));
4368 break;
4370 case LE_EXPR:
4371 /* If C1 is C2 - 1, this is min(A, C2). */
4372 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4373 OEP_ONLY_CONST)
4374 && operand_equal_p (arg01,
4375 const_binop (MINUS_EXPR, arg2,
4376 integer_one_node, 0),
4377 OEP_ONLY_CONST))
4378 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4379 type, arg1, arg2));
4380 break;
4382 case GT_EXPR:
4383 /* If C1 is C2 - 1, this is max(A, C2). */
4384 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4385 OEP_ONLY_CONST)
4386 && operand_equal_p (arg01,
4387 const_binop (MINUS_EXPR, arg2,
4388 integer_one_node, 0),
4389 OEP_ONLY_CONST))
4390 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4391 type, arg1, arg2));
4392 break;
4394 case GE_EXPR:
4395 /* If C1 is C2 + 1, this is max(A, C2). */
4396 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4397 OEP_ONLY_CONST)
4398 && operand_equal_p (arg01,
4399 const_binop (PLUS_EXPR, arg2,
4400 integer_one_node, 0),
4401 OEP_ONLY_CONST))
4402 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4403 type, arg1, arg2));
4404 break;
4405 case NE_EXPR:
4406 break;
4407 default:
4408 gcc_unreachable ();
4411 return NULL_TREE;
4416 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4417 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4418 #endif
4420 /* EXP is some logical combination of boolean tests. See if we can
4421 merge it into some range test. Return the new tree if so. */
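/* For example, "c == 0 || c == 1" yields the single-point ranges [0, 0]
   and [1, 1]; after inverting both for the OR, merge_ranges combines the
   two exclusions into one, and the rebuilt, re-inverted test amounts to
   (unsigned) c <= 1.  */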
4423 static tree
4424 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4426 int or_op = (code == TRUTH_ORIF_EXPR
4427 || code == TRUTH_OR_EXPR);
4428 int in0_p, in1_p, in_p;
4429 tree low0, low1, low, high0, high1, high;
4430 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4431 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4432 tree tem;
4434 /* If this is an OR operation, invert both sides; we will invert
4435 again at the end. */
4436 if (or_op)
4437 in0_p = ! in0_p, in1_p = ! in1_p;
4439 /* If both expressions are the same, if we can merge the ranges, and we
4440 can build the range test, return it or it inverted. If one of the
4441 ranges is always true or always false, consider it to be the same
4442 expression as the other. */
4443 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4444 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4445 in1_p, low1, high1)
4446 && 0 != (tem = (build_range_check (type,
4447 lhs != 0 ? lhs
4448 : rhs != 0 ? rhs : integer_zero_node,
4449 in_p, low, high))))
4450 return or_op ? invert_truthvalue (tem) : tem;
4452 /* On machines where the branch cost is expensive, if this is a
4453 short-circuited branch and the underlying object on both sides
4454 is the same, make a non-short-circuit operation. */
4455 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4456 && lhs != 0 && rhs != 0
4457 && (code == TRUTH_ANDIF_EXPR
4458 || code == TRUTH_ORIF_EXPR)
4459 && operand_equal_p (lhs, rhs, 0))
4461 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4462 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4463 which cases we can't do this. */
4464 if (simple_operand_p (lhs))
4465 return build2 (code == TRUTH_ANDIF_EXPR
4466 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4467 type, op0, op1);
4469 else if (lang_hooks.decls.global_bindings_p () == 0
4470 && ! CONTAINS_PLACEHOLDER_P (lhs))
4472 tree common = save_expr (lhs);
4474 if (0 != (lhs = build_range_check (type, common,
4475 or_op ? ! in0_p : in0_p,
4476 low0, high0))
4477 && (0 != (rhs = build_range_check (type, common,
4478 or_op ? ! in1_p : in1_p,
4479 low1, high1))))
4480 return build2 (code == TRUTH_ANDIF_EXPR
4481 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4482 type, lhs, rhs);
4486 return 0;
4489 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4490 bit value. Arrange things so the extra bits will be set to zero if and
4491 only if C is sign-extended to its full width. If MASK is nonzero,
4492 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4494 static tree
4495 unextend (tree c, int p, int unsignedp, tree mask)
4497 tree type = TREE_TYPE (c);
4498 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4499 tree temp;
4501 if (p == modesize || unsignedp)
4502 return c;
4504 /* We work by getting just the sign bit into the low-order bit, then
4505 into the high-order bit, then sign-extend. We then XOR that value
4506 with C. */
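  /* Numeric illustration with P == 4 in an 8-bit mode: for the
     sign-extended constant C == 0xFA (-6 in 4 bits), TEMP becomes 0xF0
     and C ^ TEMP == 0x0A, leaving the extra bits zero; for the
     zero-extended C == 0x0A the same steps give 0xFA, setting them.  */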
4507 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4508 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4510 /* We must use a signed type in order to get an arithmetic right shift.
4511 However, we must also avoid introducing accidental overflows, so that
4512 a subsequent call to integer_zerop will work. Hence we must
4513 do the type conversion here. At this point, the constant is either
4514 zero or one, and the conversion to a signed type can never overflow.
4515 We could get an overflow if this conversion is done anywhere else. */
4516 if (TYPE_UNSIGNED (type))
4517 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4519 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4520 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4521 if (mask != 0)
4522 temp = const_binop (BIT_AND_EXPR, temp,
4523 fold_convert (TREE_TYPE (c), mask), 0);
4524 /* If necessary, convert the type back to match the type of C. */
4525 if (TYPE_UNSIGNED (type))
4526 temp = fold_convert (type, temp);
4528 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4531 /* Find ways of folding logical expressions of LHS and RHS:
4532 Try to merge two comparisons to the same innermost item.
4533 Look for range tests like "ch >= '0' && ch <= '9'".
4534 Look for combinations of simple terms on machines with expensive branches
4535 and evaluate the RHS unconditionally.
4537 For example, if we have p->a == 2 && p->b == 4 and we can make an
4538 object large enough to span both A and B, we can do this with a comparison
4539 against the object ANDed with a mask.
4541 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4542 operations to do this with one comparison.
4544 We check for both normal comparisons and the BIT_AND_EXPRs made by
4545 this function and the one above.
4547 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4548 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4550 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4551 two operands.
4553 We return the simplified tree or 0 if no optimization is possible. */
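/* As a concrete, layout-dependent illustration: given
   struct s { unsigned a : 4; unsigned b : 4; } *p, the test
   p->a == 2 && p->b == 4 can be folded into one load of the byte that
   holds both fields, masked and compared against the merged constant.  */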
4555 static tree
4556 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4558 /* If this is the "or" of two comparisons, we can do something if
4559 the comparisons are NE_EXPR. If this is the "and", we can do something
4560 if the comparisons are EQ_EXPR. I.e.,
4561 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4563 WANTED_CODE is this operation code. For single bit fields, we can
4564 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4565 comparison for one-bit fields. */
4567 enum tree_code wanted_code;
4568 enum tree_code lcode, rcode;
4569 tree ll_arg, lr_arg, rl_arg, rr_arg;
4570 tree ll_inner, lr_inner, rl_inner, rr_inner;
4571 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4572 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4573 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4574 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4575 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4576 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4577 enum machine_mode lnmode, rnmode;
4578 tree ll_mask, lr_mask, rl_mask, rr_mask;
4579 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4580 tree l_const, r_const;
4581 tree lntype, rntype, result;
4582 int first_bit, end_bit;
4583 int volatilep;
4585 /* Start by getting the comparison codes. Fail if anything is volatile.
4586 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4587 it were surrounded with a NE_EXPR. */
4589 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4590 return 0;
4592 lcode = TREE_CODE (lhs);
4593 rcode = TREE_CODE (rhs);
4595 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4597 lhs = build2 (NE_EXPR, truth_type, lhs,
4598 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4599 lcode = NE_EXPR;
4602 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4604 rhs = build2 (NE_EXPR, truth_type, rhs,
4605 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4606 rcode = NE_EXPR;
4609 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4610 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4611 return 0;
4613 ll_arg = TREE_OPERAND (lhs, 0);
4614 lr_arg = TREE_OPERAND (lhs, 1);
4615 rl_arg = TREE_OPERAND (rhs, 0);
4616 rr_arg = TREE_OPERAND (rhs, 1);
4618 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4619 if (simple_operand_p (ll_arg)
4620 && simple_operand_p (lr_arg))
4622 tree result;
4623 if (operand_equal_p (ll_arg, rl_arg, 0)
4624 && operand_equal_p (lr_arg, rr_arg, 0))
4626 result = combine_comparisons (code, lcode, rcode,
4627 truth_type, ll_arg, lr_arg);
4628 if (result)
4629 return result;
4631 else if (operand_equal_p (ll_arg, rr_arg, 0)
4632 && operand_equal_p (lr_arg, rl_arg, 0))
4634 result = combine_comparisons (code, lcode,
4635 swap_tree_comparison (rcode),
4636 truth_type, ll_arg, lr_arg);
4637 if (result)
4638 return result;
4642 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4643 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4645 /* If the RHS can be evaluated unconditionally and its operands are
4646 simple, it wins to evaluate the RHS unconditionally on machines
4647 with expensive branches. In this case, this isn't a comparison
4648 that can be merged. Avoid doing this if the RHS is a floating-point
4649 comparison since those can trap. */
4651 if (BRANCH_COST >= 2
4652 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4653 && simple_operand_p (rl_arg)
4654 && simple_operand_p (rr_arg))
4656 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4657 if (code == TRUTH_OR_EXPR
4658 && lcode == NE_EXPR && integer_zerop (lr_arg)
4659 && rcode == NE_EXPR && integer_zerop (rr_arg)
4660 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4661 return build2 (NE_EXPR, truth_type,
4662 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4663 ll_arg, rl_arg),
4664 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4666 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4667 if (code == TRUTH_AND_EXPR
4668 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4669 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4670 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4671 return build2 (EQ_EXPR, truth_type,
4672 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4673 ll_arg, rl_arg),
4674 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4676 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4677 return build2 (code, truth_type, lhs, rhs);
4680 /* See if the comparisons can be merged. Then get all the parameters for
4681 each side. */
4683 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4684 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4685 return 0;
4687 volatilep = 0;
4688 ll_inner = decode_field_reference (ll_arg,
4689 &ll_bitsize, &ll_bitpos, &ll_mode,
4690 &ll_unsignedp, &volatilep, &ll_mask,
4691 &ll_and_mask);
4692 lr_inner = decode_field_reference (lr_arg,
4693 &lr_bitsize, &lr_bitpos, &lr_mode,
4694 &lr_unsignedp, &volatilep, &lr_mask,
4695 &lr_and_mask);
4696 rl_inner = decode_field_reference (rl_arg,
4697 &rl_bitsize, &rl_bitpos, &rl_mode,
4698 &rl_unsignedp, &volatilep, &rl_mask,
4699 &rl_and_mask);
4700 rr_inner = decode_field_reference (rr_arg,
4701 &rr_bitsize, &rr_bitpos, &rr_mode,
4702 &rr_unsignedp, &volatilep, &rr_mask,
4703 &rr_and_mask);
4705 /* The inner operation on the lhs of each comparison must be the
4706 same if we are to be able to do anything.
4707 Then see if we have constants. If not, the same must be true for
4708 the rhs's. */
4709 if (volatilep || ll_inner == 0 || rl_inner == 0
4710 || ! operand_equal_p (ll_inner, rl_inner, 0))
4711 return 0;
4713 if (TREE_CODE (lr_arg) == INTEGER_CST
4714 && TREE_CODE (rr_arg) == INTEGER_CST)
4715 l_const = lr_arg, r_const = rr_arg;
4716 else if (lr_inner == 0 || rr_inner == 0
4717 || ! operand_equal_p (lr_inner, rr_inner, 0))
4718 return 0;
4719 else
4720 l_const = r_const = 0;
4722 /* If either comparison code is not correct for our logical operation,
4723 fail. However, we can convert a one-bit comparison against zero into
4724 the opposite comparison against that bit being set in the field. */
4726 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4727 if (lcode != wanted_code)
4729 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4731 /* Make the left operand unsigned, since we are only interested
4732 in the value of one bit. Otherwise we are doing the wrong
4733 thing below. */
4734 ll_unsignedp = 1;
4735 l_const = ll_mask;
4737 else
4738 return 0;
4741 /* This is analogous to the code for l_const above. */
4742 if (rcode != wanted_code)
4744 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4746 rl_unsignedp = 1;
4747 r_const = rl_mask;
4749 else
4750 return 0;
4753 /* After this point all optimizations will generate bit-field
4754 references, which we might not want. */
4755 if (! lang_hooks.can_use_bit_fields_p ())
4756 return 0;
4758 /* See if we can find a mode that contains both fields being compared on
4759 the left. If we can't, fail. Otherwise, update all constants and masks
4760 to be relative to a field of that size. */
4761 first_bit = MIN (ll_bitpos, rl_bitpos);
4762 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4763 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4764 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4765 volatilep);
4766 if (lnmode == VOIDmode)
4767 return 0;
4769 lnbitsize = GET_MODE_BITSIZE (lnmode);
4770 lnbitpos = first_bit & ~ (lnbitsize - 1);
4771 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4772 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4774 if (BYTES_BIG_ENDIAN)
4776 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4777 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4780 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4781 size_int (xll_bitpos), 0);
4782 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4783 size_int (xrl_bitpos), 0);
4785 if (l_const)
4787 l_const = fold_convert (lntype, l_const);
4788 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4789 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4790 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4791 fold_build1 (BIT_NOT_EXPR,
4792 lntype, ll_mask),
4793 0)))
4795 warning ("comparison is always %d", wanted_code == NE_EXPR);
4797 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4800 if (r_const)
4802 r_const = fold_convert (lntype, r_const);
4803 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4804 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4805 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4806 fold_build1 (BIT_NOT_EXPR,
4807 lntype, rl_mask),
4808 0)))
4810 warning ("comparison is always %d", wanted_code == NE_EXPR);
4812 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4816 /* If the right sides are not constant, do the same for them. Also,
4817 disallow this optimization if a size or signedness mismatch occurs
4818 between the left and right sides. */
4819 if (l_const == 0)
4821 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4822 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4823 /* Make sure the two fields on the right
4824 correspond to the left without being swapped. */
4825 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4826 return 0;
4828 first_bit = MIN (lr_bitpos, rr_bitpos);
4829 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4830 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4831 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4832 volatilep);
4833 if (rnmode == VOIDmode)
4834 return 0;
4836 rnbitsize = GET_MODE_BITSIZE (rnmode);
4837 rnbitpos = first_bit & ~ (rnbitsize - 1);
4838 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4839 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4841 if (BYTES_BIG_ENDIAN)
4843 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4844 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4847 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4848 size_int (xlr_bitpos), 0);
4849 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4850 size_int (xrr_bitpos), 0);
4852 /* Make a mask that corresponds to both fields being compared.
4853 Do this for both items being compared. If the operands are the
4854 same size and the bits being compared are in the same position
4855 then we can do this by masking both and comparing the masked
4856 results. */
4857 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4858 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4859 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4861 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4862 ll_unsignedp || rl_unsignedp);
4863 if (! all_ones_mask_p (ll_mask, lnbitsize))
4864 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4866 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4867 lr_unsignedp || rr_unsignedp);
4868 if (! all_ones_mask_p (lr_mask, rnbitsize))
4869 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4871 return build2 (wanted_code, truth_type, lhs, rhs);
4874 /* There is still another way we can do something: If both pairs of
4875 fields being compared are adjacent, we may be able to make a wider
4876 field containing them both.
4878 Note that we still must mask the lhs/rhs expressions. Furthermore,
4879 the mask must be shifted to account for the shift done by
4880 make_bit_field_ref. */
4881 if ((ll_bitsize + ll_bitpos == rl_bitpos
4882 && lr_bitsize + lr_bitpos == rr_bitpos)
4883 || (ll_bitpos == rl_bitpos + rl_bitsize
4884 && lr_bitpos == rr_bitpos + rr_bitsize))
4886 tree type;
4888 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4889 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4890 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4891 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4893 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4894 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4895 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4896 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4898 /* Convert to the smaller type before masking out unwanted bits. */
4899 type = lntype;
4900 if (lntype != rntype)
4902 if (lnbitsize > rnbitsize)
4904 lhs = fold_convert (rntype, lhs);
4905 ll_mask = fold_convert (rntype, ll_mask);
4906 type = rntype;
4908 else if (lnbitsize < rnbitsize)
4910 rhs = fold_convert (lntype, rhs);
4911 lr_mask = fold_convert (lntype, lr_mask);
4912 type = lntype;
4916 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4917 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4919 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4920 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4922 return build2 (wanted_code, truth_type, lhs, rhs);
4925 return 0;
4928 /* Handle the case of comparisons with constants. If there is something in
4929 common between the masks, those bits of the constants must be the same.
4930 If not, the condition is always false. Test for this to avoid generating
4931 incorrect code below. */
4932 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4933 if (! integer_zerop (result)
4934 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4935 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4937 if (wanted_code == NE_EXPR)
4939 warning ("%<or%> of unmatched not-equal tests is always 1");
4940 return constant_boolean_node (true, truth_type);
4942 else
4944 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4945 return constant_boolean_node (false, truth_type);
4949 /* Construct the expression we will return. First get the component
4950 reference we will make. Unless the mask is all ones the width of
4951 that field, perform the mask operation. Then compare with the
4952 merged constant. */
4953 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4954 ll_unsignedp || rl_unsignedp);
4956 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4957 if (! all_ones_mask_p (ll_mask, lnbitsize))
4958 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4960 return build2 (wanted_code, truth_type, result,
4961 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4964 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4965 constant. */
4967 static tree
4968 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
4970 tree arg0 = op0;
4971 enum tree_code op_code;
4972 tree comp_const = op1;
4973 tree minmax_const;
4974 int consts_equal, consts_lt;
4975 tree inner;
4977 STRIP_SIGN_NOPS (arg0);
4979 op_code = TREE_CODE (arg0);
4980 minmax_const = TREE_OPERAND (arg0, 1);
4981 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4982 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4983 inner = TREE_OPERAND (arg0, 0);
4985 /* If something does not permit us to optimize, return the original tree. */
4986 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4987 || TREE_CODE (comp_const) != INTEGER_CST
4988 || TREE_CONSTANT_OVERFLOW (comp_const)
4989 || TREE_CODE (minmax_const) != INTEGER_CST
4990 || TREE_CONSTANT_OVERFLOW (minmax_const))
4991 return NULL_TREE;
4993 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4994 and GT_EXPR, doing the rest with recursive calls using logical
4995 simplifications. */
4996 switch (code)
4998 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5000 /* FIXME: We should be able to invert code without building a
5001 scratch tree node, but doing so would require us to
5002 duplicate a part of invert_truthvalue here. */
5003 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5004 tem = optimize_minmax_comparison (TREE_CODE (tem),
5005 TREE_TYPE (tem),
5006 TREE_OPERAND (tem, 0),
5007 TREE_OPERAND (tem, 1));
5008 return invert_truthvalue (tem);
5011 case GE_EXPR:
5012 return
5013 fold_build2 (TRUTH_ORIF_EXPR, type,
5014 optimize_minmax_comparison
5015 (EQ_EXPR, type, arg0, comp_const),
5016 optimize_minmax_comparison
5017 (GT_EXPR, type, arg0, comp_const));
5019 case EQ_EXPR:
5020 if (op_code == MAX_EXPR && consts_equal)
5021 /* MAX (X, 0) == 0 -> X <= 0 */
5022 return fold_build2 (LE_EXPR, type, inner, comp_const);
5024 else if (op_code == MAX_EXPR && consts_lt)
5025 /* MAX (X, 0) == 5 -> X == 5 */
5026 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5028 else if (op_code == MAX_EXPR)
5029 /* MAX (X, 0) == -1 -> false */
5030 return omit_one_operand (type, integer_zero_node, inner);
5032 else if (consts_equal)
5033 /* MIN (X, 0) == 0 -> X >= 0 */
5034 return fold_build2 (GE_EXPR, type, inner, comp_const);
5036 else if (consts_lt)
5037 /* MIN (X, 0) == 5 -> false */
5038 return omit_one_operand (type, integer_zero_node, inner);
5040 else
5041 /* MIN (X, 0) == -1 -> X == -1 */
5042 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5044 case GT_EXPR:
5045 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5046 /* MAX (X, 0) > 0 -> X > 0
5047 MAX (X, 0) > 5 -> X > 5 */
5048 return fold_build2 (GT_EXPR, type, inner, comp_const);
5050 else if (op_code == MAX_EXPR)
5051 /* MAX (X, 0) > -1 -> true */
5052 return omit_one_operand (type, integer_one_node, inner);
5054 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5055 /* MIN (X, 0) > 0 -> false
5056 MIN (X, 0) > 5 -> false */
5057 return omit_one_operand (type, integer_zero_node, inner);
5059 else
5060 /* MIN (X, 0) > -1 -> X > -1 */
5061 return fold_build2 (GT_EXPR, type, inner, comp_const);
5063 default:
5064 return NULL_TREE;
5068 /* T is an integer expression that is being multiplied or divided by, or
5069 taken modulo, a constant C (CODE says which, and what kind of divide or
5070 modulus). See if we can eliminate that operation by folding it with
5071 other operations already in T. WIDE_TYPE, if non-null, is a type that
5072 should be used for the computation if wider than our type.
5074 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5075 (X * 2) + (Y * 4). We must, however, be assured that either the original
5076 expression would not overflow or that overflow is undefined for the type
5077 in the language in question.
5079 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5080 the machine has a multiply-accumulate insn or that this is part of an
5081 addressing calculation.
5083 If we return a non-null expression, it is an equivalent form of the
5084 original computation, but need not be in the original type. */
5086 static tree
5087 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5089 /* To avoid exponential search depth, refuse to allow recursion past
5090 three levels. Beyond that (1) it's highly unlikely that we'll find
5091 something interesting and (2) we've probably processed it before
5092 when we built the inner expression. */
5094 static int depth;
5095 tree ret;
5097 if (depth > 3)
5098 return NULL;
5100 depth++;
5101 ret = extract_muldiv_1 (t, c, code, wide_type);
5102 depth--;
5104 return ret;
5107 static tree
5108 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5110 tree type = TREE_TYPE (t);
5111 enum tree_code tcode = TREE_CODE (t);
5112 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5113 > GET_MODE_SIZE (TYPE_MODE (type)))
5114 ? wide_type : type);
5115 tree t1, t2;
5116 int same_p = tcode == code;
5117 tree op0 = NULL_TREE, op1 = NULL_TREE;
5119 /* Don't deal with constants of zero here; they confuse the code below. */
5120 if (integer_zerop (c))
5121 return NULL_TREE;
5123 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5124 op0 = TREE_OPERAND (t, 0);
5126 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5127 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5129 /* Note that we need not handle conditional operations here since fold
5130 already handles those cases. So just do arithmetic here. */
5131 switch (tcode)
5133 case INTEGER_CST:
5134 /* For a constant, we can always simplify if we are a multiply
5135 or (for divide and modulus) if it is a multiple of our constant. */
5136 if (code == MULT_EXPR
5137 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5138 return const_binop (code, fold_convert (ctype, t),
5139 fold_convert (ctype, c), 0);
5140 break;
5142 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5143 /* If op0 is an expression ... */
5144 if ((COMPARISON_CLASS_P (op0)
5145 || UNARY_CLASS_P (op0)
5146 || BINARY_CLASS_P (op0)
5147 || EXPRESSION_CLASS_P (op0))
5148 /* ... and is unsigned, and its type is smaller than ctype,
5149 then we cannot pass through as widening. */
5150 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5151 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5152 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5153 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5154 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5155 /* ... or this is a truncation (t is narrower than op0),
5156 then we cannot pass through this narrowing. */
5157 || (GET_MODE_SIZE (TYPE_MODE (type))
5158 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5159 /* ... or signedness changes for division or modulus,
5160 then we cannot pass through this conversion. */
5161 || (code != MULT_EXPR
5162 && (TYPE_UNSIGNED (ctype)
5163 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5164 break;
5166 /* Pass the constant down and see if we can make a simplification. If
5167 we can, replace this expression with the inner simplification for
5168 possible later conversion to our or some other type. */
5169 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5170 && TREE_CODE (t2) == INTEGER_CST
5171 && ! TREE_CONSTANT_OVERFLOW (t2)
5172 && (0 != (t1 = extract_muldiv (op0, t2, code,
5173 code == MULT_EXPR
5174 ? ctype : NULL_TREE))))
5175 return t1;
5176 break;
5178 case ABS_EXPR:
5179 /* If widening the type changes it from signed to unsigned, then we
5180 must avoid building ABS_EXPR itself as unsigned. */
5181 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5183 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5184 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5186 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5187 return fold_convert (ctype, t1);
5189 break;
5191 /* FALLTHROUGH */
5192 case NEGATE_EXPR:
5193 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5194 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5195 break;
5197 case MIN_EXPR: case MAX_EXPR:
5198 /* If widening the type changes the signedness, then we can't perform
5199 this optimization as that changes the result. */
5200 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5201 break;
5203 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5204 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5205 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5207 if (tree_int_cst_sgn (c) < 0)
5208 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5210 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5211 fold_convert (ctype, t2));
5213 break;
5215 case LSHIFT_EXPR: case RSHIFT_EXPR:
5216 /* If the second operand is constant, this is a multiplication
5217 or floor division by a power of two, so we can treat it that
5218 way unless the multiplier or divisor overflows. Signed
5219 left-shift overflow is implementation-defined rather than
5220 undefined in C90, so do not convert signed left shift into
5221 multiplication. */
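      /* E.g. an unsigned X << 3 is recursively treated as X * 8, and
	 X >> 3 (either signedness) as floor division by 8; a signed
	 X << 3 is deliberately left alone per the C90 caveat above.  */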
5222 if (TREE_CODE (op1) == INTEGER_CST
5223 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5224 /* const_binop may not detect overflow correctly,
5225 so check for it explicitly here. */
5226 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5227 && TREE_INT_CST_HIGH (op1) == 0
5228 && 0 != (t1 = fold_convert (ctype,
5229 const_binop (LSHIFT_EXPR,
5230 size_one_node,
5231 op1, 0)))
5232 && ! TREE_OVERFLOW (t1))
5233 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5234 ? MULT_EXPR : FLOOR_DIV_EXPR,
5235 ctype, fold_convert (ctype, op0), t1),
5236 c, code, wide_type);
5237 break;
5239 case PLUS_EXPR: case MINUS_EXPR:
5240 /* See if we can eliminate the operation on both sides. If we can, we
5241 can return a new PLUS or MINUS. If we can't, the only remaining
5242 cases where we can do anything are if the second operand is a
5243 constant. */
5244 t1 = extract_muldiv (op0, c, code, wide_type);
5245 t2 = extract_muldiv (op1, c, code, wide_type);
5246 if (t1 != 0 && t2 != 0
5247 && (code == MULT_EXPR
5248 /* If not multiplication, we can only do this if both operands
5249 are divisible by c. */
5250 || (multiple_of_p (ctype, op0, c)
5251 && multiple_of_p (ctype, op1, c))))
5252 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5253 fold_convert (ctype, t2));
5255 /* If this was a subtraction, negate OP1 and set it to be an addition.
5256 This simplifies the logic below. */
5257 if (tcode == MINUS_EXPR)
5258 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5260 if (TREE_CODE (op1) != INTEGER_CST)
5261 break;
5263 /* If either OP1 or C are negative, this optimization is not safe for
5264 some of the division and remainder types while for others we need
5265 to change the code. */
5266 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5268 if (code == CEIL_DIV_EXPR)
5269 code = FLOOR_DIV_EXPR;
5270 else if (code == FLOOR_DIV_EXPR)
5271 code = CEIL_DIV_EXPR;
5272 else if (code != MULT_EXPR
5273 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5274 break;
5277 /* If it's a multiply or a division/modulus operation of a multiple
5278 of our constant, do the operation and verify it doesn't overflow. */
5279 if (code == MULT_EXPR
5280 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5282 op1 = const_binop (code, fold_convert (ctype, op1),
5283 fold_convert (ctype, c), 0);
5284 /* We allow the constant to overflow with wrapping semantics. */
5285 if (op1 == 0
5286 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5287 break;
5289 else
5290 break;
5292 /* If we have an unsigned type that is not a sizetype, we cannot widen
5293 the operation since it will change the result if the original
5294 computation overflowed. */
5295 if (TYPE_UNSIGNED (ctype)
5296 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5297 && ctype != type)
5298 break;
5300 /* If we were able to eliminate our operation from the first side,
5301 apply our operation to the second side and reform the PLUS. */
5302 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5303 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5305 /* The last case is if we are a multiply. In that case, we can
5306 apply the distributive law to commute the multiply and addition
5307 if the multiplication of the constants doesn't overflow. */
5308 if (code == MULT_EXPR)
5309 return fold_build2 (tcode, ctype,
5310 fold_build2 (code, ctype,
5311 fold_convert (ctype, op0),
5312 fold_convert (ctype, c)),
5313 op1);
5315 break;
5317 case MULT_EXPR:
5318 /* We have a special case here if we are doing something like
5319 (C * 8) % 4 since we know that's zero. */
5320 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5321 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5322 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5323 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5324 return omit_one_operand (type, integer_zero_node, op0);
5326 /* ... fall through ... */
5328 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5329 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5330 /* If we can extract our operation from the LHS, do so and return a
5331 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5332 do something only if the second operand is a constant. */
5333 if (same_p
5334 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5335 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5336 fold_convert (ctype, op1));
5337 else if (tcode == MULT_EXPR && code == MULT_EXPR
5338 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5339 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5340 fold_convert (ctype, t1));
5341 else if (TREE_CODE (op1) != INTEGER_CST)
5342 return 0;
5344 /* If these are the same operation types, we can associate them
5345 assuming no overflow. */
5346 if (tcode == code
5347 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5348 fold_convert (ctype, c), 0))
5349 && ! TREE_OVERFLOW (t1))
5350 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5352 /* If these operations "cancel" each other, we have the main
5353 optimizations of this pass, which occur when either constant is a
5354 multiple of the other, in which case we replace this with either an
5355 operation of CODE or TCODE.
5357 If we have an unsigned type that is not a sizetype, we cannot do
5358 this since it will change the result if the original computation
5359 overflowed. */
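      /* E.g. (X * 12) / 4 folds to X * 3 via the first arm below, and
	 (X * 4) / 12 folds to X / 3 via the second, both assuming the
	 original multiplication cannot overflow.  */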
5360 if ((! TYPE_UNSIGNED (ctype)
5361 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5362 && ! flag_wrapv
5363 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5364 || (tcode == MULT_EXPR
5365 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5366 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5368 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5369 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5370 fold_convert (ctype,
5371 const_binop (TRUNC_DIV_EXPR,
5372 op1, c, 0)));
5373 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5374 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5375 fold_convert (ctype,
5376 const_binop (TRUNC_DIV_EXPR,
5377 c, op1, 0)));
5379 break;
5381 default:
5382 break;
5385 return 0;
5388 /* Return a node which has the indicated constant VALUE (either 0 or
5389 1), and is of the indicated TYPE. */
5391 tree
5392 constant_boolean_node (int value, tree type)
5394 if (type == integer_type_node)
5395 return value ? integer_one_node : integer_zero_node;
5396 else if (type == boolean_type_node)
5397 return value ? boolean_true_node : boolean_false_node;
5398 else
5399 return build_int_cst (type, value);
5403 /* Return true if expr looks like an ARRAY_REF and set base and
5404 offset to the appropriate trees. If there is no offset,
5405 offset is set to NULL_TREE. */
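/* For example, &a[i] decomposes into base &a with offset i, while a
   PLUS_EXPR such as &a + 4 decomposes into base &a with offset 4.  */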
5407 static bool
5408 extract_array_ref (tree expr, tree *base, tree *offset)
5410 /* We have to be careful when stripping nops: with a different
5411 base type, the meaning of the offset can change.  */
5412 tree inner_expr = expr;
5413 STRIP_NOPS (inner_expr);
5414 /* One canonical form is a PLUS_EXPR with the first
5415 argument being an ADDR_EXPR with a possible NOP_EXPR
5416 attached. */
5417 if (TREE_CODE (expr) == PLUS_EXPR)
5419 tree op0 = TREE_OPERAND (expr, 0);
5420 STRIP_NOPS (op0);
5421 if (TREE_CODE (op0) == ADDR_EXPR)
5423 *base = TREE_OPERAND (expr, 0);
5424 *offset = TREE_OPERAND (expr, 1);
5425 return true;
5428 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5429 which we transform into an ADDR_EXPR with appropriate
5430 offset. For other arguments to the ADDR_EXPR we assume
5431 zero offset and as such do not care about the ADDR_EXPR
5432 type and strip possible nops from it. */
5433 else if (TREE_CODE (inner_expr) == ADDR_EXPR)
5435 tree op0 = TREE_OPERAND (inner_expr, 0);
5436 if (TREE_CODE (op0) == ARRAY_REF)
5438 *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
5439 *offset = TREE_OPERAND (op0, 1);
5441 else
5443 *base = inner_expr;
5444 *offset = NULL_TREE;
5446 return true;
5449 return false;
5453 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5454 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5455 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5456 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5457 COND is the first argument to CODE; otherwise (as in the example
5458 given here), it is the second argument. TYPE is the type of the
5459 original expression. Return NULL_TREE if no simplification is
5460 possible. */
5462 static tree
5463 fold_binary_op_with_conditional_arg (enum tree_code code,
5464 tree type, tree op0, tree op1,
5465 tree cond, tree arg, int cond_first_p)
5467 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5468 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5469 tree test, true_value, false_value;
5470 tree lhs = NULL_TREE;
5471 tree rhs = NULL_TREE;
5473 /* This transformation is only worthwhile if we don't have to wrap
5474 arg in a SAVE_EXPR, and the operation can be simplified on at least
5475 one of the branches once it's pushed inside the COND_EXPR. */
5476 if (!TREE_CONSTANT (arg))
5477 return NULL_TREE;
5479 if (TREE_CODE (cond) == COND_EXPR)
5481 test = TREE_OPERAND (cond, 0);
5482 true_value = TREE_OPERAND (cond, 1);
5483 false_value = TREE_OPERAND (cond, 2);
5484 /* If one arm of the COND_EXPR has void type (e.g. because it
5485 throws), it does not make sense to try to perform a logical
5486 or arithmetic operation involving it. */
5487 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5488 lhs = true_value;
5489 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5490 rhs = false_value;
5492 else
5494 tree testtype = TREE_TYPE (cond);
5495 test = cond;
5496 true_value = constant_boolean_node (true, testtype);
5497 false_value = constant_boolean_node (false, testtype);
5500 arg = fold_convert (arg_type, arg);
5501 if (lhs == 0)
5503 true_value = fold_convert (cond_type, true_value);
5504 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5505 : build2 (code, type, arg, true_value));
5507 if (rhs == 0)
5509 false_value = fold_convert (cond_type, false_value);
5510 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5511 : build2 (code, type, arg, false_value));
5514 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5515 return fold_convert (type, test);
5519 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5521 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5522 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5523 ADDEND is the same as X.
5525 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5526 and finite. The problematic cases are when X is zero, and its mode
5527 has signed zeros. In the case of rounding towards -infinity,
5528 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5529 modes, X + 0 is not the same as X because -0 + 0 is 0. */
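/* Concretely: under round-to-nearest, x + 0.0 maps -0.0 to +0.0,
   so that fold is rejected whenever signed zeros are honored;
   x - 0.0 is still accepted then, unless sign-dependent rounding
   is honored, because rounding toward -infinity makes
   +0.0 - 0.0 yield -0.0.  */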
5531 static bool
5532 fold_real_zero_addition_p (tree type, tree addend, int negate)
5534 if (!real_zerop (addend))
5535 return false;
5537 /* Don't allow the fold with -fsignaling-nans. */
5538 if (HONOR_SNANS (TYPE_MODE (type)))
5539 return false;
5541 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5542 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5543 return true;
5545 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5546 if (TREE_CODE (addend) == REAL_CST
5547 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5548 negate = !negate;
5550 /* The mode has signed zeros, and we have to honor their sign.
5551 In this situation, there is only one case we can return true for.
5552 X - 0 is the same as X unless rounding towards -infinity is
5553 supported. */
5554 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5557 /* Subroutine of fold() that checks comparisons of built-in math
5558 functions against real constants.
5560 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5561 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5562 is the type of the result and ARG0 and ARG1 are the operands of the
5563 comparison. ARG1 must be a TREE_REAL_CST.
5565 The function returns the constant folded tree if a simplification
5566 can be made, and NULL_TREE otherwise. */
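/* For example, sqrt(x) > 2.0 folds to x > 4.0; sqrt(x) < 2.0
   folds to x < 4.0 when NaNs can be ignored, and otherwise,
   where a SAVE_EXPR may be introduced, to x >= 0.0 && x < 4.0.  */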
5568 static tree
5569 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5570 tree type, tree arg0, tree arg1)
5572 REAL_VALUE_TYPE c;
5574 if (BUILTIN_SQRT_P (fcode))
5576 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5577 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5579 c = TREE_REAL_CST (arg1);
5580 if (REAL_VALUE_NEGATIVE (c))
5582 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
5583 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5584 return omit_one_operand (type, integer_zero_node, arg);
5586 /* sqrt(x) > y is always true, if y is negative and we
5587 don't care about NaNs, i.e. negative values of x. */
5588 if (code == NE_EXPR || !HONOR_NANS (mode))
5589 return omit_one_operand (type, integer_one_node, arg);
5591 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5592 return fold_build2 (GE_EXPR, type, arg,
5593 build_real (TREE_TYPE (arg), dconst0));
5595 else if (code == GT_EXPR || code == GE_EXPR)
5597 REAL_VALUE_TYPE c2;
5599 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5600 real_convert (&c2, mode, &c2);
5602 if (REAL_VALUE_ISINF (c2))
5604 /* sqrt(x) > y is x == +Inf, when y is very large. */
5605 if (HONOR_INFINITIES (mode))
5606 return fold_build2 (EQ_EXPR, type, arg,
5607 build_real (TREE_TYPE (arg), c2));
5609 /* sqrt(x) > y is always false, when y is very large
5610 and we don't care about infinities. */
5611 return omit_one_operand (type, integer_zero_node, arg);
5614 /* sqrt(x) > c is the same as x > c*c. */
5615 return fold_build2 (code, type, arg,
5616 build_real (TREE_TYPE (arg), c2));
5618 else if (code == LT_EXPR || code == LE_EXPR)
5620 REAL_VALUE_TYPE c2;
5622 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5623 real_convert (&c2, mode, &c2);
5625 if (REAL_VALUE_ISINF (c2))
5627 /* sqrt(x) < y is always true, when y is a very large
5628 value and we don't care about NaNs or Infinities. */
5629 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5630 return omit_one_operand (type, integer_one_node, arg);
5632 /* sqrt(x) < y is x != +Inf when y is very large and we
5633 don't care about NaNs. */
5634 if (! HONOR_NANS (mode))
5635 return fold_build2 (NE_EXPR, type, arg,
5636 build_real (TREE_TYPE (arg), c2));
5638 /* sqrt(x) < y is x >= 0 when y is very large and we
5639 don't care about Infinities. */
5640 if (! HONOR_INFINITIES (mode))
5641 return fold_build2 (GE_EXPR, type, arg,
5642 build_real (TREE_TYPE (arg), dconst0));
5644 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5645 if (lang_hooks.decls.global_bindings_p () != 0
5646 || CONTAINS_PLACEHOLDER_P (arg))
5647 return NULL_TREE;
5649 arg = save_expr (arg);
5650 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5651 fold_build2 (GE_EXPR, type, arg,
5652 build_real (TREE_TYPE (arg),
5653 dconst0)),
5654 fold_build2 (NE_EXPR, type, arg,
5655 build_real (TREE_TYPE (arg),
5656 c2)));
5659 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5660 if (! HONOR_NANS (mode))
5661 return fold_build2 (code, type, arg,
5662 build_real (TREE_TYPE (arg), c2));
5664 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5665 if (lang_hooks.decls.global_bindings_p () == 0
5666 && ! CONTAINS_PLACEHOLDER_P (arg))
5668 arg = save_expr (arg);
5669 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5670 fold_build2 (GE_EXPR, type, arg,
5671 build_real (TREE_TYPE (arg),
5672 dconst0)),
5673 fold_build2 (code, type, arg,
5674 build_real (TREE_TYPE (arg),
5675 c2)));
5680 return NULL_TREE;
5683 /* Subroutine of fold() that optimizes comparisons against Infinities,
5684 either +Inf or -Inf.
5686 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5687 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5688 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5690 The function returns the constant folded tree if a simplification
5691 can be made, and NULL_TREE otherwise. */
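/* For example, for double X, X < +Inf folds to X <= DBL_MAX,
   X >= +Inf folds to X > DBL_MAX, and, when NaNs are honored,
   X <= +Inf folds to the isfinite-style test X == X.  */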
5693 static tree
5694 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5696 enum machine_mode mode;
5697 REAL_VALUE_TYPE max;
5698 tree temp;
5699 bool neg;
5701 mode = TYPE_MODE (TREE_TYPE (arg0));
5703 /* For negative infinity swap the sense of the comparison. */
5704 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5705 if (neg)
5706 code = swap_tree_comparison (code);
5708 switch (code)
5710 case GT_EXPR:
5711 /* x > +Inf is always false, if we ignore sNaNs. */
5712 if (HONOR_SNANS (mode))
5713 return NULL_TREE;
5714 return omit_one_operand (type, integer_zero_node, arg0);
5716 case LE_EXPR:
5717 /* x <= +Inf is always true, if we don't care about NaNs. */
5718 if (! HONOR_NANS (mode))
5719 return omit_one_operand (type, integer_one_node, arg0);
5721 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5722 if (lang_hooks.decls.global_bindings_p () == 0
5723 && ! CONTAINS_PLACEHOLDER_P (arg0))
5725 arg0 = save_expr (arg0);
5726 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5728 break;
5730 case EQ_EXPR:
5731 case GE_EXPR:
5732 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5733 real_maxval (&max, neg, mode);
5734 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5735 arg0, build_real (TREE_TYPE (arg0), max));
5737 case LT_EXPR:
5738 /* x < +Inf is always equal to x <= DBL_MAX. */
5739 real_maxval (&max, neg, mode);
5740 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5741 arg0, build_real (TREE_TYPE (arg0), max));
5743 case NE_EXPR:
5744 /* x != +Inf is always equal to !(x > DBL_MAX). */
5745 real_maxval (&max, neg, mode);
5746 if (! HONOR_NANS (mode))
5747 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5748 arg0, build_real (TREE_TYPE (arg0), max));
5750 /* The transformation below creates non-gimple code and thus is
5751 not appropriate if we are in gimple form. */
5752 if (in_gimple_form)
5753 return NULL_TREE;
5755 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5756 arg0, build_real (TREE_TYPE (arg0), max));
5757 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5759 default:
5760 break;
5763 return NULL_TREE;
5766 /* Subroutine of fold() that optimizes comparisons of a division by
5767 a nonzero integer constant against an integer constant, i.e.
5768 X/C1 op C2.
5770 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5771 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5772 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5774 The function returns the constant folded tree if a simplification
5775 can be made, and NULL_TREE otherwise. */
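/* For example, with signed int X, X / 4 == 2 folds to a range
   check equivalent to 8 <= X && X <= 11, and X / 4 < 2 folds
   to X < 8.  */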
5777 static tree
5778 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5780 tree prod, tmp, hi, lo;
5781 tree arg00 = TREE_OPERAND (arg0, 0);
5782 tree arg01 = TREE_OPERAND (arg0, 1);
5783 unsigned HOST_WIDE_INT lpart;
5784 HOST_WIDE_INT hpart;
5785 int overflow;
5787 /* We have to do this the hard way to detect unsigned overflow.
5788 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5789 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5790 TREE_INT_CST_HIGH (arg01),
5791 TREE_INT_CST_LOW (arg1),
5792 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5793 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5794 prod = force_fit_type (prod, -1, overflow, false);
5796 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5798 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5799 lo = prod;
5801 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5802 overflow = add_double (TREE_INT_CST_LOW (prod),
5803 TREE_INT_CST_HIGH (prod),
5804 TREE_INT_CST_LOW (tmp),
5805 TREE_INT_CST_HIGH (tmp),
5806 &lpart, &hpart);
5807 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5808 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5809 TREE_CONSTANT_OVERFLOW (prod));
5811 else if (tree_int_cst_sgn (arg01) >= 0)
5813 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5814 switch (tree_int_cst_sgn (arg1))
5816 case -1:
5817 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5818 hi = prod;
5819 break;
5821 case 0:
5822 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5823 hi = tmp;
5824 break;
5826 case 1:
5827 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5828 lo = prod;
5829 break;
5831 default:
5832 gcc_unreachable ();
5835 else
5837 /* A negative divisor reverses the relational operators. */
5838 code = swap_tree_comparison (code);
5840 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5841 switch (tree_int_cst_sgn (arg1))
5843 case -1:
5844 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5845 lo = prod;
5846 break;
5848 case 0:
5849 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5850 lo = tmp;
5851 break;
5853 case 1:
5854 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5855 hi = prod;
5856 break;
5858 default:
5859 gcc_unreachable ();
5863 switch (code)
5865 case EQ_EXPR:
5866 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5867 return omit_one_operand (type, integer_zero_node, arg00);
5868 if (TREE_OVERFLOW (hi))
5869 return fold_build2 (GE_EXPR, type, arg00, lo);
5870 if (TREE_OVERFLOW (lo))
5871 return fold_build2 (LE_EXPR, type, arg00, hi);
5872 return build_range_check (type, arg00, 1, lo, hi);
5874 case NE_EXPR:
5875 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5876 return omit_one_operand (type, integer_one_node, arg00);
5877 if (TREE_OVERFLOW (hi))
5878 return fold_build2 (LT_EXPR, type, arg00, lo);
5879 if (TREE_OVERFLOW (lo))
5880 return fold_build2 (GT_EXPR, type, arg00, hi);
5881 return build_range_check (type, arg00, 0, lo, hi);
5883 case LT_EXPR:
5884 if (TREE_OVERFLOW (lo))
5885 return omit_one_operand (type, integer_zero_node, arg00);
5886 return fold_build2 (LT_EXPR, type, arg00, lo);
5888 case LE_EXPR:
5889 if (TREE_OVERFLOW (hi))
5890 return omit_one_operand (type, integer_one_node, arg00);
5891 return fold_build2 (LE_EXPR, type, arg00, hi);
5893 case GT_EXPR:
5894 if (TREE_OVERFLOW (hi))
5895 return omit_one_operand (type, integer_zero_node, arg00);
5896 return fold_build2 (GT_EXPR, type, arg00, hi);
5898 case GE_EXPR:
5899 if (TREE_OVERFLOW (lo))
5900 return omit_one_operand (type, integer_one_node, arg00);
5901 return fold_build2 (GE_EXPR, type, arg00, lo);
5903 default:
5904 break;
5907 return NULL_TREE;
5911 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5912 equality/inequality test, then return a simplified form of
5913 the test using shifts and logical operations. Otherwise return
5914 NULL. TYPE is the desired result type. */
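/* For example, (x & 8) != 0 becomes ((x >> 3) & 1) and
   (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1); when the mask is
   the sign bit of x, (x & mask) != 0 becomes simply x < 0.  */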
5916 tree
5917 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5918 tree result_type)
5920 /* If this is testing a single bit, we can optimize the test. */
5921 if ((code == NE_EXPR || code == EQ_EXPR)
5922 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5923 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5925 tree inner = TREE_OPERAND (arg0, 0);
5926 tree type = TREE_TYPE (arg0);
5927 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5928 enum machine_mode operand_mode = TYPE_MODE (type);
5929 int ops_unsigned;
5930 tree signed_type, unsigned_type, intermediate_type;
5931 tree arg00;
5933 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5934 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5935 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5936 if (arg00 != NULL_TREE
5937 /* This is only a win if casting to a signed type is cheap,
5938 i.e. when arg00's type is not a partial mode. */
5939 && TYPE_PRECISION (TREE_TYPE (arg00))
5940 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5942 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5943 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5944 result_type, fold_convert (stype, arg00),
5945 fold_convert (stype, integer_zero_node));
5948 /* Otherwise we have (A & C) != 0 where C is a single bit;
5949 convert that into ((A >> C2) & 1), where C2 = log2(C).
5950 Similarly for (A & C) == 0. */
5952 /* If INNER is a right shift by a constant and it plus BITNUM does
5953 not overflow, adjust BITNUM and INNER. */
5954 if (TREE_CODE (inner) == RSHIFT_EXPR
5955 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5956 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5957 && bitnum < TYPE_PRECISION (type)
5958 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5959 bitnum - TYPE_PRECISION (type)))
5961 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5962 inner = TREE_OPERAND (inner, 0);
5965 /* If we are going to be able to omit the AND below, we must do our
5966 operations as unsigned. If we must use the AND, we have a choice.
5967 Normally unsigned is faster, but for some machines signed is. */
5968 #ifdef LOAD_EXTEND_OP
5969 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
5970 && !flag_syntax_only) ? 0 : 1;
5971 #else
5972 ops_unsigned = 1;
5973 #endif
5975 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5976 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5977 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5978 inner = fold_convert (intermediate_type, inner);
5980 if (bitnum != 0)
5981 inner = build2 (RSHIFT_EXPR, intermediate_type,
5982 inner, size_int (bitnum));
5984 if (code == EQ_EXPR)
5985 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
5986 inner, integer_one_node);
5988 /* Put the AND last so it can combine with more things. */
5989 inner = build2 (BIT_AND_EXPR, intermediate_type,
5990 inner, integer_one_node);
5992 /* Make sure to return the proper type. */
5993 inner = fold_convert (result_type, inner);
5995 return inner;
5997 return NULL_TREE;
6000 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6001 such that the evaluation of ARG1 occurs before ARG0. */
6003 static bool
6004 reorder_operands_p (tree arg0, tree arg1)
6006 if (! flag_evaluation_order)
6007 return true;
6008 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6009 return true;
6010 return ! TREE_SIDE_EFFECTS (arg0)
6011 && ! TREE_SIDE_EFFECTS (arg1);
6014 /* Test whether it is preferable to swap two operands, ARG0 and
6015 ARG1, for example because ARG0 is an integer constant and ARG1
6016 isn't. If REORDER is true, only recommend swapping if we can
6017 evaluate the operands in reverse order. */
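/* For instance, fold_binary uses this to canonicalize 5 + x into
   x + 5, and to order two SSA names by version number, so that
   later folds need only look for a constant in the second
   operand.  */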
6019 bool
6020 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6022 STRIP_SIGN_NOPS (arg0);
6023 STRIP_SIGN_NOPS (arg1);
6025 if (TREE_CODE (arg1) == INTEGER_CST)
6026 return 0;
6027 if (TREE_CODE (arg0) == INTEGER_CST)
6028 return 1;
6030 if (TREE_CODE (arg1) == REAL_CST)
6031 return 0;
6032 if (TREE_CODE (arg0) == REAL_CST)
6033 return 1;
6035 if (TREE_CODE (arg1) == COMPLEX_CST)
6036 return 0;
6037 if (TREE_CODE (arg0) == COMPLEX_CST)
6038 return 1;
6040 if (TREE_CONSTANT (arg1))
6041 return 0;
6042 if (TREE_CONSTANT (arg0))
6043 return 1;
6045 if (optimize_size)
6046 return 0;
6048 if (reorder && flag_evaluation_order
6049 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6050 return 0;
6052 if (DECL_P (arg1))
6053 return 0;
6054 if (DECL_P (arg0))
6055 return 1;
6057 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6058 for commutative and comparison operators. Ensuring a canonical
6059 form allows the optimizers to find additional redundancies without
6060 having to explicitly check for both orderings. */
6061 if (TREE_CODE (arg0) == SSA_NAME
6062 && TREE_CODE (arg1) == SSA_NAME
6063 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6064 return 1;
6066 return 0;
6069 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6070 ARG0 is extended to a wider type. */
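/* For example, with unsigned char c, (int) c == 1000 folds to
   constant false, since 1000 is outside [0, 255], while
   (int) c == 42 is narrowed back to c == 42.  */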
6072 static tree
6073 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6075 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6076 tree arg1_unw;
6077 tree shorter_type, outer_type;
6078 tree min, max;
6079 bool above, below;
6081 if (arg0_unw == arg0)
6082 return NULL_TREE;
6083 shorter_type = TREE_TYPE (arg0_unw);
6085 #ifdef HAVE_canonicalize_funcptr_for_compare
6086 /* Disable this optimization if we're casting a function pointer
6087 type on targets that require function pointer canonicalization. */
6088 if (HAVE_canonicalize_funcptr_for_compare
6089 && TREE_CODE (shorter_type) == POINTER_TYPE
6090 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6091 return NULL_TREE;
6092 #endif
6094 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6095 return NULL_TREE;
6097 arg1_unw = get_unwidened (arg1, shorter_type);
6098 if (!arg1_unw)
6099 return NULL_TREE;
6101 /* If possible, express the comparison in the shorter mode. */
6102 if ((code == EQ_EXPR || code == NE_EXPR
6103 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6104 && (TREE_TYPE (arg1_unw) == shorter_type
6105 || (TREE_CODE (arg1_unw) == INTEGER_CST
6106 && TREE_CODE (shorter_type) == INTEGER_TYPE
6107 && int_fits_type_p (arg1_unw, shorter_type))))
6108 return fold_build2 (code, type, arg0_unw,
6109 fold_convert (shorter_type, arg1_unw));
6111 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6112 return NULL_TREE;
6114 /* If we are comparing with an integer that does not fit into the range
6115 of the shorter type, the result is known. */
6116 outer_type = TREE_TYPE (arg1_unw);
6117 min = lower_bound_in_type (outer_type, shorter_type);
6118 max = upper_bound_in_type (outer_type, shorter_type);
6120 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6121 max, arg1_unw));
6122 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6123 arg1_unw, min));
6125 switch (code)
6127 case EQ_EXPR:
6128 if (above || below)
6129 return omit_one_operand (type, integer_zero_node, arg0);
6130 break;
6132 case NE_EXPR:
6133 if (above || below)
6134 return omit_one_operand (type, integer_one_node, arg0);
6135 break;
6137 case LT_EXPR:
6138 case LE_EXPR:
6139 if (above)
6140 return omit_one_operand (type, integer_one_node, arg0);
6141 else if (below)
6142 return omit_one_operand (type, integer_zero_node, arg0);
6144 case GT_EXPR:
6145 case GE_EXPR:
6146 if (above)
6147 return omit_one_operand (type, integer_zero_node, arg0);
6148 else if (below)
6149 return omit_one_operand (type, integer_one_node, arg0);
6151 default:
6152 break;
6155 return NULL_TREE;
6158 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6159 ARG0 just the signedness is changed. */
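/* For example, with int i, (unsigned int) i == 42u folds back
   to i == 42: a cast that changes only the signedness cannot
   affect equality, and the constant is refit into the inner
   type.  */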
6161 static tree
6162 fold_sign_changed_comparison (enum tree_code code, tree type,
6163 tree arg0, tree arg1)
6165 tree arg0_inner, tmp;
6166 tree inner_type, outer_type;
6168 if (TREE_CODE (arg0) != NOP_EXPR
6169 && TREE_CODE (arg0) != CONVERT_EXPR)
6170 return NULL_TREE;
6172 outer_type = TREE_TYPE (arg0);
6173 arg0_inner = TREE_OPERAND (arg0, 0);
6174 inner_type = TREE_TYPE (arg0_inner);
6176 #ifdef HAVE_canonicalize_funcptr_for_compare
6177 /* Disable this optimization if we're casting a function pointer
6178 type on targets that require function pointer canonicalization. */
6179 if (HAVE_canonicalize_funcptr_for_compare
6180 && TREE_CODE (inner_type) == POINTER_TYPE
6181 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6182 return NULL_TREE;
6183 #endif
6185 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6186 return NULL_TREE;
6188 if (TREE_CODE (arg1) != INTEGER_CST
6189 && !((TREE_CODE (arg1) == NOP_EXPR
6190 || TREE_CODE (arg1) == CONVERT_EXPR)
6191 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6192 return NULL_TREE;
6194 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6195 && code != NE_EXPR
6196 && code != EQ_EXPR)
6197 return NULL_TREE;
6199 if (TREE_CODE (arg1) == INTEGER_CST)
6201 tmp = build_int_cst_wide (inner_type,
6202 TREE_INT_CST_LOW (arg1),
6203 TREE_INT_CST_HIGH (arg1));
6204 arg1 = force_fit_type (tmp, 0,
6205 TREE_OVERFLOW (arg1),
6206 TREE_CONSTANT_OVERFLOW (arg1));
6208 else
6209 arg1 = fold_convert (inner_type, arg1);
6211 return fold_build2 (code, type, arg0_inner, arg1);
6214 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6215 the step of the array. ADDR is the address, MULT the multiplicative expression.
6216 If the function succeeds, the new address expression is returned. Otherwise
6217 NULL_TREE is returned. */
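/* For instance, with int a[] whose elements are 4 bytes wide, the
   address &a[i] + 4 * d satisfies this pattern and is rewritten
   as &a[i + d].  */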
6219 static tree
6220 try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
6222 tree s, delta, step;
6223 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6224 tree ref = TREE_OPERAND (addr, 0), pref;
6225 tree ret, pos;
6226 tree itype;
6228 STRIP_NOPS (arg0);
6229 STRIP_NOPS (arg1);
6231 if (TREE_CODE (arg0) == INTEGER_CST)
6233 s = arg0;
6234 delta = arg1;
6236 else if (TREE_CODE (arg1) == INTEGER_CST)
6238 s = arg1;
6239 delta = arg0;
6241 else
6242 return NULL_TREE;
6244 for (;; ref = TREE_OPERAND (ref, 0))
6246 if (TREE_CODE (ref) == ARRAY_REF)
6248 step = array_ref_element_size (ref);
6250 if (TREE_CODE (step) != INTEGER_CST)
6251 continue;
6253 itype = TREE_TYPE (step);
6255 /* If the type sizes do not match, we might run into problems
6256 when one of them would overflow. */
6257 if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
6258 continue;
6260 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6261 continue;
6263 delta = fold_convert (itype, delta);
6264 break;
6267 if (!handled_component_p (ref))
6268 return NULL_TREE;
6271 /* We found a suitable array reference, so copy everything up to it
6272 and replace the index. */
6274 pref = TREE_OPERAND (addr, 0);
6275 ret = copy_node (pref);
6276 pos = ret;
6278 while (pref != ref)
6280 pref = TREE_OPERAND (pref, 0);
6281 TREE_OPERAND (pos, 0) = copy_node (pref);
6282 pos = TREE_OPERAND (pos, 0);
6285 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6286 TREE_OPERAND (pos, 1),
6287 delta);
6289 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6293 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6294 means A >= Y && A != MAX, but in this case we know that
6295 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6297 static tree
6298 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6300 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6302 if (TREE_CODE (bound) == LT_EXPR)
6303 a = TREE_OPERAND (bound, 0);
6304 else if (TREE_CODE (bound) == GT_EXPR)
6305 a = TREE_OPERAND (bound, 1);
6306 else
6307 return NULL_TREE;
6309 typea = TREE_TYPE (a);
6310 if (!INTEGRAL_TYPE_P (typea)
6311 && !POINTER_TYPE_P (typea))
6312 return NULL_TREE;
6314 if (TREE_CODE (ineq) == LT_EXPR)
6316 a1 = TREE_OPERAND (ineq, 1);
6317 y = TREE_OPERAND (ineq, 0);
6319 else if (TREE_CODE (ineq) == GT_EXPR)
6321 a1 = TREE_OPERAND (ineq, 0);
6322 y = TREE_OPERAND (ineq, 1);
6324 else
6325 return NULL_TREE;
6327 if (TREE_TYPE (a1) != typea)
6328 return NULL_TREE;
6330 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6331 if (!integer_onep (diff))
6332 return NULL_TREE;
6334 return fold_build2 (GE_EXPR, type, a, y);
6337 /* Fold complex addition when both components are accessible by parts.
6338 Return non-null if successful. CODE should be PLUS_EXPR for addition,
6339 or MINUS_EXPR for subtraction. */
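/* This is just the componentwise identity
   (a + b*i) +/- (c + d*i) = (a +/- c) + (b +/- d)*i.  */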
6341 static tree
6342 fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
6344 tree ar, ai, br, bi, rr, ri, inner_type;
6346 if (TREE_CODE (ac) == COMPLEX_EXPR)
6347 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6348 else if (TREE_CODE (ac) == COMPLEX_CST)
6349 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6350 else
6351 return NULL;
6353 if (TREE_CODE (bc) == COMPLEX_EXPR)
6354 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6355 else if (TREE_CODE (bc) == COMPLEX_CST)
6356 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6357 else
6358 return NULL;
6360 inner_type = TREE_TYPE (type);
6362 rr = fold_build2 (code, inner_type, ar, br);
6363 ri = fold_build2 (code, inner_type, ai, bi);
6365 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6368 /* Perform some simplifications of complex multiplication when one or more
6369 of the components are constants or zeros. Return non-null if successful. */
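/* For example, when B is purely real (bi == 0) the product is
   (ar*br) + (ai*br)*i, and when B is purely imaginary (br == 0)
   it is -(ai*bi) + (ar*bi)*i; nothing is folded unless at least
   one component is zero.  */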
6371 tree
6372 fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
6374 tree rr, ri, inner_type, zero;
6375 bool ar0, ai0, br0, bi0, bi1;
6377 inner_type = TREE_TYPE (type);
6378 zero = NULL;
6380 if (SCALAR_FLOAT_TYPE_P (inner_type))
6382 ar0 = ai0 = br0 = bi0 = bi1 = false;
6384 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6386 if (TREE_CODE (ar) == REAL_CST
6387 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6388 ar0 = true, zero = ar;
6390 if (TREE_CODE (ai) == REAL_CST
6391 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6392 ai0 = true, zero = ai;
6394 if (TREE_CODE (br) == REAL_CST
6395 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6396 br0 = true, zero = br;
6398 if (TREE_CODE (bi) == REAL_CST)
6400 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6401 bi0 = true, zero = bi;
6402 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6403 bi1 = true;
6406 else
6408 ar0 = integer_zerop (ar);
6409 if (ar0)
6410 zero = ar;
6411 ai0 = integer_zerop (ai);
6412 if (ai0)
6413 zero = ai;
6414 br0 = integer_zerop (br);
6415 if (br0)
6416 zero = br;
6417 bi0 = integer_zerop (bi);
6418 if (bi0)
6420 zero = bi;
6421 bi1 = false;
6423 else
6424 bi1 = integer_onep (bi);
6427 /* We won't optimize anything below unless something is zero. */
6428 if (zero == NULL)
6429 return NULL;
6431 if (ai0 && br0 && bi1)
6433 rr = zero;
6434 ri = ar;
6436 else if (ai0 && bi0)
6438 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6439 ri = zero;
6441 else if (ai0 && br0)
6443 rr = zero;
6444 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6446 else if (ar0 && bi0)
6448 rr = zero;
6449 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6451 else if (ar0 && br0)
6453 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6454 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6455 ri = zero;
6457 else if (bi0)
6459 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6460 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6462 else if (ai0)
6464 rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
6465 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6467 else if (br0)
6469 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6470 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6471 ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
6473 else if (ar0)
6475 rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
6476 rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
6477 ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
6479 else
6480 return NULL;
6482 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6485 static tree
6486 fold_complex_mult (tree type, tree ac, tree bc)
6488 tree ar, ai, br, bi;
6490 if (TREE_CODE (ac) == COMPLEX_EXPR)
6491 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6492 else if (TREE_CODE (ac) == COMPLEX_CST)
6493 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6494 else
6495 return NULL;
6497 if (TREE_CODE (bc) == COMPLEX_EXPR)
6498 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6499 else if (TREE_CODE (bc) == COMPLEX_CST)
6500 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6501 else
6502 return NULL;
6504 return fold_complex_mult_parts (type, ar, ai, br, bi);
6507 /* Perform some simplifications of complex division when one or more of
6508 the components are constants or zeros. Return non-null if successful. */
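/* For example, when B is purely real (bi == 0) the quotient is
   (ar/br) + (ai/br)*i, and when B is purely imaginary (br == 0)
   it is (ai/bi) - (ar/bi)*i.  */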
6510 tree
6511 fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
6512 enum tree_code code)
6514 tree rr, ri, inner_type, zero;
6515 bool ar0, ai0, br0, bi0, bi1;
6517 inner_type = TREE_TYPE (type);
6518 zero = NULL;
6520 if (SCALAR_FLOAT_TYPE_P (inner_type))
6522 ar0 = ai0 = br0 = bi0 = bi1 = false;
6524 /* We're only interested in +0.0 here, thus we don't use real_zerop. */
6526 if (TREE_CODE (ar) == REAL_CST
6527 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
6528 ar0 = true, zero = ar;
6530 if (TREE_CODE (ai) == REAL_CST
6531 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
6532 ai0 = true, zero = ai;
6534 if (TREE_CODE (br) == REAL_CST
6535 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
6536 br0 = true, zero = br;
6538 if (TREE_CODE (bi) == REAL_CST)
6540 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
6541 bi0 = true, zero = bi;
6542 else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
6543 bi1 = true;
6546 else
6548 ar0 = integer_zerop (ar);
6549 if (ar0)
6550 zero = ar;
6551 ai0 = integer_zerop (ai);
6552 if (ai0)
6553 zero = ai;
6554 br0 = integer_zerop (br);
6555 if (br0)
6556 zero = br;
6557 bi0 = integer_zerop (bi);
6558 if (bi0)
6560 zero = bi;
6561 bi1 = false;
6563 else
6564 bi1 = integer_onep (bi);
6567 /* We won't optimize anything below unless something is zero. */
6568 if (zero == NULL)
6569 return NULL;
6571 if (ai0 && bi0)
6573 rr = fold_build2 (code, inner_type, ar, br);
6574 ri = zero;
6576 else if (ai0 && br0)
6578 rr = zero;
6579 ri = fold_build2 (code, inner_type, ar, bi);
6580 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6582 else if (ar0 && bi0)
6584 rr = zero;
6585 ri = fold_build2 (code, inner_type, ai, br);
6587 else if (ar0 && br0)
6589 rr = fold_build2 (code, inner_type, ai, bi);
6590 ri = zero;
6592 else if (bi0)
6594 rr = fold_build2 (code, inner_type, ar, br);
6595 ri = fold_build2 (code, inner_type, ai, br);
6597 else if (br0)
6599 rr = fold_build2 (code, inner_type, ai, bi);
6600 ri = fold_build2 (code, inner_type, ar, bi);
6601 ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
6603 else
6604 return NULL;
6606 return fold_build2 (COMPLEX_EXPR, type, rr, ri);
6609 static tree
6610 fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
6612 tree ar, ai, br, bi;
6614 if (TREE_CODE (ac) == COMPLEX_EXPR)
6615 ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
6616 else if (TREE_CODE (ac) == COMPLEX_CST)
6617 ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
6618 else
6619 return NULL;
6621 if (TREE_CODE (bc) == COMPLEX_EXPR)
6622 br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
6623 else if (TREE_CODE (bc) == COMPLEX_CST)
6624 br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
6625 else
6626 return NULL;
6628 return fold_complex_div_parts (type, ar, ai, br, bi, code);
6631 /* Fold a unary expression of code CODE and type TYPE with operand
6632 OP0. Return the folded expression if folding is successful.
6633 Otherwise, return NULL_TREE. */
6635 static tree
6636 fold_unary (enum tree_code code, tree type, tree op0)
6638 tree tem;
6639 tree arg0;
6640 enum tree_code_class kind = TREE_CODE_CLASS (code);
6642 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6643 && TREE_CODE_LENGTH (code) == 1);
6645 arg0 = op0;
6646 if (arg0)
6648 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6650 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6651 STRIP_SIGN_NOPS (arg0);
6653 else
6655 /* Strip any conversions that don't change the mode. This
6656 is safe for every expression, except for a comparison
6657 expression because its signedness is derived from its
6658 operands.
6660 Note that this is done as an internal manipulation within
6661 the constant folder, in order to find the simplest
6662 representation of the arguments so that their form can be
6663 studied. In any case, the appropriate type conversions
6664 should be put back in the tree that will get out of the
6665 constant folder. */
6666 STRIP_NOPS (arg0);
6670 if (TREE_CODE_CLASS (code) == tcc_unary)
6672 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6673 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6674 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6675 else if (TREE_CODE (arg0) == COND_EXPR)
6677 tree arg01 = TREE_OPERAND (arg0, 1);
6678 tree arg02 = TREE_OPERAND (arg0, 2);
6679 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6680 arg01 = fold_build1 (code, type, arg01);
6681 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6682 arg02 = fold_build1 (code, type, arg02);
6683 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6684 arg01, arg02);
6686 /* If this was a conversion, and all we did was to move it
6687 inside the COND_EXPR, bring it back out. But leave it if
6688 it is a conversion from integer to integer and the
6689 result precision is no wider than a word since such a
6690 conversion is cheap and may be optimized away by combine,
6691 while it couldn't if it were outside the COND_EXPR. Then return
6692 so we don't get into an infinite recursion loop taking the
6693 conversion out and then back in. */
6695 if ((code == NOP_EXPR || code == CONVERT_EXPR
6696 || code == NON_LVALUE_EXPR)
6697 && TREE_CODE (tem) == COND_EXPR
6698 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6699 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6700 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6701 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6702 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6703 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6704 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6705 && (INTEGRAL_TYPE_P
6706 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6707 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6708 || flag_syntax_only))
6709 tem = build1 (code, type,
6710 build3 (COND_EXPR,
6711 TREE_TYPE (TREE_OPERAND
6712 (TREE_OPERAND (tem, 1), 0)),
6713 TREE_OPERAND (tem, 0),
6714 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6715 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6716 return tem;
6718 else if (COMPARISON_CLASS_P (arg0))
6720 if (TREE_CODE (type) == BOOLEAN_TYPE)
6722 arg0 = copy_node (arg0);
6723 TREE_TYPE (arg0) = type;
6724 return arg0;
6726 else if (TREE_CODE (type) != INTEGER_TYPE)
6727 return fold_build3 (COND_EXPR, type, arg0,
6728 fold_build1 (code, type,
6729 integer_one_node),
6730 fold_build1 (code, type,
6731 integer_zero_node));
6735 switch (code)
6737 case NOP_EXPR:
6738 case FLOAT_EXPR:
6739 case CONVERT_EXPR:
6740 case FIX_TRUNC_EXPR:
6741 case FIX_CEIL_EXPR:
6742 case FIX_FLOOR_EXPR:
6743 case FIX_ROUND_EXPR:
6744 if (TREE_TYPE (op0) == type)
6745 return op0;
6747 /* Handle cases of two conversions in a row. */
6748 if (TREE_CODE (op0) == NOP_EXPR
6749 || TREE_CODE (op0) == CONVERT_EXPR)
6751 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6752 tree inter_type = TREE_TYPE (op0);
6753 int inside_int = INTEGRAL_TYPE_P (inside_type);
6754 int inside_ptr = POINTER_TYPE_P (inside_type);
6755 int inside_float = FLOAT_TYPE_P (inside_type);
6756 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6757 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6758 int inter_int = INTEGRAL_TYPE_P (inter_type);
6759 int inter_ptr = POINTER_TYPE_P (inter_type);
6760 int inter_float = FLOAT_TYPE_P (inter_type);
6761 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6762 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6763 int final_int = INTEGRAL_TYPE_P (type);
6764 int final_ptr = POINTER_TYPE_P (type);
6765 int final_float = FLOAT_TYPE_P (type);
6766 unsigned int final_prec = TYPE_PRECISION (type);
6767 int final_unsignedp = TYPE_UNSIGNED (type);
6769 /* In addition to the cases of two conversions in a row
6770 handled below, if we are converting something to its own
6771 type via an object of identical or wider precision, neither
6772 conversion is needed. */
6773 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6774 && ((inter_int && final_int) || (inter_float && final_float))
6775 && inter_prec >= final_prec)
6776 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6778 /* Likewise, if the intermediate and final types are either both
6779 float or both integer, we don't need the middle conversion if
6780 it is wider than the final type and doesn't change the signedness
6781 (for integers). Avoid this if the final type is a pointer
6782 since then we sometimes need the inner conversion. Likewise if
6783 the outer has a precision not equal to the size of its mode. */
6784 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6785 || (inter_float && inside_float))
6786 && inter_prec >= inside_prec
6787 && (inter_float || inter_unsignedp == inside_unsignedp)
6788 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6789 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6790 && ! final_ptr)
6791 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6793 /* If we have a sign-extension of a zero-extended value, we can
6794 replace that by a single zero-extension. */
6795 if (inside_int && inter_int && final_int
6796 && inside_prec < inter_prec && inter_prec < final_prec
6797 && inside_unsignedp && !inter_unsignedp)
6798 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6800 /* Two conversions in a row are not needed unless:
6801 - some conversion is floating-point (overstrict for now), or
6802 - the intermediate type is narrower than both initial and
6803 final, or
6804 - the intermediate type and innermost type differ in signedness,
6805 and the outermost type is wider than the intermediate, or
6806 - the initial type is a pointer type and the precisions of the
6807 intermediate and final types differ, or
6808 - the final type is a pointer type and the precisions of the
6809 initial and intermediate types differ. */
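/* For instance, with 32-bit int and 64-bit long, (int) (long) i
   collapses to i by the first rule above, and the sign-extension
   rule turns (int) (short) (unsigned char) c into a single
   zero-extension of c to int.  */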
6810 if (! inside_float && ! inter_float && ! final_float
6811 && (inter_prec > inside_prec || inter_prec > final_prec)
6812 && ! (inside_int && inter_int
6813 && inter_unsignedp != inside_unsignedp
6814 && inter_prec < final_prec)
6815 && ((inter_unsignedp && inter_prec > inside_prec)
6816 == (final_unsignedp && final_prec > inter_prec))
6817 && ! (inside_ptr && inter_prec != final_prec)
6818 && ! (final_ptr && inside_prec != inter_prec)
6819 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6820 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6821 && ! final_ptr)
6822 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6825 if (TREE_CODE (op0) == MODIFY_EXPR
6826 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6827 /* Detect assigning a bitfield. */
6828 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6829 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6831 /* Don't leave an assignment inside a conversion
6832 unless assigning a bitfield. */
6833 tem = build1 (code, type, TREE_OPERAND (op0, 1));
6834 /* First do the assignment, then return converted constant. */
6835 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
6836 TREE_NO_WARNING (tem) = 1;
6837 TREE_USED (tem) = 1;
6838 return tem;
6841 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6842 constant (if x has signed type, the sign bit cannot be set
6843 in c). This folds extension into the BIT_AND_EXPR. */
6844 if (INTEGRAL_TYPE_P (type)
6845 && TREE_CODE (type) != BOOLEAN_TYPE
6846 && TREE_CODE (op0) == BIT_AND_EXPR
6847 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6849 tree and = op0;
6850 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6851 int change = 0;
6853 if (TYPE_UNSIGNED (TREE_TYPE (and))
6854 || (TYPE_PRECISION (type)
6855 <= TYPE_PRECISION (TREE_TYPE (and))))
6856 change = 1;
6857 else if (TYPE_PRECISION (TREE_TYPE (and1))
6858 <= HOST_BITS_PER_WIDE_INT
6859 && host_integerp (and1, 1))
6861 unsigned HOST_WIDE_INT cst;
6863 cst = tree_low_cst (and1, 1);
6864 cst &= (HOST_WIDE_INT) -1
6865 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6866 change = (cst == 0);
6867 #ifdef LOAD_EXTEND_OP
6868 if (change
6869 && !flag_syntax_only
6870 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6871 == ZERO_EXTEND))
6873 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6874 and0 = fold_convert (uns, and0);
6875 and1 = fold_convert (uns, and1);
6877 #endif
6879 if (change)
6881 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6882 TREE_INT_CST_HIGH (and1));
6883 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6884 TREE_CONSTANT_OVERFLOW (and1));
6885 return fold_build2 (BIT_AND_EXPR, type,
6886 fold_convert (type, and0), tem);
6890 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6891 T2 being pointers to types of the same size. */
6892 if (POINTER_TYPE_P (type)
6893 && BINARY_CLASS_P (arg0)
6894 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6895 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6897 tree arg00 = TREE_OPERAND (arg0, 0);
6898 tree t0 = type;
6899 tree t1 = TREE_TYPE (arg00);
6900 tree tt0 = TREE_TYPE (t0);
6901 tree tt1 = TREE_TYPE (t1);
6902 tree s0 = TYPE_SIZE (tt0);
6903 tree s1 = TYPE_SIZE (tt1);
6905 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6906 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6907 TREE_OPERAND (arg0, 1));
6910 tem = fold_convert_const (code, type, arg0);
6911 return tem ? tem : NULL_TREE;
6913 case VIEW_CONVERT_EXPR:
6914 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6915 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6916 return NULL_TREE;
6918 case NEGATE_EXPR:
6919 if (negate_expr_p (arg0))
6920 return fold_convert (type, negate_expr (arg0));
6921 /* Convert - (~A) to A + 1. */
6922 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6923 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6924 build_int_cst (type, 1));
6925 return NULL_TREE;
6927 case ABS_EXPR:
6928 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6929 return fold_abs_const (arg0, type);
6930 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6931 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6932 /* Convert fabs((double)float) into (double)fabsf(float). */
6933 else if (TREE_CODE (arg0) == NOP_EXPR
6934 && TREE_CODE (type) == REAL_TYPE)
6936 tree targ0 = strip_float_extensions (arg0);
6937 if (targ0 != arg0)
6938 return fold_convert (type, fold_build1 (ABS_EXPR,
6939 TREE_TYPE (targ0),
6940 targ0));
6942 else if (tree_expr_nonnegative_p (arg0))
6943 return arg0;
6945 /* Strip sign ops from argument. */
6946 if (TREE_CODE (type) == REAL_TYPE)
6948 tem = fold_strip_sign_ops (arg0);
6949 if (tem)
6950 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6952 return NULL_TREE;
6954 case CONJ_EXPR:
6955 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6956 return fold_convert (type, arg0);
6957 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6958 return build2 (COMPLEX_EXPR, type,
6959 TREE_OPERAND (arg0, 0),
6960 negate_expr (TREE_OPERAND (arg0, 1)));
6961 else if (TREE_CODE (arg0) == COMPLEX_CST)
6962 return build_complex (type, TREE_REALPART (arg0),
6963 negate_expr (TREE_IMAGPART (arg0)));
6964 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6965 return fold_build2 (TREE_CODE (arg0), type,
6966 fold_build1 (CONJ_EXPR, type,
6967 TREE_OPERAND (arg0, 0)),
6968 fold_build1 (CONJ_EXPR, type,
6969 TREE_OPERAND (arg0, 1)));
6970 else if (TREE_CODE (arg0) == CONJ_EXPR)
6971 return TREE_OPERAND (arg0, 0);
6972 return NULL_TREE;
6974 case BIT_NOT_EXPR:
6975 if (TREE_CODE (arg0) == INTEGER_CST)
6976 return fold_not_const (arg0, type);
6977 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6978 return TREE_OPERAND (arg0, 0);
6979 /* Convert ~ (-A) to A - 1. */
6980 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6981 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6982 build_int_cst (type, 1));
6983 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6984 else if (INTEGRAL_TYPE_P (type)
6985 && ((TREE_CODE (arg0) == MINUS_EXPR
6986 && integer_onep (TREE_OPERAND (arg0, 1)))
6987 || (TREE_CODE (arg0) == PLUS_EXPR
6988 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6989 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6990 return NULL_TREE;
6992 case TRUTH_NOT_EXPR:
6993 /* The argument to invert_truthvalue must have Boolean type. */
6994 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6995 arg0 = fold_convert (boolean_type_node, arg0);
6997 /* Note that the operand of this must be an int
6998 and its values must be 0 or 1.
6999 ("true" is a fixed value perhaps depending on the language,
7000 but we don't handle values other than 1 correctly yet.) */
7001 tem = invert_truthvalue (arg0);
7002 /* Avoid infinite recursion. */
7003 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7004 return NULL_TREE;
7005 return fold_convert (type, tem);
7007 case REALPART_EXPR:
7008 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7009 return NULL_TREE;
7010 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7011 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7012 TREE_OPERAND (arg0, 1));
7013 else if (TREE_CODE (arg0) == COMPLEX_CST)
7014 return TREE_REALPART (arg0);
7015 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7016 return fold_build2 (TREE_CODE (arg0), type,
7017 fold_build1 (REALPART_EXPR, type,
7018 TREE_OPERAND (arg0, 0)),
7019 fold_build1 (REALPART_EXPR, type,
7020 TREE_OPERAND (arg0, 1)));
7021 return NULL_TREE;
7023 case IMAGPART_EXPR:
7024 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7025 return fold_convert (type, integer_zero_node);
7026 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7027 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7028 TREE_OPERAND (arg0, 0));
7029 else if (TREE_CODE (arg0) == COMPLEX_CST)
7030 return TREE_IMAGPART (arg0);
7031 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7032 return fold_build2 (TREE_CODE (arg0), type,
7033 fold_build1 (IMAGPART_EXPR, type,
7034 TREE_OPERAND (arg0, 0)),
7035 fold_build1 (IMAGPART_EXPR, type,
7036 TREE_OPERAND (arg0, 1)));
7037 return NULL_TREE;
7039 default:
7040 return NULL_TREE;
7041 } /* switch (code) */
7044 /* Fold a binary expression of code CODE and type TYPE with operands
7045 OP0 and OP1. Return the folded expression if folding is
7046 successful. Otherwise, return NULL_TREE. */
7048 static tree
7049 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7051 tree t1 = NULL_TREE;
7052 tree tem;
7053 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7054 enum tree_code_class kind = TREE_CODE_CLASS (code);
7056 /* WINS will be nonzero when the switch is done
7057 if all operands are constant. */
7058 int wins = 1;
7060 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7061 && TREE_CODE_LENGTH (code) == 2);
7063 arg0 = op0;
7064 arg1 = op1;
7066 if (arg0)
7068 tree subop;
7070 /* Strip any conversions that don't change the mode. This is
7071 safe for every expression, except for a comparison expression
7072 because its signedness is derived from its operands. So, in
7073 the latter case, only strip conversions that don't change the
7074 signedness.
7076 Note that this is done as an internal manipulation within the
7077 constant folder, in order to find the simplest representation
7078 of the arguments so that their form can be studied. In any
7079 case, the appropriate type conversions should be put back in
7080 the tree that will get out of the constant folder. */
7081 if (kind == tcc_comparison)
7082 STRIP_SIGN_NOPS (arg0);
7083 else
7084 STRIP_NOPS (arg0);
7086 if (TREE_CODE (arg0) == COMPLEX_CST)
7087 subop = TREE_REALPART (arg0);
7088 else
7089 subop = arg0;
7091 if (TREE_CODE (subop) != INTEGER_CST
7092 && TREE_CODE (subop) != REAL_CST)
7093 /* Note that TREE_CONSTANT isn't enough:
7094 static var addresses are constant but we can't
7095 do arithmetic on them. */
7096 wins = 0;
7099 if (arg1)
7101 tree subop;
7103 /* Strip any conversions that don't change the mode. This is
7104 safe for every expression, except for a comparison expression
7105 because its signedness is derived from its operands. So, in
7106 the latter case, only strip conversions that don't change the
7107 signedness.
7109 Note that this is done as an internal manipulation within the
7110 constant folder, in order to find the simplest representation
7111 of the arguments so that their form can be studied. In any
7112 case, the appropriate type conversions should be put back in
7113 the tree that will get out of the constant folder. */
7114 if (kind == tcc_comparison)
7115 STRIP_SIGN_NOPS (arg1);
7116 else
7117 STRIP_NOPS (arg1);
7119 if (TREE_CODE (arg1) == COMPLEX_CST)
7120 subop = TREE_REALPART (arg1);
7121 else
7122 subop = arg1;
7124 if (TREE_CODE (subop) != INTEGER_CST
7125 && TREE_CODE (subop) != REAL_CST)
7126 /* Note that TREE_CONSTANT isn't enough:
7127 static var addresses are constant but we can't
7128 do arithmetic on them. */
7129 wins = 0;
7132 /* If this is a commutative operation, and ARG0 is a constant, move it
7133 to ARG1 to reduce the number of tests below. */
7134 if (commutative_tree_code (code)
7135 && tree_swap_operands_p (arg0, arg1, true))
7136 return fold_build2 (code, type, op1, op0);
7138 /* Now WINS is set as described above,
7139 ARG0 is the first operand of EXPR,
7140 and ARG1 is the second operand (if it has more than one operand).
7142 First check for cases where an arithmetic operation is applied to a
7143 compound, conditional, or comparison operation. Push the arithmetic
7144 operation inside the compound or conditional to see if any folding
7145 can then be done. Convert comparison to conditional for this purpose.
7146 This also optimizes non-constant cases that used to be done in
7147 expand_expr.
7149 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7150 where one of the operands is a truth value and the other is a truth
7151 value or a BIT_AND_EXPR with the constant 1. In that case, the
7152 code below would make the expression more complex. Change it to a
7153 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7154 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
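/* E.g. (a < b) & (c < d) becomes the TRUTH_AND_EXPR of the two
   comparisons, and (a < b) == (c < d) becomes the inverted
   TRUTH_XOR_EXPR of the two.  */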
7156 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7157 || code == EQ_EXPR || code == NE_EXPR)
7158 && ((truth_value_p (TREE_CODE (arg0))
7159 && (truth_value_p (TREE_CODE (arg1))
7160 || (TREE_CODE (arg1) == BIT_AND_EXPR
7161 && integer_onep (TREE_OPERAND (arg1, 1)))))
7162 || (truth_value_p (TREE_CODE (arg1))
7163 && (truth_value_p (TREE_CODE (arg0))
7164 || (TREE_CODE (arg0) == BIT_AND_EXPR
7165 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7167 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7168 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7169 : TRUTH_XOR_EXPR,
7170 boolean_type_node,
7171 fold_convert (boolean_type_node, arg0),
7172 fold_convert (boolean_type_node, arg1));
7174 if (code == EQ_EXPR)
7175 tem = invert_truthvalue (tem);
7177 return fold_convert (type, tem);
7180 if (TREE_CODE_CLASS (code) == tcc_comparison
7181 && TREE_CODE (arg0) == COMPOUND_EXPR)
7182 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7183 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7184 else if (TREE_CODE_CLASS (code) == tcc_comparison
7185 && TREE_CODE (arg1) == COMPOUND_EXPR)
7186 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7187 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7188 else if (TREE_CODE_CLASS (code) == tcc_binary
7189 || TREE_CODE_CLASS (code) == tcc_comparison)
7191 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7192 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7193 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7194 arg1));
7195 if (TREE_CODE (arg1) == COMPOUND_EXPR
7196 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7197 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7198 fold_build2 (code, type,
7199 arg0, TREE_OPERAND (arg1, 1)));
7201 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7203 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7204 arg0, arg1,
7205 /*cond_first_p=*/1);
7206 if (tem != NULL_TREE)
7207 return tem;
7210 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7212 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7213 arg1, arg0,
7214 /*cond_first_p=*/0);
7215 if (tem != NULL_TREE)
7216 return tem;
7220 switch (code)
7222 case PLUS_EXPR:
7223 /* A + (-B) -> A - B */
7224 if (TREE_CODE (arg1) == NEGATE_EXPR)
7225 return fold_build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7226 /* (-A) + B -> B - A */
7227 if (TREE_CODE (arg0) == NEGATE_EXPR
7228 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7229 return fold_build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0));
7230 /* Convert ~A + 1 to -A. */
7231 if (INTEGRAL_TYPE_P (type)
7232 && TREE_CODE (arg0) == BIT_NOT_EXPR
7233 && integer_onep (arg1))
7234 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7236 if (TREE_CODE (type) == COMPLEX_TYPE)
7238 tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
7239 if (tem)
7240 return tem;
7243 if (! FLOAT_TYPE_P (type))
7245 if (integer_zerop (arg1))
7246 return non_lvalue (fold_convert (type, arg0));
7248 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7249 with a constant, and the two constants have no bits in common,
7250 we should treat this as a BIT_IOR_EXPR since this may produce more
7251 simplifications. */
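/* E.g. (x & 0xF0) + (y & 0x0F) is handled as if it were
   (x & 0xF0) | (y & 0x0F).  */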
7252 if (TREE_CODE (arg0) == BIT_AND_EXPR
7253 && TREE_CODE (arg1) == BIT_AND_EXPR
7254 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7255 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7256 && integer_zerop (const_binop (BIT_AND_EXPR,
7257 TREE_OPERAND (arg0, 1),
7258 TREE_OPERAND (arg1, 1), 0)))
7260 code = BIT_IOR_EXPR;
7261 goto bit_ior;
7264 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7265 (plus (plus (mult) (mult)) (foo)) so that we can
7266 take advantage of the factoring cases below. */
7267 if (((TREE_CODE (arg0) == PLUS_EXPR
7268 || TREE_CODE (arg0) == MINUS_EXPR)
7269 && TREE_CODE (arg1) == MULT_EXPR)
7270 || ((TREE_CODE (arg1) == PLUS_EXPR
7271 || TREE_CODE (arg1) == MINUS_EXPR)
7272 && TREE_CODE (arg0) == MULT_EXPR))
7274 tree parg0, parg1, parg, marg;
7275 enum tree_code pcode;
7277 if (TREE_CODE (arg1) == MULT_EXPR)
7278 parg = arg0, marg = arg1;
7279 else
7280 parg = arg1, marg = arg0;
7281 pcode = TREE_CODE (parg);
7282 parg0 = TREE_OPERAND (parg, 0);
7283 parg1 = TREE_OPERAND (parg, 1);
7284 STRIP_NOPS (parg0);
7285 STRIP_NOPS (parg1);
7287 if (TREE_CODE (parg0) == MULT_EXPR
7288 && TREE_CODE (parg1) != MULT_EXPR)
7289 return fold_build2 (pcode, type,
7290 fold_build2 (PLUS_EXPR, type,
7291 fold_convert (type, parg0),
7292 fold_convert (type, marg)),
7293 fold_convert (type, parg1));
7294 if (TREE_CODE (parg0) != MULT_EXPR
7295 && TREE_CODE (parg1) == MULT_EXPR)
7296 return fold_build2 (PLUS_EXPR, type,
7297 fold_convert (type, parg0),
7298 fold_build2 (pcode, type,
7299 fold_convert (type, marg),
7300 fold_convert (type,
7301 parg1)));
7304 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7306 tree arg00, arg01, arg10, arg11;
7307 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7309 /* (A * C) + (B * C) -> (A+B) * C.
7310 We are most concerned about the case where C is a constant,
7311 but other combinations show up during loop reduction. Since
7312 it is not difficult, try all four possibilities. */
7314 arg00 = TREE_OPERAND (arg0, 0);
7315 arg01 = TREE_OPERAND (arg0, 1);
7316 arg10 = TREE_OPERAND (arg1, 0);
7317 arg11 = TREE_OPERAND (arg1, 1);
7318 same = NULL_TREE;
7320 if (operand_equal_p (arg01, arg11, 0))
7321 same = arg01, alt0 = arg00, alt1 = arg10;
7322 else if (operand_equal_p (arg00, arg10, 0))
7323 same = arg00, alt0 = arg01, alt1 = arg11;
7324 else if (operand_equal_p (arg00, arg11, 0))
7325 same = arg00, alt0 = arg01, alt1 = arg10;
7326 else if (operand_equal_p (arg01, arg10, 0))
7327 same = arg01, alt0 = arg00, alt1 = arg11;
7329 /* No identical multiplicands; see if we can find a common
7330 power-of-two factor in non-power-of-two multiplies. This
7331 can help in multi-dimensional array access. */
7332 else if (TREE_CODE (arg01) == INTEGER_CST
7333 && TREE_CODE (arg11) == INTEGER_CST
7334 && TREE_INT_CST_HIGH (arg01) == 0
7335 && TREE_INT_CST_HIGH (arg11) == 0)
7337 HOST_WIDE_INT int01, int11, tmp;
7338 int01 = TREE_INT_CST_LOW (arg01);
7339 int11 = TREE_INT_CST_LOW (arg11);
7341 /* Move min of absolute values to int11. */
7342 if ((int01 >= 0 ? int01 : -int01)
7343 < (int11 >= 0 ? int11 : -int11))
7345 tmp = int01, int01 = int11, int11 = tmp;
7346 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7347 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7350 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7352 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7353 build_int_cst (NULL_TREE,
7354 int01 / int11));
7355 alt1 = arg10;
7356 same = arg11;
7360 if (same)
7361 return fold_build2 (MULT_EXPR, type,
7362 fold_build2 (PLUS_EXPR, type,
7363 fold_convert (type, alt0),
7364 fold_convert (type, alt1)),
7365 same);
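/* Concrete instances of the factoring above: i*4 + j*4 becomes
   (i + j) * 4 through the identical-multiplicand checks, while the
   power-of-two branch turns i*12 + j*4 into (i*3 + j) * 4, since
   4 is a power of two and 12 % 4 == 0. */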
7368 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7369 of the array. The loop optimizer sometimes produces this type of
7370 expression. */
7371 if (TREE_CODE (arg0) == ADDR_EXPR
7372 && TREE_CODE (arg1) == MULT_EXPR)
7374 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7375 if (tem)
7376 return fold_convert (type, fold (tem));
7378 else if (TREE_CODE (arg1) == ADDR_EXPR
7379 && TREE_CODE (arg0) == MULT_EXPR)
7381 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7382 if (tem)
7383 return fold_convert (type, fold (tem));
7386 else
7388 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7389 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7390 return non_lvalue (fold_convert (type, arg0));
7392 /* Likewise if the operands are reversed. */
7393 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7394 return non_lvalue (fold_convert (type, arg1));
7396 /* Convert X + -C into X - C. */
7397 if (TREE_CODE (arg1) == REAL_CST
7398 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7400 tem = fold_negate_const (arg1, type);
7401 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7402 return fold_build2 (MINUS_EXPR, type,
7403 fold_convert (type, arg0),
7404 fold_convert (type, tem));
7407 /* Convert x+x into x*2.0. */
7408 if (operand_equal_p (arg0, arg1, 0)
7409 && SCALAR_FLOAT_TYPE_P (type))
7410 return fold_build2 (MULT_EXPR, type, arg0,
7411 build_real (type, dconst2));
7413 /* Convert x*c+x into x*(c+1). */
7414 if (flag_unsafe_math_optimizations
7415 && TREE_CODE (arg0) == MULT_EXPR
7416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7417 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7420 REAL_VALUE_TYPE c;
7422 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7423 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7424 return fold_build2 (MULT_EXPR, type, arg1,
7425 build_real (type, c));
7428 /* Convert x+x*c into x*(c+1). */
7429 if (flag_unsafe_math_optimizations
7430 && TREE_CODE (arg1) == MULT_EXPR
7431 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7432 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7433 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7435 REAL_VALUE_TYPE c;
7437 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7438 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7439 return fold_build2 (MULT_EXPR, type, arg0,
7440 build_real (type, c));
7443 /* Convert x*c1+x*c2 into x*(c1+c2). */
7444 if (flag_unsafe_math_optimizations
7445 && TREE_CODE (arg0) == MULT_EXPR
7446 && TREE_CODE (arg1) == MULT_EXPR
7447 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7448 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7449 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7450 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7451 && operand_equal_p (TREE_OPERAND (arg0, 0),
7452 TREE_OPERAND (arg1, 0), 0))
7454 REAL_VALUE_TYPE c1, c2;
7456 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7457 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7458 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7459 return fold_build2 (MULT_EXPR, type,
7460 TREE_OPERAND (arg0, 0),
7461 build_real (type, c1));
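/* For example, under -funsafe-math-optimizations x*2.0 + x*3.0
   becomes x*5.0. The flag is required because reassociating
   floating-point operations can change rounding behavior. */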
7463 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7464 if (flag_unsafe_math_optimizations
7465 && TREE_CODE (arg1) == PLUS_EXPR
7466 && TREE_CODE (arg0) != MULT_EXPR)
7468 tree tree10 = TREE_OPERAND (arg1, 0);
7469 tree tree11 = TREE_OPERAND (arg1, 1);
7470 if (TREE_CODE (tree11) == MULT_EXPR
7471 && TREE_CODE (tree10) == MULT_EXPR)
7473 tree tree0;
7474 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7475 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7478 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7479 if (flag_unsafe_math_optimizations
7480 && TREE_CODE (arg0) == PLUS_EXPR
7481 && TREE_CODE (arg1) != MULT_EXPR)
7483 tree tree00 = TREE_OPERAND (arg0, 0);
7484 tree tree01 = TREE_OPERAND (arg0, 1);
7485 if (TREE_CODE (tree01) == MULT_EXPR
7486 && TREE_CODE (tree00) == MULT_EXPR)
7488 tree tree0;
7489 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7490 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7495 bit_rotate:
7496 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the bit
7497 size of A, is a rotate of A by C1 bits. */
7498 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the bit
7499 size of A, is a rotate of A by B bits. */
7501 enum tree_code code0, code1;
7502 code0 = TREE_CODE (arg0);
7503 code1 = TREE_CODE (arg1);
7504 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7505 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7506 && operand_equal_p (TREE_OPERAND (arg0, 0),
7507 TREE_OPERAND (arg1, 0), 0)
7508 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7510 tree tree01, tree11;
7511 enum tree_code code01, code11;
7513 tree01 = TREE_OPERAND (arg0, 1);
7514 tree11 = TREE_OPERAND (arg1, 1);
7515 STRIP_NOPS (tree01);
7516 STRIP_NOPS (tree11);
7517 code01 = TREE_CODE (tree01);
7518 code11 = TREE_CODE (tree11);
7519 if (code01 == INTEGER_CST
7520 && code11 == INTEGER_CST
7521 && TREE_INT_CST_HIGH (tree01) == 0
7522 && TREE_INT_CST_HIGH (tree11) == 0
7523 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7524 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7525 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7526 code0 == LSHIFT_EXPR ? tree01 : tree11);
7527 else if (code11 == MINUS_EXPR)
7529 tree tree110, tree111;
7530 tree110 = TREE_OPERAND (tree11, 0);
7531 tree111 = TREE_OPERAND (tree11, 1);
7532 STRIP_NOPS (tree110);
7533 STRIP_NOPS (tree111);
7534 if (TREE_CODE (tree110) == INTEGER_CST
7535 && 0 == compare_tree_int (tree110,
7536 TYPE_PRECISION
7537 (TREE_TYPE (TREE_OPERAND
7538 (arg0, 0))))
7539 && operand_equal_p (tree01, tree111, 0))
7540 return build2 ((code0 == LSHIFT_EXPR
7541 ? LROTATE_EXPR
7542 : RROTATE_EXPR),
7543 type, TREE_OPERAND (arg0, 0), tree01);
7545 else if (code01 == MINUS_EXPR)
7547 tree tree010, tree011;
7548 tree010 = TREE_OPERAND (tree01, 0);
7549 tree011 = TREE_OPERAND (tree01, 1);
7550 STRIP_NOPS (tree010);
7551 STRIP_NOPS (tree011);
7552 if (TREE_CODE (tree010) == INTEGER_CST
7553 && 0 == compare_tree_int (tree010,
7554 TYPE_PRECISION
7555 (TREE_TYPE (TREE_OPERAND
7556 (arg0, 0))))
7557 && operand_equal_p (tree11, tree011, 0))
7558 return build2 ((code0 != LSHIFT_EXPR
7559 ? LROTATE_EXPR
7560 : RROTATE_EXPR),
7561 type, TREE_OPERAND (arg0, 0), tree11);
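/* For instance, with a 32-bit unsigned x, both
   (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n))
   are recognized here as left rotations of x (by 3 and by n,
   respectively). Unsignedness guarantees the right shift fills
   with zero bits, which is what makes the sum a rotate. */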
7566 associate:
7567 /* In most languages, we can't associate operations on floats through
7568 parentheses. Rather than remember where the parentheses were, we
7569 don't associate floats at all, unless the user has specified
7570 -funsafe-math-optimizations. */
7572 if (! wins
7573 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7575 tree var0, con0, lit0, minus_lit0;
7576 tree var1, con1, lit1, minus_lit1;
7578 /* Split both trees into variables, constants, and literals. Then
7579 associate each group together, the constants with literals,
7580 then the result with variables. This increases the chances of
7581 literals being recombined later and of generating relocatable
7582 expressions for the sum of a constant and literal. */
7583 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7584 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7585 code == MINUS_EXPR);
7587 /* Only do something if we found more than two objects. Otherwise,
7588 nothing has changed and we risk infinite recursion. */
7589 if (2 < ((var0 != 0) + (var1 != 0)
7590 + (con0 != 0) + (con1 != 0)
7591 + (lit0 != 0) + (lit1 != 0)
7592 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7594 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7595 if (code == MINUS_EXPR)
7596 code = PLUS_EXPR;
7598 var0 = associate_trees (var0, var1, code, type);
7599 con0 = associate_trees (con0, con1, code, type);
7600 lit0 = associate_trees (lit0, lit1, code, type);
7601 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7603 /* Preserve the MINUS_EXPR if the negative part of the literal is
7604 greater than the positive part. Otherwise, the multiplicative
7605 folding code (i.e. extract_muldiv) may be fooled when
7606 unsigned constants are subtracted, as in the following
7607 example: ((X*2 + 4) - 8U)/2. */
7608 if (minus_lit0 && lit0)
7610 if (TREE_CODE (lit0) == INTEGER_CST
7611 && TREE_CODE (minus_lit0) == INTEGER_CST
7612 && tree_int_cst_lt (lit0, minus_lit0))
7614 minus_lit0 = associate_trees (minus_lit0, lit0,
7615 MINUS_EXPR, type);
7616 lit0 = 0;
7618 else
7620 lit0 = associate_trees (lit0, minus_lit0,
7621 MINUS_EXPR, type);
7622 minus_lit0 = 0;
7625 if (minus_lit0)
7627 if (con0 == 0)
7628 return fold_convert (type,
7629 associate_trees (var0, minus_lit0,
7630 MINUS_EXPR, type));
7631 else
7633 con0 = associate_trees (con0, minus_lit0,
7634 MINUS_EXPR, type);
7635 return fold_convert (type,
7636 associate_trees (var0, con0,
7637 PLUS_EXPR, type));
7641 con0 = associate_trees (con0, lit0, code, type);
7642 return fold_convert (type, associate_trees (var0, con0,
7643 code, type));
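/* As an illustration of the splitting above, (x + 5) + (y + 7)
   splits into variables x, y and literals 5, 7; the literals are
   combined first, so the result is (x + y) + 12. */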
7647 binary:
7648 if (wins)
7649 t1 = const_binop (code, arg0, arg1, 0);
7650 if (t1 != NULL_TREE)
7652 /* The return value should always have
7653 the same type as the original expression. */
7654 if (TREE_TYPE (t1) != type)
7655 t1 = fold_convert (type, t1);
7657 return t1;
7659 return NULL_TREE;
7661 case MINUS_EXPR:
7662 /* A - (-B) -> A + B */
7663 if (TREE_CODE (arg1) == NEGATE_EXPR)
7664 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7665 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7666 if (TREE_CODE (arg0) == NEGATE_EXPR
7667 && (FLOAT_TYPE_P (type)
7668 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7669 && negate_expr_p (arg1)
7670 && reorder_operands_p (arg0, arg1))
7671 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7672 TREE_OPERAND (arg0, 0));
7673 /* Convert -A - 1 to ~A. */
7674 if (INTEGRAL_TYPE_P (type)
7675 && TREE_CODE (arg0) == NEGATE_EXPR
7676 && integer_onep (arg1))
7677 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7679 /* Convert -1 - A to ~A. */
7680 if (INTEGRAL_TYPE_P (type)
7681 && integer_all_onesp (arg0))
7682 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7684 if (TREE_CODE (type) == COMPLEX_TYPE)
7686 tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
7687 if (tem)
7688 return tem;
7691 if (! FLOAT_TYPE_P (type))
7693 if (! wins && integer_zerop (arg0))
7694 return negate_expr (fold_convert (type, arg1));
7695 if (integer_zerop (arg1))
7696 return non_lvalue (fold_convert (type, arg0));
7698 /* Fold A - (A & B) into ~B & A. */
7699 if (!TREE_SIDE_EFFECTS (arg0)
7700 && TREE_CODE (arg1) == BIT_AND_EXPR)
7702 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7703 return fold_build2 (BIT_AND_EXPR, type,
7704 fold_build1 (BIT_NOT_EXPR, type,
7705 TREE_OPERAND (arg1, 0)),
7706 arg0);
7707 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7708 return fold_build2 (BIT_AND_EXPR, type,
7709 fold_build1 (BIT_NOT_EXPR, type,
7710 TREE_OPERAND (arg1, 1)),
7711 arg0);
7714 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7715 any power of 2 minus 1. */
7716 if (TREE_CODE (arg0) == BIT_AND_EXPR
7717 && TREE_CODE (arg1) == BIT_AND_EXPR
7718 && operand_equal_p (TREE_OPERAND (arg0, 0),
7719 TREE_OPERAND (arg1, 0), 0))
7721 tree mask0 = TREE_OPERAND (arg0, 1);
7722 tree mask1 = TREE_OPERAND (arg1, 1);
7723 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7725 if (operand_equal_p (tem, mask1, 0))
7727 tem = fold_build2 (BIT_XOR_EXPR, type,
7728 TREE_OPERAND (arg0, 0), mask1);
7729 return fold_build2 (MINUS_EXPR, type, tem, mask1);
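/* For example, with B == 7 (a power of 2 minus 1):
   (a & ~7) - (a & 7) becomes (a ^ 7) - 7.
   This holds because a ^ 7 == (a & ~7) + (7 - (a & 7)), so
   subtracting 7 leaves exactly (a & ~7) - (a & 7). */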
7734 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7735 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7736 return non_lvalue (fold_convert (type, arg0));
7738 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7739 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7740 (-ARG1 + ARG0) reduces to -ARG1. */
7741 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7742 return negate_expr (fold_convert (type, arg1));
7744 /* Fold &x - &x. This can happen from &x.foo - &x.
7745 This is unsafe for certain floats even in non-IEEE formats.
7746 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7747 Also note that operand_equal_p is always false if an operand
7748 is volatile. */
7750 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7751 && operand_equal_p (arg0, arg1, 0))
7752 return fold_convert (type, integer_zero_node);
7754 /* A - B -> A + (-B) if B is easily negatable. */
7755 if (!wins && negate_expr_p (arg1)
7756 && ((FLOAT_TYPE_P (type)
7757 /* Avoid this transformation if B is a positive REAL_CST. */
7758 && (TREE_CODE (arg1) != REAL_CST
7759 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7760 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7761 return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
7763 /* Try folding difference of addresses. */
7765 HOST_WIDE_INT diff;
7767 if ((TREE_CODE (arg0) == ADDR_EXPR
7768 || TREE_CODE (arg1) == ADDR_EXPR)
7769 && ptr_difference_const (arg0, arg1, &diff))
7770 return build_int_cst_type (type, diff);
8773 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8774 of the array. The loop optimizer sometimes produces this type of
8775 expression. */
7776 if (TREE_CODE (arg0) == ADDR_EXPR
7777 && TREE_CODE (arg1) == MULT_EXPR)
7779 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7780 if (tem)
7781 return fold_convert (type, fold (tem));
7784 if (TREE_CODE (arg0) == MULT_EXPR
7785 && TREE_CODE (arg1) == MULT_EXPR
7786 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7788 /* (A * C) - (B * C) -> (A-B) * C. */
7789 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7790 TREE_OPERAND (arg1, 1), 0))
7791 return fold_build2 (MULT_EXPR, type,
7792 fold_build2 (MINUS_EXPR, type,
7793 TREE_OPERAND (arg0, 0),
7794 TREE_OPERAND (arg1, 0)),
7795 TREE_OPERAND (arg0, 1));
7796 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7797 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7798 TREE_OPERAND (arg1, 0), 0))
7799 return fold_build2 (MULT_EXPR, type,
7800 TREE_OPERAND (arg0, 0),
7801 fold_build2 (MINUS_EXPR, type,
7802 TREE_OPERAND (arg0, 1),
7803 TREE_OPERAND (arg1, 1)));
7806 goto associate;
7808 case MULT_EXPR:
7809 /* (-A) * (-B) -> A * B */
7810 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7811 return fold_build2 (MULT_EXPR, type,
7812 TREE_OPERAND (arg0, 0),
7813 negate_expr (arg1));
7814 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7815 return fold_build2 (MULT_EXPR, type,
7816 negate_expr (arg0),
7817 TREE_OPERAND (arg1, 0));
7819 if (TREE_CODE (type) == COMPLEX_TYPE)
7821 tem = fold_complex_mult (type, arg0, arg1);
7822 if (tem)
7823 return tem;
7826 if (! FLOAT_TYPE_P (type))
7828 if (integer_zerop (arg1))
7829 return omit_one_operand (type, arg1, arg0);
7830 if (integer_onep (arg1))
7831 return non_lvalue (fold_convert (type, arg0));
7832 /* Transform x * -1 into -x. */
7833 if (integer_all_onesp (arg1))
7834 return fold_convert (type, negate_expr (arg0));
7836 /* (a * (1 << b)) is (a << b) */
7837 if (TREE_CODE (arg1) == LSHIFT_EXPR
7838 && integer_onep (TREE_OPERAND (arg1, 0)))
7839 return fold_build2 (LSHIFT_EXPR, type, arg0,
7840 TREE_OPERAND (arg1, 1));
7841 if (TREE_CODE (arg0) == LSHIFT_EXPR
7842 && integer_onep (TREE_OPERAND (arg0, 0)))
7843 return fold_build2 (LSHIFT_EXPR, type, arg1,
7844 TREE_OPERAND (arg0, 1));
7846 if (TREE_CODE (arg1) == INTEGER_CST
7847 && 0 != (tem = extract_muldiv (op0,
7848 fold_convert (type, arg1),
7849 code, NULL_TREE)))
7850 return fold_convert (type, tem);
7853 else
7855 /* Maybe fold x * 0 to 0. The expressions aren't the same
7856 when x is NaN, since x * 0 is also NaN. Nor are they the
7857 same in modes with signed zeros, since multiplying a
7858 negative value by 0 gives -0, not +0. */
7859 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7860 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7861 && real_zerop (arg1))
7862 return omit_one_operand (type, arg1, arg0);
7863 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7864 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7865 && real_onep (arg1))
7866 return non_lvalue (fold_convert (type, arg0));
7868 /* Transform x * -1.0 into -x. */
7869 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7870 && real_minus_onep (arg1))
7871 return fold_convert (type, negate_expr (arg0));
7873 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7874 if (flag_unsafe_math_optimizations
7875 && TREE_CODE (arg0) == RDIV_EXPR
7876 && TREE_CODE (arg1) == REAL_CST
7877 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7879 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7880 arg1, 0);
7881 if (tem)
7882 return fold_build2 (RDIV_EXPR, type, tem,
7883 TREE_OPERAND (arg0, 1));
7886 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7887 if (operand_equal_p (arg0, arg1, 0))
7889 tree tem = fold_strip_sign_ops (arg0);
7890 if (tem != NULL_TREE)
7892 tem = fold_convert (type, tem);
7893 return fold_build2 (MULT_EXPR, type, tem, tem);
7897 if (flag_unsafe_math_optimizations)
7899 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7900 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7902 /* Optimizations of root(...)*root(...). */
7903 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7905 tree rootfn, arg, arglist;
7906 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7907 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7909 /* Optimize sqrt(x)*sqrt(x) as x. */
7910 if (BUILTIN_SQRT_P (fcode0)
7911 && operand_equal_p (arg00, arg10, 0)
7912 && ! HONOR_SNANS (TYPE_MODE (type)))
7913 return arg00;
7915 /* Optimize root(x)*root(y) as root(x*y). */
7916 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7917 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7918 arglist = build_tree_list (NULL_TREE, arg);
7919 return build_function_call_expr (rootfn, arglist);
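/* E.g. sqrt(x)*sqrt(y) is rewritten as sqrt(x*y). This is only
   valid under -funsafe-math-optimizations: when x and y are both
   negative, x*y is positive, so sqrt(x*y) is finite while
   sqrt(x)*sqrt(y) is NaN. */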
7922 /* Optimize expN(x)*expN(y) as expN(x+y). */
7923 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7925 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7926 tree arg = build2 (PLUS_EXPR, type,
7927 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7928 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7929 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7930 return build_function_call_expr (expfn, arglist);
7933 /* Optimizations of pow(...)*pow(...). */
7934 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7935 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7936 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7938 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7939 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7940 1)));
7941 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7942 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7943 1)));
7945 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7946 if (operand_equal_p (arg01, arg11, 0))
7948 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7949 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7950 tree arglist = tree_cons (NULL_TREE, fold (arg),
7951 build_tree_list (NULL_TREE,
7952 arg01));
7953 return build_function_call_expr (powfn, arglist);
7956 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7957 if (operand_equal_p (arg00, arg10, 0))
7959 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7960 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7961 tree arglist = tree_cons (NULL_TREE, arg00,
7962 build_tree_list (NULL_TREE,
7963 arg));
7964 return build_function_call_expr (powfn, arglist);
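/* E.g. pow(x,2.0)*pow(y,2.0) becomes pow(x*y,2.0) by the first
   case above, and pow(x,2.0)*pow(x,3.0) becomes pow(x,5.0) by the
   second. */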
7968 /* Optimize tan(x)*cos(x) as sin(x). */
7969 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7970 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7971 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7972 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7973 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7974 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7975 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7976 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7978 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7980 if (sinfn != NULL_TREE)
7981 return build_function_call_expr (sinfn,
7982 TREE_OPERAND (arg0, 1));
7985 /* Optimize x*pow(x,c) as pow(x,c+1). */
7986 if (fcode1 == BUILT_IN_POW
7987 || fcode1 == BUILT_IN_POWF
7988 || fcode1 == BUILT_IN_POWL)
7990 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7991 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7992 1)));
7993 if (TREE_CODE (arg11) == REAL_CST
7994 && ! TREE_CONSTANT_OVERFLOW (arg11)
7995 && operand_equal_p (arg0, arg10, 0))
7997 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7998 REAL_VALUE_TYPE c;
7999 tree arg, arglist;
8001 c = TREE_REAL_CST (arg11);
8002 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8003 arg = build_real (type, c);
8004 arglist = build_tree_list (NULL_TREE, arg);
8005 arglist = tree_cons (NULL_TREE, arg0, arglist);
8006 return build_function_call_expr (powfn, arglist);
8010 /* Optimize pow(x,c)*x as pow(x,c+1). */
8011 if (fcode0 == BUILT_IN_POW
8012 || fcode0 == BUILT_IN_POWF
8013 || fcode0 == BUILT_IN_POWL)
8015 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8016 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8017 1)));
8018 if (TREE_CODE (arg01) == REAL_CST
8019 && ! TREE_CONSTANT_OVERFLOW (arg01)
8020 && operand_equal_p (arg1, arg00, 0))
8022 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8023 REAL_VALUE_TYPE c;
8024 tree arg, arglist;
8026 c = TREE_REAL_CST (arg01);
8027 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8028 arg = build_real (type, c);
8029 arglist = build_tree_list (NULL_TREE, arg);
8030 arglist = tree_cons (NULL_TREE, arg1, arglist);
8031 return build_function_call_expr (powfn, arglist);
8035 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8036 if (! optimize_size
8037 && operand_equal_p (arg0, arg1, 0))
8039 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8041 if (powfn)
8043 tree arg = build_real (type, dconst2);
8044 tree arglist = build_tree_list (NULL_TREE, arg);
8045 arglist = tree_cons (NULL_TREE, arg0, arglist);
8046 return build_function_call_expr (powfn, arglist);
8051 goto associate;
8053 case BIT_IOR_EXPR:
8054 bit_ior:
8055 if (integer_all_onesp (arg1))
8056 return omit_one_operand (type, arg1, arg0);
8057 if (integer_zerop (arg1))
8058 return non_lvalue (fold_convert (type, arg0));
8059 if (operand_equal_p (arg0, arg1, 0))
8060 return non_lvalue (fold_convert (type, arg0));
8062 /* ~X | X is -1. */
8063 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8066 t1 = build_int_cst (type, -1);
8067 t1 = force_fit_type (t1, 0, false, false);
8068 return omit_one_operand (type, t1, arg1);
8071 /* X | ~X is -1. */
8072 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8075 t1 = build_int_cst (type, -1);
8076 t1 = force_fit_type (t1, 0, false, false);
8077 return omit_one_operand (type, t1, arg0);
8080 t1 = distribute_bit_expr (code, type, arg0, arg1);
8081 if (t1 != NULL_TREE)
8082 return t1;
8084 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8086 This results in more efficient code for machines without a NAND
8087 instruction. Combine will canonicalize to the first form
8088 which will allow use of NAND instructions provided by the
8089 backend if they exist. */
8090 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8091 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8093 return fold_build1 (BIT_NOT_EXPR, type,
8094 build2 (BIT_AND_EXPR, type,
8095 TREE_OPERAND (arg0, 0),
8096 TREE_OPERAND (arg1, 0)));
8099 /* See if this can be simplified into a rotate first. If that
8100 is unsuccessful continue in the association code. */
8101 goto bit_rotate;
8103 case BIT_XOR_EXPR:
8104 if (integer_zerop (arg1))
8105 return non_lvalue (fold_convert (type, arg0));
8106 if (integer_all_onesp (arg1))
8107 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8108 if (operand_equal_p (arg0, arg1, 0))
8109 return omit_one_operand (type, integer_zero_node, arg0);
8111 /* ~X ^ X is -1. */
8112 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8113 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8115 t1 = build_int_cst (type, -1);
8116 t1 = force_fit_type (t1, 0, false, false);
8117 return omit_one_operand (type, t1, arg1);
8120 /* X ^ ~X is -1. */
8121 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8122 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8124 t1 = build_int_cst (type, -1);
8125 t1 = force_fit_type (t1, 0, false, false);
8126 return omit_one_operand (type, t1, arg0);
8129 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8130 with a constant, and the two constants have no bits in common,
8131 we should treat this as a BIT_IOR_EXPR since this may produce more
8132 simplifications. */
8133 if (TREE_CODE (arg0) == BIT_AND_EXPR
8134 && TREE_CODE (arg1) == BIT_AND_EXPR
8135 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8136 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8137 && integer_zerop (const_binop (BIT_AND_EXPR,
8138 TREE_OPERAND (arg0, 1),
8139 TREE_OPERAND (arg1, 1), 0)))
8141 code = BIT_IOR_EXPR;
8142 goto bit_ior;
8145 /* See if this can be simplified into a rotate first. If that
8146 is unsuccessful continue in the association code. */
8147 goto bit_rotate;
8149 case BIT_AND_EXPR:
8150 if (integer_all_onesp (arg1))
8151 return non_lvalue (fold_convert (type, arg0));
8152 if (integer_zerop (arg1))
8153 return omit_one_operand (type, arg1, arg0);
8154 if (operand_equal_p (arg0, arg1, 0))
8155 return non_lvalue (fold_convert (type, arg0));
8157 /* ~X & X is always zero. */
8158 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8159 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8160 return omit_one_operand (type, integer_zero_node, arg1);
8162 /* X & ~X is always zero. */
8163 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8165 return omit_one_operand (type, integer_zero_node, arg0);
8167 t1 = distribute_bit_expr (code, type, arg0, arg1);
8168 if (t1 != NULL_TREE)
8169 return t1;
8170 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8171 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8172 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8174 unsigned int prec
8175 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8177 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8178 && (~TREE_INT_CST_LOW (arg1)
8179 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8180 return fold_convert (type, TREE_OPERAND (arg0, 0));
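/* For instance, if c has type unsigned char (prec == 8), then
   (int) c & 0377 keeps every bit the zero-extending conversion can
   produce, so the mask is redundant and the whole expression is
   just (int) c. */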
8183 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8185 This results in more efficient code for machines without a NOR
8186 instruction. Combine will canonicalize to the first form
8187 which will allow use of NOR instructions provided by the
8188 backend if they exist. */
8189 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8190 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8192 return fold_build1 (BIT_NOT_EXPR, type,
8193 build2 (BIT_IOR_EXPR, type,
8194 TREE_OPERAND (arg0, 0),
8195 TREE_OPERAND (arg1, 0)));
8198 goto associate;
8200 case RDIV_EXPR:
8201 /* Don't touch a floating-point divide by zero unless the mode
8202 of the constant can represent infinity. */
8203 if (TREE_CODE (arg1) == REAL_CST
8204 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8205 && real_zerop (arg1))
8206 return NULL_TREE;
8208 /* (-A) / (-B) -> A / B */
8209 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8210 return fold_build2 (RDIV_EXPR, type,
8211 TREE_OPERAND (arg0, 0),
8212 negate_expr (arg1));
8213 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8214 return fold_build2 (RDIV_EXPR, type,
8215 negate_expr (arg0),
8216 TREE_OPERAND (arg1, 0));
8218 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8219 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8220 && real_onep (arg1))
8221 return non_lvalue (fold_convert (type, arg0));
8223 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8224 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8225 && real_minus_onep (arg1))
8226 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8228 /* If ARG1 is a constant, we can convert this to a multiply by the
8229 reciprocal. This does not have the same rounding properties,
8230 so only do this if -funsafe-math-optimizations. We can actually
8231 always safely do it if ARG1 is a power of two, but it's hard to
8232 tell if it is or not in a portable manner. */
8233 if (TREE_CODE (arg1) == REAL_CST)
8235 if (flag_unsafe_math_optimizations
8236 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8237 arg1, 0)))
8238 return fold_build2 (MULT_EXPR, type, arg0, tem);
8239 /* Find the reciprocal if optimizing and the result is exact. */
8240 if (optimize)
8242 REAL_VALUE_TYPE r;
8243 r = TREE_REAL_CST (arg1);
8244 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8246 tem = build_real (type, r);
8247 return fold_build2 (MULT_EXPR, type, arg0, tem);
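/* exact_real_inverse only succeeds when the reciprocal is exactly
   representable, e.g. x / 4.0 becomes x * 0.25. Unlike the
   transformation above, this needs no -funsafe-math-optimizations
   because no rounding error is introduced. */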
8251 /* Convert A/B/C to A/(B*C). */
8252 if (flag_unsafe_math_optimizations
8253 && TREE_CODE (arg0) == RDIV_EXPR)
8254 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8255 fold_build2 (MULT_EXPR, type,
8256 TREE_OPERAND (arg0, 1), arg1));
8258 /* Convert A/(B/C) to (A/B)*C. */
8259 if (flag_unsafe_math_optimizations
8260 && TREE_CODE (arg1) == RDIV_EXPR)
8261 return fold_build2 (MULT_EXPR, type,
8262 fold_build2 (RDIV_EXPR, type, arg0,
8263 TREE_OPERAND (arg1, 0)),
8264 TREE_OPERAND (arg1, 1));
8266 /* Convert C1/(X*C2) into (C1/C2)/X. */
8267 if (flag_unsafe_math_optimizations
8268 && TREE_CODE (arg1) == MULT_EXPR
8269 && TREE_CODE (arg0) == REAL_CST
8270 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8272 tree tem = const_binop (RDIV_EXPR, arg0,
8273 TREE_OPERAND (arg1, 1), 0);
8274 if (tem)
8275 return fold_build2 (RDIV_EXPR, type, tem,
8276 TREE_OPERAND (arg1, 0));
8279 if (TREE_CODE (type) == COMPLEX_TYPE)
8281 tem = fold_complex_div (type, arg0, arg1, code);
8282 if (tem)
8283 return tem;
8286 if (flag_unsafe_math_optimizations)
8288 enum built_in_function fcode = builtin_mathfn_code (arg1);
8289 /* Optimize x/expN(y) into x*expN(-y). */
8290 if (BUILTIN_EXPONENT_P (fcode))
8292 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8293 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8294 tree arglist = build_tree_list (NULL_TREE,
8295 fold_convert (type, arg));
8296 arg1 = build_function_call_expr (expfn, arglist);
8297 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8300 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8301 if (fcode == BUILT_IN_POW
8302 || fcode == BUILT_IN_POWF
8303 || fcode == BUILT_IN_POWL)
8305 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8306 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8307 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8308 tree neg11 = fold_convert (type, negate_expr (arg11));
8309 tree arglist = tree_cons(NULL_TREE, arg10,
8310 build_tree_list (NULL_TREE, neg11));
8311 arg1 = build_function_call_expr (powfn, arglist);
8312 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8316 if (flag_unsafe_math_optimizations)
8318 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8319 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8321 /* Optimize sin(x)/cos(x) as tan(x). */
8322 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8323 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8324 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8325 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8326 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8328 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8330 if (tanfn != NULL_TREE)
8331 return build_function_call_expr (tanfn,
8332 TREE_OPERAND (arg0, 1));
8335 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8336 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8337 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8338 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8339 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8340 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8342 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8344 if (tanfn != NULL_TREE)
8346 tree tmp = TREE_OPERAND (arg0, 1);
8347 tmp = build_function_call_expr (tanfn, tmp);
8348 return fold_build2 (RDIV_EXPR, type,
8349 build_real (type, dconst1), tmp);
8353 /* Optimize pow(x,c)/x as pow(x,c-1). */
8354 if (fcode0 == BUILT_IN_POW
8355 || fcode0 == BUILT_IN_POWF
8356 || fcode0 == BUILT_IN_POWL)
8358 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8359 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8360 if (TREE_CODE (arg01) == REAL_CST
8361 && ! TREE_CONSTANT_OVERFLOW (arg01)
8362 && operand_equal_p (arg1, arg00, 0))
8364 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8365 REAL_VALUE_TYPE c;
8366 tree arg, arglist;
8368 c = TREE_REAL_CST (arg01);
8369 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8370 arg = build_real (type, c);
8371 arglist = build_tree_list (NULL_TREE, arg);
8372 arglist = tree_cons (NULL_TREE, arg1, arglist);
8373 return build_function_call_expr (powfn, arglist);
8377 goto binary;
8379 case TRUNC_DIV_EXPR:
8380 case ROUND_DIV_EXPR:
8381 case FLOOR_DIV_EXPR:
8382 case CEIL_DIV_EXPR:
8383 case EXACT_DIV_EXPR:
8384 if (integer_onep (arg1))
8385 return non_lvalue (fold_convert (type, arg0));
8386 if (integer_zerop (arg1))
8387 return NULL_TREE;
8388 /* X / -1 is -X. */
8389 if (!TYPE_UNSIGNED (type)
8390 && TREE_CODE (arg1) == INTEGER_CST
8391 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8392 && TREE_INT_CST_HIGH (arg1) == -1)
8393 return fold_convert (type, negate_expr (arg0));
8395 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8396 operation, EXACT_DIV_EXPR.
8398 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8399 At one time others generated faster code; it's not clear whether they
8400 still do after the last round of changes to the DIV code in expmed.c. */
8401 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8402 && multiple_of_p (type, arg0, arg1))
8403 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8405 if (TREE_CODE (arg1) == INTEGER_CST
8406 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8407 return fold_convert (type, tem);
8409 if (TREE_CODE (type) == COMPLEX_TYPE)
8411 tem = fold_complex_div (type, arg0, arg1, code);
8412 if (tem)
8413 return tem;
8415 goto binary;
8417 case CEIL_MOD_EXPR:
8418 case FLOOR_MOD_EXPR:
8419 case ROUND_MOD_EXPR:
8420 case TRUNC_MOD_EXPR:
8421 /* X % 1 is always zero, but be sure to preserve any side
8422 effects in X. */
8423 if (integer_onep (arg1))
8424 return omit_one_operand (type, integer_zero_node, arg0);
8426 /* For X % 0, return the expression unchanged so that we get the
8427 proper warnings and errors. */
8428 if (integer_zerop (arg1))
8429 return NULL_TREE;
8431 /* 0 % X is always zero, but be sure to preserve any side
8432 effects in X. Place this after checking for X == 0. */
8433 if (integer_zerop (arg0))
8434 return omit_one_operand (type, integer_zero_node, arg1);
8436 /* X % -1 is zero. */
8437 if (!TYPE_UNSIGNED (type)
8438 && TREE_CODE (arg1) == INTEGER_CST
8439 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8440 && TREE_INT_CST_HIGH (arg1) == -1)
8441 return omit_one_operand (type, integer_zero_node, arg0);
8443 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
8444 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
8445 if (code == TRUNC_MOD_EXPR
8446 && TYPE_UNSIGNED (type)
8447 && integer_pow2p (arg1))
8449 unsigned HOST_WIDE_INT high, low;
8450 tree mask;
8451 int l;
8453 l = tree_log2 (arg1);
8454 if (l >= HOST_BITS_PER_WIDE_INT)
8456 high = ((unsigned HOST_WIDE_INT) 1
8457 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8458 low = -1;
8460 else
8462 high = 0;
8463 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8466 mask = build_int_cst_wide (type, low, high);
8467 return fold_build2 (BIT_AND_EXPR, type,
8468 fold_convert (type, arg0), mask);
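/* E.g. for unsigned x, x % 8 becomes x & 7: tree_log2 gives
   l == 3, so the mask built here is (1 << 3) - 1. */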
8471 /* X % -C is the same as X % C. */
8472 if (code == TRUNC_MOD_EXPR
8473 && !TYPE_UNSIGNED (type)
8474 && TREE_CODE (arg1) == INTEGER_CST
8475 && TREE_INT_CST_HIGH (arg1) < 0
8476 && !flag_trapv
8477 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8478 && !sign_bit_p (arg1, arg1))
8479 return fold_build2 (code, type, fold_convert (type, arg0),
8480 fold_convert (type, negate_expr (arg1)));
8482 /* X % -Y is the same as X % Y. */
8483 if (code == TRUNC_MOD_EXPR
8484 && !TYPE_UNSIGNED (type)
8485 && TREE_CODE (arg1) == NEGATE_EXPR
8486 && !flag_trapv)
8487 return fold_build2 (code, type, fold_convert (type, arg0),
8488 fold_convert (type, TREE_OPERAND (arg1, 0)));
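/* With truncating division the sign of the remainder follows the
   dividend, not the divisor, so e.g. 7 % -3 == 7 % 3 == 1 and
   -7 % -3 == -7 % 3 == -1. */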
8490 if (TREE_CODE (arg1) == INTEGER_CST
8491 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8492 return fold_convert (type, tem);
8494 goto binary;
8496 case LROTATE_EXPR:
8497 case RROTATE_EXPR:
8498 if (integer_all_onesp (arg0))
8499 return omit_one_operand (type, arg0, arg1);
8500 goto shift;
8502 case RSHIFT_EXPR:
8503 /* Optimize -1 >> x for arithmetic right shifts. */
8504 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8505 return omit_one_operand (type, arg0, arg1);
8506 /* ... fall through ... */
8508 case LSHIFT_EXPR:
8509 shift:
8510 if (integer_zerop (arg1))
8511 return non_lvalue (fold_convert (type, arg0));
8512 if (integer_zerop (arg0))
8513 return omit_one_operand (type, arg0, arg1);
8515 /* Since a negative shift count is not well-defined,
8516 don't try to compute the result in the compiler. */
8517 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8518 return NULL_TREE;
8519 /* Rewrite an LROTATE_EXPR by a constant into an
8520 RROTATE_EXPR by a new constant. */
8521 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8523 tree tem = build_int_cst (NULL_TREE,
8524 GET_MODE_BITSIZE (TYPE_MODE (type)));
8525 tem = fold_convert (TREE_TYPE (arg1), tem);
8526 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8527 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
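/* E.g. in a 32-bit type, a left rotation by 5 becomes a right
   rotation by 27, so the transformations below only need to
   handle RROTATE_EXPR. */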
8530 /* If we have a rotate of a bit operation with the rotate count and
8531 the second operand of the bit operation both constant,
8532 permute the two operations. */
8533 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8534 && (TREE_CODE (arg0) == BIT_AND_EXPR
8535 || TREE_CODE (arg0) == BIT_IOR_EXPR
8536 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8537 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8538 return fold_build2 (TREE_CODE (arg0), type,
8539 fold_build2 (code, type,
8540 TREE_OPERAND (arg0, 0), arg1),
8541 fold_build2 (code, type,
8542 TREE_OPERAND (arg0, 1), arg1));
8544 /* Two consecutive rotates adding up to the width of the mode can
8545 be ignored. */
8546 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8547 && TREE_CODE (arg0) == RROTATE_EXPR
8548 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8549 && TREE_INT_CST_HIGH (arg1) == 0
8550 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8551 && ((TREE_INT_CST_LOW (arg1)
8552 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8553 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8554 return TREE_OPERAND (arg0, 0);
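/* E.g. in a 32-bit type, rotating x right by 10 and then by 22
   moves every bit 32 positions in total, which is the identity,
   so the result is just x. */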
8556 goto binary;
8558 case MIN_EXPR:
8559 if (operand_equal_p (arg0, arg1, 0))
8560 return omit_one_operand (type, arg0, arg1);
8561 if (INTEGRAL_TYPE_P (type)
8562 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8563 return omit_one_operand (type, arg1, arg0);
8564 goto associate;
8566 case MAX_EXPR:
8567 if (operand_equal_p (arg0, arg1, 0))
8568 return omit_one_operand (type, arg0, arg1);
8569 if (INTEGRAL_TYPE_P (type)
8570 && TYPE_MAX_VALUE (type)
8571 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8572 return omit_one_operand (type, arg1, arg0);
8573 goto associate;
8575 case TRUTH_ANDIF_EXPR:
8576 /* Note that the operands of this must be ints
8577 and their values must be 0 or 1.
8578 ("true" is a fixed value perhaps depending on the language.) */
8579 /* If first arg is constant zero, return it. */
8580 if (integer_zerop (arg0))
8581 return fold_convert (type, arg0);
8582 case TRUTH_AND_EXPR:
8583 /* If either arg is constant true, drop it. */
8584 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8585 return non_lvalue (fold_convert (type, arg1));
8586 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8587 /* Preserve sequence points. */
8588 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8589 return non_lvalue (fold_convert (type, arg0));
8590 /* If second arg is constant zero, result is zero, but first arg
8591 must be evaluated. */
8592 if (integer_zerop (arg1))
8593 return omit_one_operand (type, arg1, arg0);
8594 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8595 case will be handled here. */
8596 if (integer_zerop (arg0))
8597 return omit_one_operand (type, arg0, arg1);
8599 /* !X && X is always false. */
8600 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8601 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8602 return omit_one_operand (type, integer_zero_node, arg1);
8603 /* X && !X is always false. */
8604 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8606 return omit_one_operand (type, integer_zero_node, arg0);
8608 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8609 means A >= Y && A != MAX, but in this case we know that
8610 A < X <= MAX. */
8612 if (!TREE_SIDE_EFFECTS (arg0)
8613 && !TREE_SIDE_EFFECTS (arg1))
8615 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8616 if (tem)
8617 return fold_build2 (code, type, tem, arg1);
8619 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8620 if (tem)
8621 return fold_build2 (code, type, arg0, tem);
8624 truth_andor:
8625 /* We only do these simplifications if we are optimizing. */
8626 if (!optimize)
8627 return NULL_TREE;
8629 /* Check for things like (A || B) && (A || C). We can convert this
8630 to A || (B && C). Note that either operator can be any of the four
8631 truth and/or operations and the transformation will still be
8632 valid. Also note that we only care about order for the
8633 ANDIF and ORIF operators. If B contains side effects, this
8634 might change the truth-value of A. */
8635 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8636 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8637 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8638 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8639 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8640 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8642 tree a00 = TREE_OPERAND (arg0, 0);
8643 tree a01 = TREE_OPERAND (arg0, 1);
8644 tree a10 = TREE_OPERAND (arg1, 0);
8645 tree a11 = TREE_OPERAND (arg1, 1);
8646 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8647 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8648 && (code == TRUTH_AND_EXPR
8649 || code == TRUTH_OR_EXPR));
8651 if (operand_equal_p (a00, a10, 0))
8652 return fold_build2 (TREE_CODE (arg0), type, a00,
8653 fold_build2 (code, type, a01, a11));
8654 else if (commutative && operand_equal_p (a00, a11, 0))
8655 return fold_build2 (TREE_CODE (arg0), type, a00,
8656 fold_build2 (code, type, a01, a10));
8657 else if (commutative && operand_equal_p (a01, a10, 0))
8658 return fold_build2 (TREE_CODE (arg0), type, a01,
8659 fold_build2 (code, type, a00, a11));
8661 /* This case is tricky because we must either have commutative
8662 operators or else A10 must not have side-effects. */
8664 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8665 && operand_equal_p (a01, a11, 0))
8666 return fold_build2 (TREE_CODE (arg0), type,
8667 fold_build2 (code, type, a00, a10),
8668 a01);
8671 /* See if we can build a range comparison. */
8672 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8673 return tem;
8675 /* Check for the possibility of merging component references. If our
8676 lhs is another similar operation, try to merge its rhs with our
8677 rhs. Then try to merge our lhs and rhs. */
8678 if (TREE_CODE (arg0) == code
8679 && 0 != (tem = fold_truthop (code, type,
8680 TREE_OPERAND (arg0, 1), arg1)))
8681 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8683 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8684 return tem;
8686 return NULL_TREE;
8688 case TRUTH_ORIF_EXPR:
8689 /* Note that the operands of this must be ints
8690 and their values must be 0 or true.
8691 ("true" is a fixed value perhaps depending on the language.) */
8692 /* If first arg is constant true, return it. */
8693 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8694 return fold_convert (type, arg0);
8695 case TRUTH_OR_EXPR:
8696 /* If either arg is constant zero, drop it. */
8697 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8698 return non_lvalue (fold_convert (type, arg1));
8699 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8700 /* Preserve sequence points. */
8701 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8702 return non_lvalue (fold_convert (type, arg0));
8703 /* If second arg is constant true, result is true, but we must
8704 evaluate first arg. */
8705 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8706 return omit_one_operand (type, arg1, arg0);
8707 /* Likewise for first arg, but note this only occurs here for
8708 TRUTH_OR_EXPR. */
8709 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8710 return omit_one_operand (type, arg0, arg1);
8712 /* !X || X is always true. */
8713 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8714 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8715 return omit_one_operand (type, integer_one_node, arg1);
8716 /* X || !X is always true. */
8717 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8718 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8719 return omit_one_operand (type, integer_one_node, arg0);
8721 goto truth_andor;
8723 case TRUTH_XOR_EXPR:
8724 /* If the second arg is constant zero, drop it. */
8725 if (integer_zerop (arg1))
8726 return non_lvalue (fold_convert (type, arg0));
8727 /* If the second arg is constant true, this is a logical inversion. */
8728 if (integer_onep (arg1))
8730 /* Only call invert_truthvalue if operand is a truth value. */
8731 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8732 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8733 else
8734 tem = invert_truthvalue (arg0);
8735 return non_lvalue (fold_convert (type, tem));
8737 /* Identical arguments cancel to zero. */
8738 if (operand_equal_p (arg0, arg1, 0))
8739 return omit_one_operand (type, integer_zero_node, arg0);
8741 /* !X ^ X is always true. */
8742 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8743 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8744 return omit_one_operand (type, integer_one_node, arg1);
8746 /* X ^ !X is always true. */
8747 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8749 return omit_one_operand (type, integer_one_node, arg0);
8751 return NULL_TREE;
8753 case EQ_EXPR:
8754 case NE_EXPR:
8755 case LT_EXPR:
8756 case GT_EXPR:
8757 case LE_EXPR:
8758 case GE_EXPR:
8759 /* If one arg is a real or integer constant, put it last. */
8760 if (tree_swap_operands_p (arg0, arg1, true))
8761 return fold_build2 (swap_tree_comparison (code), type, arg1, arg0);
8763 /* If this is an equality comparison of the address of a non-weak
8764 object against zero, then we know the result. */
8765 if ((code == EQ_EXPR || code == NE_EXPR)
8766 && TREE_CODE (arg0) == ADDR_EXPR
8767 && DECL_P (TREE_OPERAND (arg0, 0))
8768 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8769 && integer_zerop (arg1))
8770 return constant_boolean_node (code != EQ_EXPR, type);
8772 /* If this is an equality comparison of the address of two non-weak,
8773 unaliased symbols neither of which are extern (since we do not
8774 have access to attributes for externs), then we know the result. */
8775 if ((code == EQ_EXPR || code == NE_EXPR)
8776 && TREE_CODE (arg0) == ADDR_EXPR
8777 && DECL_P (TREE_OPERAND (arg0, 0))
8778 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8779 && ! lookup_attribute ("alias",
8780 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8781 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8782 && TREE_CODE (arg1) == ADDR_EXPR
8783 && DECL_P (TREE_OPERAND (arg1, 0))
8784 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8785 && ! lookup_attribute ("alias",
8786 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8787 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8788 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8789 ? code == EQ_EXPR : code != EQ_EXPR,
8790 type);
8792 /* If this is a comparison of two exprs that look like an
8793 ARRAY_REF of the same object, then we can fold this to a
8794 comparison of the two offsets. */
8795 if (TREE_CODE_CLASS (code) == tcc_comparison)
8797 tree base0, offset0, base1, offset1;
8799 if (extract_array_ref (arg0, &base0, &offset0)
8800 && extract_array_ref (arg1, &base1, &offset1)
8801 && operand_equal_p (base0, base1, 0))
8803 if (offset0 == NULL_TREE
8804 && offset1 == NULL_TREE)
8806 offset0 = integer_zero_node;
8807 offset1 = integer_zero_node;
8809 else if (offset0 == NULL_TREE)
8810 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8811 else if (offset1 == NULL_TREE)
8812 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8814 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8815 return fold_build2 (code, type, offset0, offset1);
8819 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8821 tree targ0 = strip_float_extensions (arg0);
8822 tree targ1 = strip_float_extensions (arg1);
8823 tree newtype = TREE_TYPE (targ0);
8825 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8826 newtype = TREE_TYPE (targ1);
8828 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8829 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8830 return fold_build2 (code, type, fold_convert (newtype, targ0),
8831 fold_convert (newtype, targ1));
8833 /* (-a) CMP (-b) -> b CMP a */
8834 if (TREE_CODE (arg0) == NEGATE_EXPR
8835 && TREE_CODE (arg1) == NEGATE_EXPR)
8836 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8837 TREE_OPERAND (arg0, 0));
8839 if (TREE_CODE (arg1) == REAL_CST)
8841 REAL_VALUE_TYPE cst;
8842 cst = TREE_REAL_CST (arg1);
8844 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8845 if (TREE_CODE (arg0) == NEGATE_EXPR)
8846 return
8847 fold_build2 (swap_tree_comparison (code), type,
8848 TREE_OPERAND (arg0, 0),
8849 build_real (TREE_TYPE (arg1),
8850 REAL_VALUE_NEGATE (cst)));
8852 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8853 /* a CMP (-0) -> a CMP 0 */
8854 if (REAL_VALUE_MINUS_ZERO (cst))
8855 return fold_build2 (code, type, arg0,
8856 build_real (TREE_TYPE (arg1), dconst0));
8858 /* x != NaN is always true, other ops are always false. */
8859 if (REAL_VALUE_ISNAN (cst)
8860 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8862 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8863 return omit_one_operand (type, tem, arg0);
8866 /* Fold comparisons against infinity. */
8867 if (REAL_VALUE_ISINF (cst))
8869 tem = fold_inf_compare (code, type, arg0, arg1);
8870 if (tem != NULL_TREE)
8871 return tem;
8875 /* If this is a comparison of a real constant with a PLUS_EXPR
8876 or a MINUS_EXPR of a real constant, we can convert it into a
8877 comparison with a revised real constant as long as no overflow
8878 occurs when unsafe_math_optimizations are enabled. */
8879 if (flag_unsafe_math_optimizations
8880 && TREE_CODE (arg1) == REAL_CST
8881 && (TREE_CODE (arg0) == PLUS_EXPR
8882 || TREE_CODE (arg0) == MINUS_EXPR)
8883 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8884 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8885 ? MINUS_EXPR : PLUS_EXPR,
8886 arg1, TREE_OPERAND (arg0, 1), 0))
8887 && ! TREE_CONSTANT_OVERFLOW (tem))
8888 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8890 /* Likewise, we can simplify a comparison of a real constant with
8891 a MINUS_EXPR whose first operand is also a real constant, i.e.
8892 (c1 - x) < c2 becomes x > c1-c2. */
8893 if (flag_unsafe_math_optimizations
8894 && TREE_CODE (arg1) == REAL_CST
8895 && TREE_CODE (arg0) == MINUS_EXPR
8896 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8897 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8898 arg1, 0))
8899 && ! TREE_CONSTANT_OVERFLOW (tem))
8900 return fold_build2 (swap_tree_comparison (code), type,
8901 TREE_OPERAND (arg0, 1), tem);
8903 /* Fold comparisons against built-in math functions. */
8904 if (TREE_CODE (arg1) == REAL_CST
8905 && flag_unsafe_math_optimizations
8906 && ! flag_errno_math)
8908 enum built_in_function fcode = builtin_mathfn_code (arg0);
8910 if (fcode != END_BUILTINS)
8912 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8913 if (tem != NULL_TREE)
8914 return tem;
8919 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8920 if (TREE_CONSTANT (arg1)
8921 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8922 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8923 /* This optimization is invalid for ordered comparisons
8924 if CONST+INCR overflows or if foo+incr might overflow.
8925 This optimization is invalid for floating point due to rounding.
8926 For pointer types we assume overflow doesn't happen. */
8927 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8928 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8929 && (code == EQ_EXPR || code == NE_EXPR))))
8931 tree varop, newconst;
8933 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8935 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8936 arg1, TREE_OPERAND (arg0, 1));
8937 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8938 TREE_OPERAND (arg0, 0),
8939 TREE_OPERAND (arg0, 1));
8941 else
8943 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8944 arg1, TREE_OPERAND (arg0, 1));
8945 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8946 TREE_OPERAND (arg0, 0),
8947 TREE_OPERAND (arg0, 1));
8951 /* If VAROP is a reference to a bitfield, we must mask
8952 the constant by the width of the field. */
8953 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8954 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8955 && host_integerp (DECL_SIZE (TREE_OPERAND
8956 (TREE_OPERAND (varop, 0), 1)), 1))
8958 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8959 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8960 tree folded_compare, shift;
8962 /* First check whether the comparison would always come
8963 out the same. Without this check, the masking below
8964 would change the meaning. */
8965 folded_compare = fold_build2 (code, type,
8966 TREE_OPERAND (varop, 0), arg1);
8967 if (integer_zerop (folded_compare)
8968 || integer_onep (folded_compare))
8969 return omit_one_operand (type, folded_compare, varop);
8971 shift = build_int_cst (NULL_TREE,
8972 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8973 shift = fold_convert (TREE_TYPE (varop), shift);
8974 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8975 newconst, shift);
8976 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8977 newconst, shift);
8980 return fold_build2 (code, type, varop, newconst);
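/* For example, "i++ == 5" becomes "++i == 6": the increment is moved
   ahead of the (equality-only) comparison and the constant is
   adjusted by the same amount to compensate. */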
8983 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8984 This transformation affects the cases which are handled in later
8985 optimizations involving comparisons with non-negative constants. */
8986 if (TREE_CODE (arg1) == INTEGER_CST
8987 && TREE_CODE (arg0) != INTEGER_CST
8988 && tree_int_cst_sgn (arg1) > 0)
8990 switch (code)
8992 case GE_EXPR:
8993 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8994 return fold_build2 (GT_EXPR, type, arg0, arg1);
8996 case LT_EXPR:
8997 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8998 return fold_build2 (LE_EXPR, type, arg0, arg1);
9000 default:
9001 break;
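/* For example, "x >= 3" becomes "x > 2" and "x < 3" becomes "x <= 2",
   so the later transformations only need to recognize one form. */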
9005 /* Comparisons with the highest or lowest possible integer of
9006 the specified size will have known values.
9008 This is quite similar to fold_relational_hi_lo; however,
9009 attempts to share the code have been nothing but trouble. */
9011 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9013 if (TREE_CODE (arg1) == INTEGER_CST
9014 && ! TREE_CONSTANT_OVERFLOW (arg1)
9015 && width <= 2 * HOST_BITS_PER_WIDE_INT
9016 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9017 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9019 HOST_WIDE_INT signed_max_hi;
9020 unsigned HOST_WIDE_INT signed_max_lo;
9021 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9023 if (width <= HOST_BITS_PER_WIDE_INT)
9025 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9026 - 1;
9027 signed_max_hi = 0;
9028 max_hi = 0;
9030 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9032 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9033 min_lo = 0;
9034 min_hi = 0;
9036 else
9038 max_lo = signed_max_lo;
9039 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9040 min_hi = -1;
9043 else
9045 width -= HOST_BITS_PER_WIDE_INT;
9046 signed_max_lo = -1;
9047 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9048 - 1;
9049 max_lo = -1;
9050 min_lo = 0;
9052 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9054 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9055 min_hi = 0;
9057 else
9059 max_hi = signed_max_hi;
9060 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9064 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9065 && TREE_INT_CST_LOW (arg1) == max_lo)
9066 switch (code)
9068 case GT_EXPR:
9069 return omit_one_operand (type, integer_zero_node, arg0);
9071 case GE_EXPR:
9072 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9074 case LE_EXPR:
9075 return omit_one_operand (type, integer_one_node, arg0);
9077 case LT_EXPR:
9078 return fold_build2 (NE_EXPR, type, arg0, arg1);
9080 /* The GE_EXPR and LT_EXPR cases above are not normally
9081 reached because of previous transformations. */
9083 default:
9084 break;
9086 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9087 == max_hi
9088 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9089 switch (code)
9091 case GT_EXPR:
9092 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9093 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9094 case LE_EXPR:
9095 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9096 return fold_build2 (NE_EXPR, type, arg0, arg1);
9097 default:
9098 break;
9100 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9101 == min_hi
9102 && TREE_INT_CST_LOW (arg1) == min_lo)
9103 switch (code)
9105 case LT_EXPR:
9106 return omit_one_operand (type, integer_zero_node, arg0);
9108 case LE_EXPR:
9109 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9111 case GE_EXPR:
9112 return omit_one_operand (type, integer_one_node, arg0);
9114 case GT_EXPR:
9115 return fold_build2 (NE_EXPR, type, arg0, arg1);
9117 default:
9118 break;
9120 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9121 == min_hi
9122 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9123 switch (code)
9125 case GE_EXPR:
9126 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9127 return fold_build2 (NE_EXPR, type, arg0, arg1);
9128 case LT_EXPR:
9129 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9130 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9131 default:
9132 break;
9135 else if (!in_gimple_form
9136 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9137 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9138 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9139 /* signed_type does not work on pointer types. */
9140 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9142 /* The following case also applies to X < signed_max+1
9143 and X >= signed_max+1 because of previous transformations. */
9144 if (code == LE_EXPR || code == GT_EXPR)
9146 tree st0, st1;
9147 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9148 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9149 return fold
9150 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9151 type, fold_convert (st0, arg0),
9152 fold_convert (st1, integer_zero_node)));
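/* For example, with 32-bit ints and when not in GIMPLE form,
   "(unsigned) x <= 2147483647" is rewritten as "(int) x >= 0" and
   "(unsigned) x > 2147483647" as "(int) x < 0". */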
9158 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9159 a MINUS_EXPR of a constant, we can convert it into a comparison with
9160 a revised constant as long as no overflow occurs. */
9161 if ((code == EQ_EXPR || code == NE_EXPR)
9162 && TREE_CODE (arg1) == INTEGER_CST
9163 && (TREE_CODE (arg0) == PLUS_EXPR
9164 || TREE_CODE (arg0) == MINUS_EXPR)
9165 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9166 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9167 ? MINUS_EXPR : PLUS_EXPR,
9168 arg1, TREE_OPERAND (arg0, 1), 0))
9169 && ! TREE_CONSTANT_OVERFLOW (tem))
9170 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9172 /* Similarly for a NEGATE_EXPR. */
9173 else if ((code == EQ_EXPR || code == NE_EXPR)
9174 && TREE_CODE (arg0) == NEGATE_EXPR
9175 && TREE_CODE (arg1) == INTEGER_CST
9176 && 0 != (tem = negate_expr (arg1))
9177 && TREE_CODE (tem) == INTEGER_CST
9178 && ! TREE_CONSTANT_OVERFLOW (tem))
9179 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9181 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9182 for !=. Don't do this for ordered comparisons due to overflow. */
9183 else if ((code == NE_EXPR || code == EQ_EXPR)
9184 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9185 return fold_build2 (code, type,
9186 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9188 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9189 && (TREE_CODE (arg0) == NOP_EXPR
9190 || TREE_CODE (arg0) == CONVERT_EXPR))
9192 /* If we are widening one operand of an integer comparison,
9193 see if the other operand is similarly being widened. Perhaps we
9194 can do the comparison in the narrower type. */
9195 tem = fold_widened_comparison (code, type, arg0, arg1);
9196 if (tem)
9197 return tem;
9199 /* Or if we are changing signedness. */
9200 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9201 if (tem)
9202 return tem;
9205 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9206 constant, we can simplify it. */
9207 else if (TREE_CODE (arg1) == INTEGER_CST
9208 && (TREE_CODE (arg0) == MIN_EXPR
9209 || TREE_CODE (arg0) == MAX_EXPR)
9210 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9212 tem = optimize_minmax_comparison (code, type, op0, op1);
9213 if (tem)
9214 return tem;
9216 return NULL_TREE;
9219 /* If we are comparing an ABS_EXPR with a constant, we can
9220 convert all the cases into explicit comparisons, but they may
9221 well not be faster than doing the ABS and one comparison.
9222 But ABS (X) <= C is a range comparison, which becomes a subtraction
9223 and a comparison, and is probably faster. */
9224 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9225 && TREE_CODE (arg0) == ABS_EXPR
9226 && ! TREE_SIDE_EFFECTS (arg0)
9227 && (0 != (tem = negate_expr (arg1)))
9228 && TREE_CODE (tem) == INTEGER_CST
9229 && ! TREE_CONSTANT_OVERFLOW (tem))
9230 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9231 build2 (GE_EXPR, type,
9232 TREE_OPERAND (arg0, 0), tem),
9233 build2 (LE_EXPR, type,
9234 TREE_OPERAND (arg0, 0), arg1));
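/* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5", which the
   range-test machinery can then turn into a subtraction and a single
   comparison. */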
9236 /* Convert ABS_EXPR<x> >= 0 to true. */
9237 else if (code == GE_EXPR
9238 && tree_expr_nonnegative_p (arg0)
9239 && (integer_zerop (arg1)
9240 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9241 && real_zerop (arg1))))
9242 return omit_one_operand (type, integer_one_node, arg0);
9244 /* Convert ABS_EXPR<x> < 0 to false. */
9245 else if (code == LT_EXPR
9246 && tree_expr_nonnegative_p (arg0)
9247 && (integer_zerop (arg1) || real_zerop (arg1)))
9248 return omit_one_operand (type, integer_zero_node, arg0);
9250 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9251 else if ((code == EQ_EXPR || code == NE_EXPR)
9252 && TREE_CODE (arg0) == ABS_EXPR
9253 && (integer_zerop (arg1) || real_zerop (arg1)))
9254 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9256 /* If this is an EQ or NE comparison with zero and ARG0 is
9257 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9258 two operations, but the latter can be done in one less insn
9259 on machines that have only two-operand insns or on which a
9260 constant cannot be the first operand. */
9261 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9262 && TREE_CODE (arg0) == BIT_AND_EXPR)
9264 tree arg00 = TREE_OPERAND (arg0, 0);
9265 tree arg01 = TREE_OPERAND (arg0, 1);
9266 if (TREE_CODE (arg00) == LSHIFT_EXPR
9267 && integer_onep (TREE_OPERAND (arg00, 0)))
9268 return
9269 fold_build2 (code, type,
9270 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9271 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9272 arg01, TREE_OPERAND (arg00, 1)),
9273 fold_convert (TREE_TYPE (arg0),
9274 integer_one_node)),
9275 arg1);
9276 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9277 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9278 return
9279 fold_build2 (code, type,
9280 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9281 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9282 arg00, TREE_OPERAND (arg01, 1)),
9283 fold_convert (TREE_TYPE (arg0),
9284 integer_one_node)),
9285 arg1);
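/* For example, "((1 << n) & x) != 0" becomes "((x >> n) & 1) != 0",
   which needs one less instruction when the constant 1 cannot appear
   as the first operand of the shift. */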
9288 /* If this is an NE or EQ comparison of zero against the result of a
9289 signed MOD operation whose second operand is a power of 2, make
9290 the MOD operation unsigned since it is simpler and equivalent. */
9291 if ((code == NE_EXPR || code == EQ_EXPR)
9292 && integer_zerop (arg1)
9293 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9294 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9295 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9296 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9297 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9298 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9300 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9301 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9302 fold_convert (newtype,
9303 TREE_OPERAND (arg0, 0)),
9304 fold_convert (newtype,
9305 TREE_OPERAND (arg0, 1)));
9307 return fold_build2 (code, type, newmod,
9308 fold_convert (newtype, arg1));
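/* For example, with "int x", "x % 4 != 0" becomes
   "(unsigned) x % 4 != 0"; the two are equivalent when tested against
   zero, and the unsigned MOD is simpler to expand. */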
9311 /* If this is an NE comparison of zero with an AND of one, remove the
9312 comparison since the AND will give the correct value. */
9313 if (code == NE_EXPR && integer_zerop (arg1)
9314 && TREE_CODE (arg0) == BIT_AND_EXPR
9315 && integer_onep (TREE_OPERAND (arg0, 1)))
9316 return fold_convert (type, arg0);
9318 /* If we have (A & C) == C where C is a power of 2, convert this into
9319 (A & C) != 0. Similarly for NE_EXPR. */
9320 if ((code == EQ_EXPR || code == NE_EXPR)
9321 && TREE_CODE (arg0) == BIT_AND_EXPR
9322 && integer_pow2p (TREE_OPERAND (arg0, 1))
9323 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9324 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9325 arg0, fold_convert (TREE_TYPE (arg0),
9326 integer_zero_node));
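/* For example, "(x & 8) == 8" becomes "(x & 8) != 0", and
   "(x & 8) != 8" becomes "(x & 8) == 0", since 8 is a power of 2. */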
9328 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
9329 2, then fold the expression into shifts and logical operations. */
9330 tem = fold_single_bit_test (code, arg0, arg1, type);
9331 if (tem)
9332 return tem;
9334 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9335 Similarly for NE_EXPR. */
9336 if ((code == EQ_EXPR || code == NE_EXPR)
9337 && TREE_CODE (arg0) == BIT_AND_EXPR
9338 && TREE_CODE (arg1) == INTEGER_CST
9339 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9341 tree notc = fold_build1 (BIT_NOT_EXPR,
9342 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9343 TREE_OPERAND (arg0, 1));
9344 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9345 arg1, notc);
9346 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9347 if (integer_nonzerop (dandnotc))
9348 return omit_one_operand (type, rslt, arg0);
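/* For example, "(x & 4) == 3" folds to 0: 3 & ~4 is nonzero, so no
   value of x can make the masked result equal to 3. */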
9351 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9352 Similarly for NE_EXPR. */
9353 if ((code == EQ_EXPR || code == NE_EXPR)
9354 && TREE_CODE (arg0) == BIT_IOR_EXPR
9355 && TREE_CODE (arg1) == INTEGER_CST
9356 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9358 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9359 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9360 TREE_OPERAND (arg0, 1), notd);
9361 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9362 if (integer_nonzerop (candnotd))
9363 return omit_one_operand (type, rslt, arg0);
9366 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9367 and similarly for >= into !=. */
9368 if ((code == LT_EXPR || code == GE_EXPR)
9369 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9370 && TREE_CODE (arg1) == LSHIFT_EXPR
9371 && integer_onep (TREE_OPERAND (arg1, 0)))
9372 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9373 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9374 TREE_OPERAND (arg1, 1)),
9375 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9377 else if ((code == LT_EXPR || code == GE_EXPR)
9378 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9379 && (TREE_CODE (arg1) == NOP_EXPR
9380 || TREE_CODE (arg1) == CONVERT_EXPR)
9381 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9382 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9383 return
9384 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9385 fold_convert (TREE_TYPE (arg0),
9386 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9387 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9388 1))),
9389 fold_convert (TREE_TYPE (arg0), integer_zero_node));
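/* For example, for unsigned x, "x < (1 << n)" becomes
   "(x >> n) == 0" and "x >= (1 << n)" becomes "(x >> n) != 0". */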
9391 /* Simplify comparison of something with itself. (For IEEE
9392 floating-point, we can only do some of these simplifications.) */
9393 if (operand_equal_p (arg0, arg1, 0))
9395 switch (code)
9397 case EQ_EXPR:
9398 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9399 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9400 return constant_boolean_node (1, type);
9401 break;
9403 case GE_EXPR:
9404 case LE_EXPR:
9405 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9406 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9407 return constant_boolean_node (1, type);
9408 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9410 case NE_EXPR:
9411 /* For NE, we can only do this simplification if the type is
9412 integral or we don't honor IEEE floating point NaNs. */
9413 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9414 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9415 break;
9416 /* ... fall through ... */
9417 case GT_EXPR:
9418 case LT_EXPR:
9419 return constant_boolean_node (0, type);
9420 default:
9421 gcc_unreachable ();
9425 /* If we are comparing an expression that just has comparisons
9426 of two integer values, arithmetic expressions of those comparisons,
9427 and constants, we can simplify it. There are only three cases
9428 to check: the two values can either be equal, the first can be
9429 greater, or the second can be greater. Fold the expression for
9430 those three values. Since each value must be 0 or 1, we have
9431 eight possibilities, each of which corresponds to the constant 0
9432 or 1 or one of the six possible comparisons.
9434 This handles common cases like (a > b) == 0 but also handles
9435 expressions like ((x > y) - (y > x)) > 0, which supposedly
9436 occur in macroized code. */
9438 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9440 tree cval1 = 0, cval2 = 0;
9441 int save_p = 0;
9443 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9444 /* Don't handle degenerate cases here; they should already
9445 have been handled anyway. */
9446 && cval1 != 0 && cval2 != 0
9447 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9448 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9449 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9450 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9451 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9452 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9453 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9455 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9456 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9458 /* We can't just pass T to eval_subst in case cval1 or cval2
9459 was the same as ARG1. */
9461 tree high_result
9462 = fold_build2 (code, type,
9463 eval_subst (arg0, cval1, maxval,
9464 cval2, minval),
9465 arg1);
9466 tree equal_result
9467 = fold_build2 (code, type,
9468 eval_subst (arg0, cval1, maxval,
9469 cval2, maxval),
9470 arg1);
9471 tree low_result
9472 = fold_build2 (code, type,
9473 eval_subst (arg0, cval1, minval,
9474 cval2, maxval),
9475 arg1);
9477 /* All three of these results should be 0 or 1. Confirm they
9478 are. Then use those values to select the proper code
9479 to use. */
9481 if ((integer_zerop (high_result)
9482 || integer_onep (high_result))
9483 && (integer_zerop (equal_result)
9484 || integer_onep (equal_result))
9485 && (integer_zerop (low_result)
9486 || integer_onep (low_result)))
9488 /* Make a 3-bit mask with the high-order bit being the
9489 value for `>', the next for '=', and the low for '<'. */
9490 switch ((integer_onep (high_result) * 4)
9491 + (integer_onep (equal_result) * 2)
9492 + integer_onep (low_result))
9494 case 0:
9495 /* Always false. */
9496 return omit_one_operand (type, integer_zero_node, arg0);
9497 case 1:
9498 code = LT_EXPR;
9499 break;
9500 case 2:
9501 code = EQ_EXPR;
9502 break;
9503 case 3:
9504 code = LE_EXPR;
9505 break;
9506 case 4:
9507 code = GT_EXPR;
9508 break;
9509 case 5:
9510 code = NE_EXPR;
9511 break;
9512 case 6:
9513 code = GE_EXPR;
9514 break;
9515 case 7:
9516 /* Always true. */
9517 return omit_one_operand (type, integer_one_node, arg0);
9520 tem = build2 (code, type, cval1, cval2);
9521 if (save_p)
9522 return save_expr (tem);
9523 else
9524 return fold (tem);
9529 /* If this is a comparison of a field, we may be able to simplify it. */
9530 if (((TREE_CODE (arg0) == COMPONENT_REF
9531 && lang_hooks.can_use_bit_fields_p ())
9532 || TREE_CODE (arg0) == BIT_FIELD_REF)
9533 && (code == EQ_EXPR || code == NE_EXPR)
9534 /* Handle the constant case even without -O
9535 to make sure the warnings are given. */
9536 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9538 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9539 if (t1)
9540 return t1;
9543 /* If this is a comparison of complex values and either or both sides
9544 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
9545 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
9546 This may prevent needless evaluations. */
9547 if ((code == EQ_EXPR || code == NE_EXPR)
9548 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
9549 && (TREE_CODE (arg0) == COMPLEX_EXPR
9550 || TREE_CODE (arg1) == COMPLEX_EXPR
9551 || TREE_CODE (arg0) == COMPLEX_CST
9552 || TREE_CODE (arg1) == COMPLEX_CST))
9554 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
9555 tree real0, imag0, real1, imag1;
9557 arg0 = save_expr (arg0);
9558 arg1 = save_expr (arg1);
9559 real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
9560 imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
9561 real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
9562 imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
9564 return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
9565 : TRUTH_ORIF_EXPR),
9566 type,
9567 fold_build2 (code, type, real0, real1),
9568 fold_build2 (code, type, imag0, imag1));
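/* For example, an equality test of complex values a and b becomes
   "REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b)",
   so a mismatch in the real parts skips the imaginary comparison. */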
9571 /* Optimize comparisons of strlen vs zero to a compare of the
9572 first character of the string vs zero. To wit,
9573 strlen(ptr) == 0 => *ptr == 0
9574 strlen(ptr) != 0 => *ptr != 0
9575 Other cases should reduce to one of these two (or a constant)
9576 due to the return value of strlen being unsigned. */
9577 if ((code == EQ_EXPR || code == NE_EXPR)
9578 && integer_zerop (arg1)
9579 && TREE_CODE (arg0) == CALL_EXPR)
9581 tree fndecl = get_callee_fndecl (arg0);
9582 tree arglist;
9584 if (fndecl
9585 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9586 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9587 && (arglist = TREE_OPERAND (arg0, 1))
9588 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9589 && ! TREE_CHAIN (arglist))
9590 return fold_build2 (code, type,
9591 build1 (INDIRECT_REF, char_type_node,
9592 TREE_VALUE (arglist)),
9593 fold_convert (char_type_node,
9594 integer_zero_node));
9597 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9598 into a single range test. */
9599 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9600 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9601 && TREE_CODE (arg1) == INTEGER_CST
9602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9603 && !integer_zerop (TREE_OPERAND (arg0, 1))
9604 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9605 && !TREE_OVERFLOW (arg1))
9607 t1 = fold_div_compare (code, type, arg0, arg1);
9608 if (t1 != NULL_TREE)
9609 return t1;
9612 if ((code == EQ_EXPR || code == NE_EXPR)
9613 && !TREE_SIDE_EFFECTS (arg0)
9614 && integer_zerop (arg1)
9615 && tree_expr_nonzero_p (arg0))
9616 return constant_boolean_node (code == NE_EXPR, type);
9618 t1 = fold_relational_const (code, type, arg0, arg1);
9619 return t1 == NULL_TREE ? NULL_TREE : t1;
9621 case UNORDERED_EXPR:
9622 case ORDERED_EXPR:
9623 case UNLT_EXPR:
9624 case UNLE_EXPR:
9625 case UNGT_EXPR:
9626 case UNGE_EXPR:
9627 case UNEQ_EXPR:
9628 case LTGT_EXPR:
9629 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9631 t1 = fold_relational_const (code, type, arg0, arg1);
9632 if (t1 != NULL_TREE)
9633 return t1;
9636 /* If the first operand is NaN, the result is constant. */
9637 if (TREE_CODE (arg0) == REAL_CST
9638 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9639 && (code != LTGT_EXPR || ! flag_trapping_math))
9641 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9642 ? integer_zero_node
9643 : integer_one_node;
9644 return omit_one_operand (type, t1, arg1);
9647 /* If the second operand is NaN, the result is constant. */
9648 if (TREE_CODE (arg1) == REAL_CST
9649 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9650 && (code != LTGT_EXPR || ! flag_trapping_math))
9652 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9653 ? integer_zero_node
9654 : integer_one_node;
9655 return omit_one_operand (type, t1, arg0);
9658 /* Simplify unordered comparison of something with itself. */
9659 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9660 && operand_equal_p (arg0, arg1, 0))
9661 return constant_boolean_node (1, type);
9663 if (code == LTGT_EXPR
9664 && !flag_trapping_math
9665 && operand_equal_p (arg0, arg1, 0))
9666 return constant_boolean_node (0, type);
9668 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9670 tree targ0 = strip_float_extensions (arg0);
9671 tree targ1 = strip_float_extensions (arg1);
9672 tree newtype = TREE_TYPE (targ0);
9674 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9675 newtype = TREE_TYPE (targ1);
9677 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9678 return fold_build2 (code, type, fold_convert (newtype, targ0),
9679 fold_convert (newtype, targ1));
9682 return NULL_TREE;
9684 case COMPOUND_EXPR:
9685 /* When pedantic, a compound expression can be neither an lvalue
9686 nor an integer constant expression. */
9687 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9688 return NULL_TREE;
9689 /* Don't let (0, 0) be a null pointer constant. */
9690 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9691 : fold_convert (type, arg1);
9692 return pedantic_non_lvalue (tem);
9694 case COMPLEX_EXPR:
9695 if (wins)
9696 return build_complex (type, arg0, arg1);
9697 return NULL_TREE;
9699 default:
9700 return NULL_TREE;
9701 } /* switch (code) */
9704 /* Fold a ternary expression of code CODE and type TYPE with operands
9705 OP0, OP1, and OP2. Return the folded expression if folding is
9706 successful. Otherwise, return NULL_TREE. */
9708 static tree
9709 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9711 tree tem;
9712 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9713 enum tree_code_class kind = TREE_CODE_CLASS (code);
9715 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9716 && TREE_CODE_LENGTH (code) == 3);
9718 /* Strip any conversions that don't change the mode. This is safe
9719 for every expression, except for a comparison expression because
9720 its signedness is derived from its operands. So, in the latter
9721 case, only strip conversions that don't change the signedness.
9723 Note that this is done as an internal manipulation within the
9724 constant folder, in order to find the simplest representation of
9725 the arguments so that their form can be studied. In any case,
9726 the appropriate type conversions should be put back in the tree
9727 that will get out of the constant folder. */
9728 if (op0)
9730 arg0 = op0;
9731 STRIP_NOPS (arg0);
9734 if (op1)
9736 arg1 = op1;
9737 STRIP_NOPS (arg1);
9740 switch (code)
9742 case COMPONENT_REF:
9743 if (TREE_CODE (arg0) == CONSTRUCTOR
9744 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9746 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9747 if (m)
9748 return TREE_VALUE (m);
9750 return NULL_TREE;
9752 case COND_EXPR:
9753 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9754 so all simple results must be passed through pedantic_non_lvalue. */
9755 if (TREE_CODE (arg0) == INTEGER_CST)
9757 tem = integer_zerop (arg0) ? op2 : op1;
9758 /* Only optimize constant conditions when the selected branch
9759 has the same type as the COND_EXPR. This avoids optimizing
9760 away "c ? x : throw", where the throw has a void type. */
9761 if (! VOID_TYPE_P (TREE_TYPE (tem))
9762 || VOID_TYPE_P (type))
9763 return pedantic_non_lvalue (tem);
9764 return NULL_TREE;
9766 if (operand_equal_p (arg1, op2, 0))
9767 return pedantic_omit_one_operand (type, arg1, arg0);
9769 /* If we have A op B ? A : C, we may be able to convert this to a
9770 simpler expression, depending on the operation and the values
9771 of B and C. Signed zeros prevent all of these transformations,
9772 for reasons given above each one.
9774 Also try swapping the arguments and inverting the conditional. */
9775 if (COMPARISON_CLASS_P (arg0)
9776 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9777 arg1, TREE_OPERAND (arg0, 1))
9778 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9780 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9781 if (tem)
9782 return tem;
9785 if (COMPARISON_CLASS_P (arg0)
9786 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9787 op2,
9788 TREE_OPERAND (arg0, 1))
9789 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9791 tem = invert_truthvalue (arg0);
9792 if (COMPARISON_CLASS_P (tem))
9794 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9795 if (tem)
9796 return tem;
9800 /* If the second operand is simpler than the third, swap them
9801 since that produces better jump optimization results. */
9802 if (tree_swap_operands_p (op1, op2, false))
9804 /* See if this can be inverted. If it can't, possibly because
9805 it was a floating-point inequality comparison, don't do
9806 anything. */
9807 tem = invert_truthvalue (arg0);
9809 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9810 return fold_build3 (code, type, tem, op2, op1);
9813 /* Convert A ? 1 : 0 to simply A. */
9814 if (integer_onep (op1)
9815 && integer_zerop (op2)
9816 /* If we try to convert OP0 to our type, the
9817 call to fold will try to move the conversion inside
9818 a COND, which will recurse. In that case, the COND_EXPR
9819 is probably the best choice, so leave it alone. */
9820 && type == TREE_TYPE (arg0))
9821 return pedantic_non_lvalue (arg0);
9823 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9824 over COND_EXPR in cases such as floating point comparisons. */
9825 if (integer_zerop (op1)
9826 && integer_onep (op2)
9827 && truth_value_p (TREE_CODE (arg0)))
9828 return pedantic_non_lvalue (fold_convert (type,
9829 invert_truthvalue (arg0)));
9831 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9832 if (TREE_CODE (arg0) == LT_EXPR
9833 && integer_zerop (TREE_OPERAND (arg0, 1))
9834 && integer_zerop (op2)
9835 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9836 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9837 TREE_TYPE (tem), tem, arg1));
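/* For example, with 32-bit ints, "x < 0 ? INT_MIN : 0" folds to
   "x & INT_MIN", since INT_MIN is exactly the sign bit of x. */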
9839 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9840 already handled above. */
9841 if (TREE_CODE (arg0) == BIT_AND_EXPR
9842 && integer_onep (TREE_OPERAND (arg0, 1))
9843 && integer_zerop (op2)
9844 && integer_pow2p (arg1))
9846 tree tem = TREE_OPERAND (arg0, 0);
9847 STRIP_NOPS (tem);
9848 if (TREE_CODE (tem) == RSHIFT_EXPR
9849 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9850 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9851 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9852 return fold_build2 (BIT_AND_EXPR, type,
9853 TREE_OPERAND (tem, 0), arg1);
9856 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9857 is probably obsolete because the first operand should be a
9858 truth value (that's why we have the two cases above), but let's
9859 leave it in until we can confirm this for all front-ends. */
9860 if (integer_zerop (op2)
9861 && TREE_CODE (arg0) == NE_EXPR
9862 && integer_zerop (TREE_OPERAND (arg0, 1))
9863 && integer_pow2p (arg1)
9864 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9865 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9866 arg1, OEP_ONLY_CONST))
9867 return pedantic_non_lvalue (fold_convert (type,
9868 TREE_OPERAND (arg0, 0)));
9870 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9871 if (integer_zerop (op2)
9872 && truth_value_p (TREE_CODE (arg0))
9873 && truth_value_p (TREE_CODE (arg1)))
9874 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
9876 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9877 if (integer_onep (op2)
9878 && truth_value_p (TREE_CODE (arg0))
9879 && truth_value_p (TREE_CODE (arg1)))
9881 /* Only perform transformation if ARG0 is easily inverted. */
9882 tem = invert_truthvalue (arg0);
9883 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9884 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
9887 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9888 if (integer_zerop (arg1)
9889 && truth_value_p (TREE_CODE (arg0))
9890 && truth_value_p (TREE_CODE (op2)))
9892 /* Only perform transformation if ARG0 is easily inverted. */
9893 tem = invert_truthvalue (arg0);
9894 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9895 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
9898 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9899 if (integer_onep (arg1)
9900 && truth_value_p (TREE_CODE (arg0))
9901 && truth_value_p (TREE_CODE (op2)))
9902 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
9904 return NULL_TREE;
9906 case CALL_EXPR:
9907 /* Check for a built-in function. */
9908 if (TREE_CODE (op0) == ADDR_EXPR
9909 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
9910 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
9912 tree fndecl = TREE_OPERAND (op0, 0);
9913 tree arglist = op1;
9914 tree tmp = fold_builtin (fndecl, arglist, false);
9915 if (tmp)
9916 return tmp;
9918 return NULL_TREE;
9920 default:
9921 return NULL_TREE;
9922 } /* switch (code) */
9925 /* Perform constant folding and related simplification of EXPR.
9926 The related simplifications include x*1 => x, x*0 => 0, etc.,
9927 and application of the associative law.
9928 NOP_EXPR conversions may be removed freely (as long as we
9929 are careful not to change the type of the overall expression).
9930 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
9931 but we can constant-fold them if they have constant operands. */
9933 #ifdef ENABLE_FOLD_CHECKING
9934 # define fold(x) fold_1 (x)
9935 static tree fold_1 (tree);
9936 static
9937 #endif
9938 tree
9939 fold (tree expr)
9941 const tree t = expr;
9942 enum tree_code code = TREE_CODE (t);
9943 enum tree_code_class kind = TREE_CODE_CLASS (code);
9944 tree tem;
9946 /* Return right away if a constant. */
9947 if (kind == tcc_constant)
9948 return t;
9950 if (IS_EXPR_CODE_CLASS (kind))
9952 tree type = TREE_TYPE (t);
9953 tree op0, op1, op2;
9955 switch (TREE_CODE_LENGTH (code))
9957 case 1:
9958 op0 = TREE_OPERAND (t, 0);
9959 tem = fold_unary (code, type, op0);
9960 return tem ? tem : expr;
9961 case 2:
9962 op0 = TREE_OPERAND (t, 0);
9963 op1 = TREE_OPERAND (t, 1);
9964 tem = fold_binary (code, type, op0, op1);
9965 return tem ? tem : expr;
9966 case 3:
9967 op0 = TREE_OPERAND (t, 0);
9968 op1 = TREE_OPERAND (t, 1);
9969 op2 = TREE_OPERAND (t, 2);
9970 tem = fold_ternary (code, type, op0, op1, op2);
9971 return tem ? tem : expr;
9972 default:
9973 break;
9977 switch (code)
9979 case CONST_DECL:
9980 return fold (DECL_INITIAL (t));
9982 default:
9983 return t;
9984 } /* switch (code) */
9987 #ifdef ENABLE_FOLD_CHECKING
9988 #undef fold
9990 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9991 static void fold_check_failed (tree, tree);
9992 void print_fold_checksum (tree);
9994 /* When --enable-checking=fold, compute a digest of EXPR before
9995 and after the actual fold call to check that fold did not
9996 accidentally change the original expression. */
9998 tree
9999 fold (tree expr)
10001 tree ret;
10002 struct md5_ctx ctx;
10003 unsigned char checksum_before[16], checksum_after[16];
10004 htab_t ht;
10006 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10007 md5_init_ctx (&ctx);
10008 fold_checksum_tree (expr, &ctx, ht);
10009 md5_finish_ctx (&ctx, checksum_before);
10010 htab_empty (ht);
10012 ret = fold_1 (expr);
10014 md5_init_ctx (&ctx);
10015 fold_checksum_tree (expr, &ctx, ht);
10016 md5_finish_ctx (&ctx, checksum_after);
10017 htab_delete (ht);
10019 if (memcmp (checksum_before, checksum_after, 16))
10020 fold_check_failed (expr, ret);
10022 return ret;
10025 void
10026 print_fold_checksum (tree expr)
10028 struct md5_ctx ctx;
10029 unsigned char checksum[16], cnt;
10030 htab_t ht;
10032 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10033 md5_init_ctx (&ctx);
10034 fold_checksum_tree (expr, &ctx, ht);
10035 md5_finish_ctx (&ctx, checksum);
10036 htab_delete (ht);
10037 for (cnt = 0; cnt < 16; ++cnt)
10038 fprintf (stderr, "%02x", checksum[cnt]);
10039 putc ('\n', stderr);
10042 static void
10043 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10045 internal_error ("fold check: original tree changed by fold");
10048 static void
10049 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10051 void **slot;
10052 enum tree_code code;
10053 char buf[sizeof (struct tree_decl)];
10054 int i, len;
10056 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10057 <= sizeof (struct tree_decl))
10058 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10059 if (expr == NULL)
10060 return;
10061 slot = htab_find_slot (ht, expr, INSERT);
10062 if (*slot != NULL)
10063 return;
10064 *slot = expr;
10065 code = TREE_CODE (expr);
10066 if (TREE_CODE_CLASS (code) == tcc_declaration
10067 && DECL_ASSEMBLER_NAME_SET_P (expr))
10069 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10070 memcpy (buf, expr, tree_size (expr));
10071 expr = (tree) buf;
10072 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10074 else if (TREE_CODE_CLASS (code) == tcc_type
10075 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10076 || TYPE_CACHED_VALUES_P (expr)))
10078 /* Allow these fields to be modified. */
10079 memcpy (buf, expr, tree_size (expr));
10080 expr = (tree) buf;
10081 TYPE_POINTER_TO (expr) = NULL;
10082 TYPE_REFERENCE_TO (expr) = NULL;
10083 if (TYPE_CACHED_VALUES_P (expr))
10085 TYPE_CACHED_VALUES_P (expr) = 0;
10086 TYPE_CACHED_VALUES (expr) = NULL;
10089 md5_process_bytes (expr, tree_size (expr), ctx);
10090 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10091 if (TREE_CODE_CLASS (code) != tcc_type
10092 && TREE_CODE_CLASS (code) != tcc_declaration)
10093 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10094 switch (TREE_CODE_CLASS (code))
10096 case tcc_constant:
10097 switch (code)
10099 case STRING_CST:
10100 md5_process_bytes (TREE_STRING_POINTER (expr),
10101 TREE_STRING_LENGTH (expr), ctx);
10102 break;
10103 case COMPLEX_CST:
10104 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10105 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10106 break;
10107 case VECTOR_CST:
10108 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10109 break;
10110 default:
10111 break;
10113 break;
10114 case tcc_exceptional:
10115 switch (code)
10117 case TREE_LIST:
10118 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10119 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10120 break;
10121 case TREE_VEC:
10122 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10123 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10124 break;
10125 default:
10126 break;
10128 break;
10129 case tcc_expression:
10130 case tcc_reference:
10131 case tcc_comparison:
10132 case tcc_unary:
10133 case tcc_binary:
10134 case tcc_statement:
10135 len = TREE_CODE_LENGTH (code);
10136 for (i = 0; i < len; ++i)
10137 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10138 break;
10139 case tcc_declaration:
10140 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10141 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10142 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10143 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10144 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10145 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10146 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10147 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10148 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10149 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10150 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10151 break;
10152 case tcc_type:
10153 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10154 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10155 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10156 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10157 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10158 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10159 if (INTEGRAL_TYPE_P (expr)
10160 || SCALAR_FLOAT_TYPE_P (expr))
10162 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10163 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10165 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10166 if (TREE_CODE (expr) == RECORD_TYPE
10167 || TREE_CODE (expr) == UNION_TYPE
10168 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10169 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10170 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10171 break;
10172 default:
10173 break;
10177 #endif
10179 /* Fold a unary tree expression with code CODE of type TYPE with an
10180 operand OP0. Return a folded expression if successful. Otherwise,
10181 return a tree expression with code CODE of type TYPE with an
10182 operand OP0. */
10184 tree
10185 fold_build1 (enum tree_code code, tree type, tree op0)
10187 tree tem = fold_unary (code, type, op0);
10188 if (tem)
10189 return tem;
10191 return build1 (code, type, op0);
10194 /* Fold a binary tree expression with code CODE of type TYPE with
10195 operands OP0 and OP1. Return a folded expression if successful.
10196 Otherwise, return a tree expression with code CODE of type TYPE
10197 with operands OP0 and OP1. */
10199 tree
10200 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10202 tree tem = fold_binary (code, type, op0, op1);
10203 if (tem)
10204 return tem;
10206 return build2 (code, type, op0, op1);
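/* For example, fold_build2 (MULT_EXPR, type, x, build_int_cst (type, 1))
   hands "x * 1" to fold_binary and simply returns X, where a bare
   build2 call would always allocate a new MULT_EXPR node. */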
10209 /* Fold a ternary tree expression with code CODE of type TYPE with
10210 operands OP0, OP1, and OP2. Return a folded expression if
10211 successful. Otherwise, return a tree expression with code CODE of
10212 type TYPE with operands OP0, OP1, and OP2. */
10214 tree
10215 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10217 tree tem = fold_ternary (code, type, op0, op1, op2);
10218 if (tem)
10219 return tem;
10221 return build3 (code, type, op0, op1, op2);
10224 /* Perform constant folding and related simplification of initializer
10225 expression EXPR. This behaves identically to "fold" but ignores
10226 potential run-time traps and exceptions that fold must preserve. */
10228 tree
10229 fold_initializer (tree expr)
10231 int saved_signaling_nans = flag_signaling_nans;
10232 int saved_trapping_math = flag_trapping_math;
10233 int saved_rounding_math = flag_rounding_math;
10234 int saved_trapv = flag_trapv;
10235 tree result;
10237 flag_signaling_nans = 0;
10238 flag_trapping_math = 0;
10239 flag_rounding_math = 0;
10240 flag_trapv = 0;
10242 result = fold (expr);
10244 flag_signaling_nans = saved_signaling_nans;
10245 flag_trapping_math = saved_trapping_math;
10246 flag_rounding_math = saved_rounding_math;
10247 flag_trapv = saved_trapv;
10249 return result;
10252 /* Determine if the first argument is a multiple of the second argument.
10253 Return 0 if it is not, or if we cannot easily determine that it is.
10255 An example of the sort of thing we care about (at this point; this routine
10256 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10257 fold cases do now) is discovering that
10259 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10261 is a multiple of
10263 SAVE_EXPR (J * 8)
10265 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10267 This code also handles discovering that
10269 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10271 is a multiple of 8 so we don't have to worry about dealing with a
10272 possible remainder.
10274 Note that we *look* inside a SAVE_EXPR only to determine how it was
10275 calculated; it is not safe for fold to do much of anything else with the
10276 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10277 at run time. For example, the latter example above *cannot* be implemented
10278 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10279 evaluation time of the original SAVE_EXPR is not necessarily the same at
10280 the time the new expression is evaluated. The only optimization of this
10281 sort that would be valid is changing
10283 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10285 divided by 8 to
10287 SAVE_EXPR (I) * SAVE_EXPR (J)
10289 (where the same SAVE_EXPR (J) is used in the original and the
10290 transformed version). */
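/* For example, with TOP = SAVE_EXPR (J * 8) and BOTTOM = 4, we look
   through the SAVE_EXPR and succeed in the MULT_EXPR case below,
   because the constant operand 8 is itself a multiple of 4. */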
10292 static int
10293 multiple_of_p (tree type, tree top, tree bottom)
10295 if (operand_equal_p (top, bottom, 0))
10296 return 1;
10298 if (TREE_CODE (type) != INTEGER_TYPE)
10299 return 0;
10301 switch (TREE_CODE (top))
10303 case BIT_AND_EXPR:
10304 /* A bitwise AND yields a multiple of a power of two: if either
10305 operand is a multiple of BOTTOM (a power of two), so is TOP. */
10306 if (!integer_pow2p (bottom))
10307 return 0;
10308 /* FALLTHRU */
10310 case MULT_EXPR:
10311 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10312 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10314 case PLUS_EXPR:
10315 case MINUS_EXPR:
10316 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10317 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10319 case LSHIFT_EXPR:
10320 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10322 tree op1, t1;
10324 op1 = TREE_OPERAND (top, 1);
10325 /* const_binop may not detect overflow correctly,
10326 so check for it explicitly here. */
10327 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10328 > TREE_INT_CST_LOW (op1)
10329 && TREE_INT_CST_HIGH (op1) == 0
10330 && 0 != (t1 = fold_convert (type,
10331 const_binop (LSHIFT_EXPR,
10332 size_one_node,
10333 op1, 0)))
10334 && ! TREE_OVERFLOW (t1))
10335 return multiple_of_p (type, t1, bottom);
10337 return 0;
10339 case NOP_EXPR:
10340 /* Can't handle conversions from non-integral or wider integral type. */
10341 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10342 || (TYPE_PRECISION (type)
10343 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10344 return 0;
10346 /* ... fall through ... */
10348 case SAVE_EXPR:
10349 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10351 case INTEGER_CST:
10352 if (TREE_CODE (bottom) != INTEGER_CST
10353 || (TYPE_UNSIGNED (type)
10354 && (tree_int_cst_sgn (top) < 0
10355 || tree_int_cst_sgn (bottom) < 0)))
10356 return 0;
10357 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10358 top, bottom, 0));
10360 default:
10361 return 0;
10365 /* Return true if `t' is known to be non-negative. */
10367 int
10368 tree_expr_nonnegative_p (tree t)
10370 switch (TREE_CODE (t))
10372 case ABS_EXPR:
10373 return 1;
10375 case INTEGER_CST:
10376 return tree_int_cst_sgn (t) >= 0;
10378 case REAL_CST:
10379 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10381 case PLUS_EXPR:
10382 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10383 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10384 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10386 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10387 both unsigned and at least 2 bits shorter than the result. */
10388 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10389 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10390 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10392 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10393 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10394 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10395 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10397 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10398 TYPE_PRECISION (inner2)) + 1;
10399 return prec < TYPE_PRECISION (TREE_TYPE (t));
10402 break;
10404 case MULT_EXPR:
10405 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10407 /* x * x for floating point x is always non-negative. */
10408 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10409 return 1;
10410 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10411 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10414 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10415 both unsigned and the sum of their precisions is less than the
precision of the result. */
10416 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10417 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10418 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10420 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10421 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10422 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10423 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10424 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10425 < TYPE_PRECISION (TREE_TYPE (t));
10427 return 0;
10429 case TRUNC_DIV_EXPR:
10430 case CEIL_DIV_EXPR:
10431 case FLOOR_DIV_EXPR:
10432 case ROUND_DIV_EXPR:
10433 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10434 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10436 case TRUNC_MOD_EXPR:
10437 case CEIL_MOD_EXPR:
10438 case FLOOR_MOD_EXPR:
10439 case ROUND_MOD_EXPR:
10440 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10442 case RDIV_EXPR:
10443 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10444 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10446 case BIT_AND_EXPR:
10447 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10448 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10449 case BIT_IOR_EXPR:
10450 case BIT_XOR_EXPR:
10451 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10452 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10454 case NOP_EXPR:
10456 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10457 tree outer_type = TREE_TYPE (t);
10459 if (TREE_CODE (outer_type) == REAL_TYPE)
10461 if (TREE_CODE (inner_type) == REAL_TYPE)
10462 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10463 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10465 if (TYPE_UNSIGNED (inner_type))
10466 return 1;
10467 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10470 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10472 if (TREE_CODE (inner_type) == REAL_TYPE)
10473 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10474 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10475 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10476 && TYPE_UNSIGNED (inner_type);
10479 break;
10481 case COND_EXPR:
10482 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10483 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10484 case COMPOUND_EXPR:
10485 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10486 case MIN_EXPR:
10487 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10488 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10489 case MAX_EXPR:
10490 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10491 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10492 case MODIFY_EXPR:
10493 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10494 case BIND_EXPR:
10495 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10496 case SAVE_EXPR:
10497 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10498 case NON_LVALUE_EXPR:
10499 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10500 case FLOAT_EXPR:
10501 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10503 case TARGET_EXPR:
10505 tree temp = TARGET_EXPR_SLOT (t);
10506 t = TARGET_EXPR_INITIAL (t);
10508 /* If the initializer is non-void, then it's a normal expression
10509 that will be assigned to the slot. */
10510 if (!VOID_TYPE_P (t))
10511 return tree_expr_nonnegative_p (t);
10513 /* Otherwise, the initializer sets the slot in some way. One common
10514 way is an assignment statement at the end of the initializer. */
10515 while (1)
10517 if (TREE_CODE (t) == BIND_EXPR)
10518 t = expr_last (BIND_EXPR_BODY (t));
10519 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10520 || TREE_CODE (t) == TRY_CATCH_EXPR)
10521 t = expr_last (TREE_OPERAND (t, 0));
10522 else if (TREE_CODE (t) == STATEMENT_LIST)
10523 t = expr_last (t);
10524 else
10525 break;
10527 if (TREE_CODE (t) == MODIFY_EXPR
10528 && TREE_OPERAND (t, 0) == temp)
10529 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10531 return 0;
10534 case CALL_EXPR:
10536 tree fndecl = get_callee_fndecl (t);
10537 tree arglist = TREE_OPERAND (t, 1);
10538 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10539 switch (DECL_FUNCTION_CODE (fndecl))
10541 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10542 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10543 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10544 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10546 CASE_BUILTIN_F (BUILT_IN_ACOS)
10547 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10548 CASE_BUILTIN_F (BUILT_IN_CABS)
10549 CASE_BUILTIN_F (BUILT_IN_COSH)
10550 CASE_BUILTIN_F (BUILT_IN_ERFC)
10551 CASE_BUILTIN_F (BUILT_IN_EXP)
10552 CASE_BUILTIN_F (BUILT_IN_EXP10)
10553 CASE_BUILTIN_F (BUILT_IN_EXP2)
10554 CASE_BUILTIN_F (BUILT_IN_FABS)
10555 CASE_BUILTIN_F (BUILT_IN_FDIM)
10556 CASE_BUILTIN_F (BUILT_IN_FREXP)
10557 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10558 CASE_BUILTIN_F (BUILT_IN_POW10)
10559 CASE_BUILTIN_I (BUILT_IN_FFS)
10560 CASE_BUILTIN_I (BUILT_IN_PARITY)
10561 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10562 /* Always true. */
10563 return 1;
10565 CASE_BUILTIN_F (BUILT_IN_SQRT)
10566 /* sqrt(-0.0) is -0.0. */
10567 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10568 return 1;
10569 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10571 CASE_BUILTIN_F (BUILT_IN_ASINH)
10572 CASE_BUILTIN_F (BUILT_IN_ATAN)
10573 CASE_BUILTIN_F (BUILT_IN_ATANH)
10574 CASE_BUILTIN_F (BUILT_IN_CBRT)
10575 CASE_BUILTIN_F (BUILT_IN_CEIL)
10576 CASE_BUILTIN_F (BUILT_IN_ERF)
10577 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10578 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10579 CASE_BUILTIN_F (BUILT_IN_FMOD)
10580 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10581 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10582 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10583 CASE_BUILTIN_F (BUILT_IN_LRINT)
10584 CASE_BUILTIN_F (BUILT_IN_LROUND)
10585 CASE_BUILTIN_F (BUILT_IN_MODF)
10586 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10587 CASE_BUILTIN_F (BUILT_IN_POW)
10588 CASE_BUILTIN_F (BUILT_IN_RINT)
10589 CASE_BUILTIN_F (BUILT_IN_ROUND)
10590 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10591 CASE_BUILTIN_F (BUILT_IN_SINH)
10592 CASE_BUILTIN_F (BUILT_IN_TANH)
10593 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10594 /* True if the 1st argument is nonnegative. */
10595 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10597 CASE_BUILTIN_F (BUILT_IN_FMAX)
10598 /* True if the 1st OR 2nd arguments are nonnegative. */
10599 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10600 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10602 CASE_BUILTIN_F (BUILT_IN_FMIN)
10603 /* True if the 1st AND 2nd arguments are nonnegative. */
10604 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10605 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10607 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10608 /* True if the 2nd argument is nonnegative. */
10609 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10611 default:
10612 break;
10613 #undef CASE_BUILTIN_F
10614 #undef CASE_BUILTIN_I
10618 /* ... fall through ... */
10620 default:
10621 if (truth_value_p (TREE_CODE (t)))
10622 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10623 return 1;
10626 /* We don't know the sign of `t', so be conservative and return false. */
10627 return 0;
10630 /* Return true when T is an address and is known to be nonzero.
10631 For floating point we further ensure that T is not denormal.
10632 Similar logic is present in nonzero_address in rtlanal.c. */
10634 static bool
10635 tree_expr_nonzero_p (tree t)
10637 tree type = TREE_TYPE (t);
10639 /* Doing something useful for floating point would need more work. */
10640 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10641 return false;
10643 switch (TREE_CODE (t))
10645 case ABS_EXPR:
10646 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10647 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10649 case INTEGER_CST:
10650 /* We used to test for !integer_zerop here. This does not work correctly
10651 if TREE_CONSTANT_OVERFLOW (t). */
10652 return (TREE_INT_CST_LOW (t) != 0
10653 || TREE_INT_CST_HIGH (t) != 0);
10655 case PLUS_EXPR:
10656 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10658 /* In the presence of negative values it is hard
10659 to say anything. */
10660 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10661 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10662 return false;
10663 /* One of the operands must be positive and the other non-negative. */
10664 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10665 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10667 break;
10669 case MULT_EXPR:
10670 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10672 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10673 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10675 break;
10677 case NOP_EXPR:
10679 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10680 tree outer_type = TREE_TYPE (t);
10682 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10683 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10685 break;
10687 case ADDR_EXPR:
10689 tree base = get_base_address (TREE_OPERAND (t, 0));
10691 if (!base)
10692 return false;
10694 /* Weak declarations may link to NULL. */
10695 if (DECL_P (base))
10696 return !DECL_WEAK (base);
10698 /* Constants are never weak. */
10699 if (CONSTANT_CLASS_P (base))
10700 return true;
10702 return false;
10705 case COND_EXPR:
10706 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10707 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10709 case MIN_EXPR:
10710 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10711 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10713 case MAX_EXPR:
10714 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10716 /* When both operands are nonzero, then MAX must be too. */
10717 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10718 return true;
10720 /* MAX where operand 0 is positive is positive. */
10721 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10723 /* MAX where operand 1 is positive is positive. */
10724 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10725 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10726 return true;
10727 break;
10729 case COMPOUND_EXPR:
10730 case MODIFY_EXPR:
10731 case BIND_EXPR:
10732 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10734 case SAVE_EXPR:
10735 case NON_LVALUE_EXPR:
10736 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10738 case BIT_IOR_EXPR:
10739 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10740 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10742 default:
10743 break;
10745 return false;
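/* Illustrative sketch of the ADDR_EXPR case (x_decl is a hypothetical
   non-weak VAR_DECL for "int x;"):

     tree addr = build_fold_addr_expr (x_decl);

   tree_expr_nonzero_p (addr) returns true, since x_decl is a DECL_P
   base that is not DECL_WEAK.  For a weak declaration, whose address
   may legitimately resolve to NULL, it conservatively returns false.  */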
10748 /* See if we are applying CODE, a relational operator, to the highest
10749 or lowest possible integer of TYPE. If so, then the result is a
10750 compile-time constant. */
10752 static tree
10753 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
10754 tree *op1_p)
10756 tree op0 = *op0_p;
10757 tree op1 = *op1_p;
10758 enum tree_code code = *code_p;
10759 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
10761 if (TREE_CODE (op1) == INTEGER_CST
10762 && ! TREE_CONSTANT_OVERFLOW (op1)
10763 && width <= HOST_BITS_PER_WIDE_INT
10764 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
10765 || POINTER_TYPE_P (TREE_TYPE (op1))))
10767 unsigned HOST_WIDE_INT signed_max;
10768 unsigned HOST_WIDE_INT max, min;
10770 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10772 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
10774 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10775 min = 0;
10777 else
10779 max = signed_max;
10780 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10783 if (TREE_INT_CST_HIGH (op1) == 0
10784 && TREE_INT_CST_LOW (op1) == max)
10785 switch (code)
10787 case GT_EXPR:
10788 return omit_one_operand (type, integer_zero_node, op0);
10790 case GE_EXPR:
10791 *code_p = EQ_EXPR;
10792 break;
10793 case LE_EXPR:
10794 return omit_one_operand (type, integer_one_node, op0);
10796 case LT_EXPR:
10797 *code_p = NE_EXPR;
10798 break;
10800 /* The GE_EXPR and LT_EXPR cases above are not normally
10801 reached because of previous transformations. */
10803 default:
10804 break;
10806 else if (TREE_INT_CST_HIGH (op1) == 0
10807 && TREE_INT_CST_LOW (op1) == max - 1)
10808 switch (code)
10810 case GT_EXPR:
10811 *code_p = EQ_EXPR;
10812 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10813 break;
10814 case LE_EXPR:
10815 *code_p = NE_EXPR;
10816 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
10817 break;
10818 default:
10819 break;
10821 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10822 && TREE_INT_CST_LOW (op1) == min)
10823 switch (code)
10825 case LT_EXPR:
10826 return omit_one_operand (type, integer_zero_node, op0);
10828 case LE_EXPR:
10829 *code_p = EQ_EXPR;
10830 break;
10832 case GE_EXPR:
10833 return omit_one_operand (type, integer_one_node, op0);
10835 case GT_EXPR:
10836 *code_p = NE_EXPR;
10837 break;
10839 default:
10840 break;
10842 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
10843 && TREE_INT_CST_LOW (op1) == min + 1)
10844 switch (code)
10846 case GE_EXPR:
10847 *code_p = NE_EXPR;
10848 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10849 break;
10850 case LT_EXPR:
10851 *code_p = EQ_EXPR;
10852 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10853 break;
10854 default:
10855 break;
10858 else if (TREE_INT_CST_HIGH (op1) == 0
10859 && TREE_INT_CST_LOW (op1) == signed_max
10860 && TYPE_UNSIGNED (TREE_TYPE (op1))
10861 /* signed_type does not work on pointer types. */
10862 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
10864 /* The following case also applies to X < signed_max+1
10865 and X >= signed_max+1 because of previous transformations. */
10866 if (code == LE_EXPR || code == GT_EXPR)
10868 tree st0, st1, exp, retval;
10869 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
10870 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
10872 exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
10873 type,
10874 fold_convert (st0, op0),
10875 fold_convert (st1, integer_zero_node));
10877 retval = fold_binary_to_constant (TREE_CODE (exp),
10878 TREE_TYPE (exp),
10879 TREE_OPERAND (exp, 0),
10880 TREE_OPERAND (exp, 1));
10882 /* If we are in gimple form, then returning EXP would create
10883 non-gimple expressions. Clearing it is safe and ensures
10884 we do not allow a non-gimple expression to escape. */
10885 if (in_gimple_form)
10886 exp = NULL;
10888 return (retval ? retval : exp);
10893 return NULL_TREE;
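/* Worked example of the rewrites above: for an 8-bit unsigned type,
   max is 255 and min is 0, so

     x >  255  becomes  0            (op0 kept only for side effects)
     x >= 255  becomes  x == 255
     x <= 255  becomes  1
     x <  255  becomes  x != 255
     x >  254  becomes  x == 255
     x <= 254  becomes  x != 255

   matching the case analysis on max, max - 1, min and min + 1.  */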
10897 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10898 attempt to fold the expression to a constant without modifying TYPE,
10899 OP0 or OP1.
10901 If the expression could be simplified to a constant, then return
10902 the constant. If the expression would not be simplified to a
10903 constant, then return NULL_TREE.
10905 Note this is primarily designed to be called after gimplification
10906 of the tree structures and when at least one operand is a constant.
10907 As a result of those simplifying assumptions this routine is far
10908 simpler than the generic fold routine. */
10910 tree
10911 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10913 int wins = 1;
10914 tree subop0;
10915 tree subop1;
10916 tree tem;
10918 /* If this is a commutative operation, and OP0 is a constant, move it
10919 to OP1 to reduce the number of tests below. */
10920 if (commutative_tree_code (code)
10921 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
10923 tem = op0;
10924 op0 = op1;
10925 op1 = tem;
10928 /* If either operand is a complex type, extract its real component. */
10929 if (TREE_CODE (op0) == COMPLEX_CST)
10930 subop0 = TREE_REALPART (op0);
10931 else
10932 subop0 = op0;
10934 if (TREE_CODE (op1) == COMPLEX_CST)
10935 subop1 = TREE_REALPART (op1);
10936 else
10937 subop1 = op1;
10939 /* Note if either argument is not a real or integer constant.
10940 With a few exceptions, simplification is limited to cases
10941 where both arguments are constants. */
10942 if ((TREE_CODE (subop0) != INTEGER_CST
10943 && TREE_CODE (subop0) != REAL_CST)
10944 || (TREE_CODE (subop1) != INTEGER_CST
10945 && TREE_CODE (subop1) != REAL_CST))
10946 wins = 0;
10948 switch (code)
10950 case PLUS_EXPR:
10951 /* (plus (address) (const_int)) is a constant. */
10952 if (TREE_CODE (op0) == PLUS_EXPR
10953 && TREE_CODE (op1) == INTEGER_CST
10954 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
10955 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
10956 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
10957 == ADDR_EXPR)))
10958 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
10960 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
10961 const_binop (PLUS_EXPR, op1,
10962 TREE_OPERAND (op0, 1), 0));
10964 case BIT_XOR_EXPR:
10966 binary:
10967 if (!wins)
10968 return NULL_TREE;
10970 /* Both arguments are constants. Simplify. */
10971 tem = const_binop (code, op0, op1, 0);
10972 if (tem != NULL_TREE)
10974 /* The return value should always have the same type as
10975 the original expression. */
10976 if (TREE_TYPE (tem) != type)
10977 tem = fold_convert (type, tem);
10979 return tem;
10981 return NULL_TREE;
10983 case MINUS_EXPR:
10984 /* Fold &x - &x. This can happen from &x.foo - &x.
10985 This is unsafe for certain floats even in non-IEEE formats.
10986 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10987 Also note that operand_equal_p is always false if an
10988 operand is volatile. */
10989 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
10990 return fold_convert (type, integer_zero_node);
10992 goto binary;
10994 case MULT_EXPR:
10995 case BIT_AND_EXPR:
10996 /* Special case multiplication or bitwise AND where one argument
10997 is zero. */
10998 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
10999 return omit_one_operand (type, op1, op0);
11000 else
11001 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
11002 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
11003 && real_zerop (op1))
11004 return omit_one_operand (type, op1, op0);
11006 goto binary;
11008 case BIT_IOR_EXPR:
11009 /* Special case when we know the result will be all ones. */
11010 if (integer_all_onesp (op1))
11011 return omit_one_operand (type, op1, op0);
11013 goto binary;
11015 case TRUNC_DIV_EXPR:
11016 case ROUND_DIV_EXPR:
11017 case FLOOR_DIV_EXPR:
11018 case CEIL_DIV_EXPR:
11019 case EXACT_DIV_EXPR:
11020 case TRUNC_MOD_EXPR:
11021 case ROUND_MOD_EXPR:
11022 case FLOOR_MOD_EXPR:
11023 case CEIL_MOD_EXPR:
11024 case RDIV_EXPR:
11025 /* Division by zero is undefined. */
11026 if (integer_zerop (op1))
11027 return NULL_TREE;
11029 if (TREE_CODE (op1) == REAL_CST
11030 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
11031 && real_zerop (op1))
11032 return NULL_TREE;
11034 goto binary;
11036 case MIN_EXPR:
11037 if (INTEGRAL_TYPE_P (type)
11038 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11039 return omit_one_operand (type, op1, op0);
11041 goto binary;
11043 case MAX_EXPR:
11044 if (INTEGRAL_TYPE_P (type)
11045 && TYPE_MAX_VALUE (type)
11046 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11047 return omit_one_operand (type, op1, op0);
11049 goto binary;
11051 case RSHIFT_EXPR:
11052 /* Optimize -1 >> x for arithmetic right shifts. */
11053 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
11054 return omit_one_operand (type, op0, op1);
11055 /* ... fall through ... */
11057 case LSHIFT_EXPR:
11058 if (integer_zerop (op0))
11059 return omit_one_operand (type, op0, op1);
11061 /* Since a negative shift count is not well-defined, don't
11062 try to compute it in the compiler. */
11063 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
11064 return NULL_TREE;
11066 goto binary;
11068 case LROTATE_EXPR:
11069 case RROTATE_EXPR:
11070 /* -1 rotated either direction by any amount is still -1. */
11071 if (integer_all_onesp (op0))
11072 return omit_one_operand (type, op0, op1);
11074 /* 0 rotated either direction by any amount is still zero. */
11075 if (integer_zerop (op0))
11076 return omit_one_operand (type, op0, op1);
11078 goto binary;
11080 case COMPLEX_EXPR:
11081 if (wins)
11082 return build_complex (type, op0, op1);
11083 return NULL_TREE;
11085 case LT_EXPR:
11086 case LE_EXPR:
11087 case GT_EXPR:
11088 case GE_EXPR:
11089 case EQ_EXPR:
11090 case NE_EXPR:
11091 /* If one arg is a real or integer constant, put it last. */
11092 if ((TREE_CODE (op0) == INTEGER_CST
11093 && TREE_CODE (op1) != INTEGER_CST)
11094 || (TREE_CODE (op0) == REAL_CST
11095 && TREE_CODE (op1) != REAL_CST))
11097 tree temp;
11099 temp = op0;
11100 op0 = op1;
11101 op1 = temp;
11102 code = swap_tree_comparison (code);
11105 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11106 This transformation affects the cases which are handled in later
11107 optimizations involving comparisons with non-negative constants. */
11108 if (TREE_CODE (op1) == INTEGER_CST
11109 && TREE_CODE (op0) != INTEGER_CST
11110 && tree_int_cst_sgn (op1) > 0)
11112 switch (code)
11114 case GE_EXPR:
11115 code = GT_EXPR;
11116 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11117 break;
11119 case LT_EXPR:
11120 code = LE_EXPR;
11121 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
11122 break;
11124 default:
11125 break;
11129 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
11130 if (tem)
11131 return tem;
11133 /* Fall through. */
11135 case ORDERED_EXPR:
11136 case UNORDERED_EXPR:
11137 case UNLT_EXPR:
11138 case UNLE_EXPR:
11139 case UNGT_EXPR:
11140 case UNGE_EXPR:
11141 case UNEQ_EXPR:
11142 case LTGT_EXPR:
11143 if (!wins)
11144 return NULL_TREE;
11146 return fold_relational_const (code, type, op0, op1);
11148 case RANGE_EXPR:
11149 /* This could probably be handled. */
11150 return NULL_TREE;
11152 case TRUTH_AND_EXPR:
11153 /* If second arg is constant zero, result is zero, but first arg
11154 must be evaluated. */
11155 if (integer_zerop (op1))
11156 return omit_one_operand (type, op1, op0);
11157 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11158 case will be handled here. */
11159 if (integer_zerop (op0))
11160 return omit_one_operand (type, op0, op1);
11161 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11162 return constant_boolean_node (true, type);
11163 return NULL_TREE;
11165 case TRUTH_OR_EXPR:
11166 /* If second arg is constant true, result is true, but we must
11167 evaluate first arg. */
11168 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
11169 return omit_one_operand (type, op1, op0);
11170 /* Likewise for first arg, but note this only occurs here for
11171 TRUTH_OR_EXPR. */
11172 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
11173 return omit_one_operand (type, op0, op1);
11174 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11175 return constant_boolean_node (false, type);
11176 return NULL_TREE;
11178 case TRUTH_XOR_EXPR:
11179 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11181 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
11182 return constant_boolean_node (x, type);
11184 return NULL_TREE;
11186 default:
11187 return NULL_TREE;
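/* Usage sketch for fold_binary_to_constant (the operands here are
   hypothetical):

     tree t2 = build_int_cst (integer_type_node, 2);
     tree t3 = build_int_cst (integer_type_node, 3);
     tree t5 = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                        t2, t3);

   t5 would be the INTEGER_CST 5; when no constant result can be
   computed the function returns NULL_TREE instead.  */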
11191 /* Given the components of a unary expression CODE, TYPE and OP0,
11192 attempt to fold the expression to a constant without modifying
11193 TYPE or OP0.
11195 If the expression could be simplified to a constant, then return
11196 the constant. If the expression would not be simplified to a
11197 constant, then return NULL_TREE.
11199 Note this is primarily designed to be called after gimplification
11200 of the tree structures and when op0 is a constant. As a result
11201 of those simplifying assumptions this routine is far simpler than
11202 the generic fold routine. */
11204 tree
11205 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11207 /* Make sure we have a suitable constant argument. */
11208 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
11210 tree subop;
11212 if (TREE_CODE (op0) == COMPLEX_CST)
11213 subop = TREE_REALPART (op0);
11214 else
11215 subop = op0;
11217 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
11218 return NULL_TREE;
11221 switch (code)
11223 case NOP_EXPR:
11224 case FLOAT_EXPR:
11225 case CONVERT_EXPR:
11226 case FIX_TRUNC_EXPR:
11227 case FIX_FLOOR_EXPR:
11228 case FIX_CEIL_EXPR:
11229 case FIX_ROUND_EXPR:
11230 return fold_convert_const (code, type, op0);
11232 case NEGATE_EXPR:
11233 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11234 return fold_negate_const (op0, type);
11235 else
11236 return NULL_TREE;
11238 case ABS_EXPR:
11239 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
11240 return fold_abs_const (op0, type);
11241 else
11242 return NULL_TREE;
11244 case BIT_NOT_EXPR:
11245 if (TREE_CODE (op0) == INTEGER_CST)
11246 return fold_not_const (op0, type);
11247 else
11248 return NULL_TREE;
11250 case REALPART_EXPR:
11251 if (TREE_CODE (op0) == COMPLEX_CST)
11252 return TREE_REALPART (op0);
11253 else
11254 return NULL_TREE;
11256 case IMAGPART_EXPR:
11257 if (TREE_CODE (op0) == COMPLEX_CST)
11258 return TREE_IMAGPART (op0);
11259 else
11260 return NULL_TREE;
11262 case CONJ_EXPR:
11263 if (TREE_CODE (op0) == COMPLEX_CST
11264 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
11265 return build_complex (type, TREE_REALPART (op0),
11266 negate_expr (TREE_IMAGPART (op0)));
11267 return NULL_TREE;
11269 default:
11270 return NULL_TREE;
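/* Usage sketch for fold_unary_to_constant (hypothetical operand):

     tree five = build_int_cst (integer_type_node, 5);
     tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                        five);

   neg would be the INTEGER_CST -5; a non-constant operand makes the
   function return NULL_TREE.  */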
11274 /* If EXP represents referencing an element in a constant string
11275 (either via pointer arithmetic or array indexing), return the
11276 tree representing the value accessed, otherwise return NULL. */
11278 tree
11279 fold_read_from_constant_string (tree exp)
11281 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11283 tree exp1 = TREE_OPERAND (exp, 0);
11284 tree index;
11285 tree string;
11287 if (TREE_CODE (exp) == INDIRECT_REF)
11288 string = string_constant (exp1, &index);
11289 else
11291 tree low_bound = array_ref_low_bound (exp);
11292 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11294 /* Optimize the special case of a zero lower bound.
11296 We convert the low_bound to sizetype to avoid some problems
11297 with constant folding. (E.g. suppose the lower bound is 1,
11298 and its mode is QI. Without the conversion, (ARRAY
11299 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11300 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11301 if (! integer_zerop (low_bound))
11302 index = size_diffop (index, fold_convert (sizetype, low_bound));
11304 string = exp1;
11307 if (string
11308 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11309 && TREE_CODE (string) == STRING_CST
11310 && TREE_CODE (index) == INTEGER_CST
11311 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11312 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11313 == MODE_INT)
11314 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11315 return fold_convert (TREE_TYPE (exp),
11316 build_int_cst (NULL_TREE,
11317 (TREE_STRING_POINTER (string)
11318 [TREE_INT_CST_LOW (index)])));
11320 return NULL;
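/* Example: for an ARRAY_REF of the STRING_CST "abc" with constant
   index 1, the routine above returns a character constant holding
   'b'.  A variable index, or one at or beyond TREE_STRING_LENGTH,
   yields NULL.  */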
11323 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11324 an integer constant or real constant.
11326 TYPE is the type of the result. */
11328 static tree
11329 fold_negate_const (tree arg0, tree type)
11331 tree t = NULL_TREE;
11333 switch (TREE_CODE (arg0))
11335 case INTEGER_CST:
11337 unsigned HOST_WIDE_INT low;
11338 HOST_WIDE_INT high;
11339 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11340 TREE_INT_CST_HIGH (arg0),
11341 &low, &high);
11342 t = build_int_cst_wide (type, low, high);
11343 t = force_fit_type (t, 1,
11344 (overflow | TREE_OVERFLOW (arg0))
11345 && !TYPE_UNSIGNED (type),
11346 TREE_CONSTANT_OVERFLOW (arg0));
11347 break;
11350 case REAL_CST:
11351 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11352 break;
11354 default:
11355 gcc_unreachable ();
11358 return t;
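/* Note: negating the most negative value of a signed type overflows;
   neg_double reports this and force_fit_type then sets TREE_OVERFLOW
   on the result instead of silently wrapping.  */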
11361 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11362 an integer constant or real constant.
11364 TYPE is the type of the result. */
11366 tree
11367 fold_abs_const (tree arg0, tree type)
11369 tree t = NULL_TREE;
11371 switch (TREE_CODE (arg0))
11373 case INTEGER_CST:
11374 /* If the value is unsigned, then the absolute value is
11375 the same as the ordinary value. */
11376 if (TYPE_UNSIGNED (type))
11377 t = arg0;
11378 /* Similarly, if the value is non-negative. */
11379 else if (INT_CST_LT (integer_minus_one_node, arg0))
11380 t = arg0;
11381 /* If the value is negative, then the absolute value is
11382 its negation. */
11383 else
11385 unsigned HOST_WIDE_INT low;
11386 HOST_WIDE_INT high;
11387 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11388 TREE_INT_CST_HIGH (arg0),
11389 &low, &high);
11390 t = build_int_cst_wide (type, low, high);
11391 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11392 TREE_CONSTANT_OVERFLOW (arg0));
11394 break;
11396 case REAL_CST:
11397 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11398 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11399 else
11400 t = arg0;
11401 break;
11403 default:
11404 gcc_unreachable ();
11407 return t;
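/* Example: fold_abs_const on the INTEGER_CST -5 of type int takes the
   negation branch and yields 5; for the most negative value of the
   type the negation overflows and force_fit_type records it.  */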
11410 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11411 constant. TYPE is the type of the result. */
11413 static tree
11414 fold_not_const (tree arg0, tree type)
11416 tree t = NULL_TREE;
11418 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11420 t = build_int_cst_wide (type,
11421 ~ TREE_INT_CST_LOW (arg0),
11422 ~ TREE_INT_CST_HIGH (arg0));
11423 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11424 TREE_CONSTANT_OVERFLOW (arg0));
11426 return t;
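/* Example: for a 32-bit unsigned type, fold_not_const on the
   INTEGER_CST 0 complements both words and force_fit_type truncates
   the result to the type's precision, giving 0xffffffff.  */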
11429 /* Given CODE, a relational operator, the target type, TYPE and two
11430 constant operands OP0 and OP1, return the result of the
11431 relational operation. If the result is not a compile time
11432 constant, then return NULL_TREE. */
11434 static tree
11435 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11437 int result, invert;
11439 /* From here on, the only cases we handle are when the result is
11440 known to be a constant. */
11442 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11444 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11445 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11447 /* Handle the cases where either operand is a NaN. */
11448 if (real_isnan (c0) || real_isnan (c1))
11450 switch (code)
11452 case EQ_EXPR:
11453 case ORDERED_EXPR:
11454 result = 0;
11455 break;
11457 case NE_EXPR:
11458 case UNORDERED_EXPR:
11459 case UNLT_EXPR:
11460 case UNLE_EXPR:
11461 case UNGT_EXPR:
11462 case UNGE_EXPR:
11463 case UNEQ_EXPR:
11464 result = 1;
11465 break;
11467 case LT_EXPR:
11468 case LE_EXPR:
11469 case GT_EXPR:
11470 case GE_EXPR:
11471 case LTGT_EXPR:
11472 if (flag_trapping_math)
11473 return NULL_TREE;
11474 result = 0;
11475 break;
11477 default:
11478 gcc_unreachable ();
11481 return constant_boolean_node (result, type);
11484 return constant_boolean_node (real_compare (code, c0, c1), type);
11487 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11489 To compute GT, swap the arguments and do LT.
11490 To compute GE, do LT and invert the result.
11491 To compute LE, swap the arguments, do LT and invert the result.
11492 To compute NE, do EQ and invert the result.
11494 Therefore, the code below must handle only EQ and LT. */
11496 if (code == LE_EXPR || code == GT_EXPR)
11498 tree tem = op0;
11499 op0 = op1;
11500 op1 = tem;
11501 code = swap_tree_comparison (code);
11504 /* Note that it is safe to invert for real values here because we
11505 have already handled the one case where it matters. */
11507 invert = 0;
11508 if (code == NE_EXPR || code == GE_EXPR)
11510 invert = 1;
11511 code = invert_tree_comparison (code, false);
11514 /* Compute a result for LT or EQ if args permit;
11515 otherwise return NULL_TREE. */
11516 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11518 if (code == EQ_EXPR)
11519 result = tree_int_cst_equal (op0, op1);
11520 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11521 result = INT_CST_LT_UNSIGNED (op0, op1);
11522 else
11523 result = INT_CST_LT (op0, op1);
11525 else
11526 return NULL_TREE;
11528 if (invert)
11529 result ^= 1;
11530 return constant_boolean_node (result, type);
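/* Sketch of the NaN handling above: with either operand a NaN,
   EQ_EXPR and ORDERED_EXPR fold to 0 while NE_EXPR and the unordered
   comparisons fold to 1; LT/LE/GT/GE/LTGT fold to 0 only when
   flag_trapping_math is clear, since otherwise the runtime comparison
   (and its possible invalid-operand trap) must be kept.  */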
11533 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11534 Don't build a cleanup point expression for EXPR if it doesn't have side
11535 effects. */
11537 tree
11538 fold_build_cleanup_point_expr (tree type, tree expr)
11540 /* If the expression does not have side effects then we don't have to wrap
11541 it with a cleanup point expression. */
11542 if (!TREE_SIDE_EFFECTS (expr))
11543 return expr;
11545 /* If the expression is a return, check whether the expression inside the
11546 return, or the right-hand side of the modify expression inside the
11547 return, has side effects. If either has none, we don't need to wrap
11548 the expression in a cleanup point expression. Note we don't check the
11549 left-hand side of the modify because it should always be a return decl. */
11550 if (TREE_CODE (expr) == RETURN_EXPR)
11552 tree op = TREE_OPERAND (expr, 0);
11553 if (!op || !TREE_SIDE_EFFECTS (op))
11554 return expr;
11555 op = TREE_OPERAND (op, 1);
11556 if (!TREE_SIDE_EFFECTS (op))
11557 return expr;
11560 return build1 (CLEANUP_POINT_EXPR, type, expr);
11563 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11564 avoid confusing the gimplify process. */
11566 tree
11567 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11569 /* The size of the object is not relevant when talking about its address. */
11570 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11571 t = TREE_OPERAND (t, 0);
11573 /* Note: this does not apply to ALIGN_INDIRECT_REF. */
11574 if (TREE_CODE (t) == INDIRECT_REF
11575 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11577 t = TREE_OPERAND (t, 0);
11578 if (TREE_TYPE (t) != ptrtype)
11579 t = build1 (NOP_EXPR, ptrtype, t);
11581 else
11583 tree base = t;
11585 while (handled_component_p (base))
11586 base = TREE_OPERAND (base, 0);
11587 if (DECL_P (base))
11588 TREE_ADDRESSABLE (base) = 1;
11590 t = build1 (ADDR_EXPR, ptrtype, t);
11593 return t;
11596 tree
11597 build_fold_addr_expr (tree t)
11599 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11602 /* Given a pointer value T, return a simplified version of an indirection
11603 through T, or NULL_TREE if no simplification is possible. */
11605 static tree
11606 fold_indirect_ref_1 (tree t)
11608 tree type = TREE_TYPE (TREE_TYPE (t));
11609 tree sub = t;
11610 tree subtype;
11612 STRIP_NOPS (sub);
11613 subtype = TREE_TYPE (sub);
11614 if (!POINTER_TYPE_P (subtype))
11615 return NULL_TREE;
11617 if (TREE_CODE (sub) == ADDR_EXPR)
11619 tree op = TREE_OPERAND (sub, 0);
11620 tree optype = TREE_TYPE (op);
11621 /* *&p => p */
11622 if (lang_hooks.types_compatible_p (type, optype))
11623 return op;
11624 /* *(foo *)&fooarray => fooarray[0] */
11625 else if (TREE_CODE (optype) == ARRAY_TYPE
11626 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
11628 tree type_domain = TYPE_DOMAIN (optype);
11629 tree min_val = size_zero_node;
11630 if (type_domain && TYPE_MIN_VALUE (type_domain))
11631 min_val = TYPE_MIN_VALUE (type_domain);
11632 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11636 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11637 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11638 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
11640 tree type_domain;
11641 tree min_val = size_zero_node;
11642 sub = build_fold_indirect_ref (sub);
11643 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11644 if (type_domain && TYPE_MIN_VALUE (type_domain))
11645 min_val = TYPE_MIN_VALUE (type_domain);
11646 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11649 return NULL_TREE;
11652 /* Builds an expression for an indirection through T, simplifying some
11653 cases. */
11655 tree
11656 build_fold_indirect_ref (tree t)
11658 tree sub = fold_indirect_ref_1 (t);
11660 if (sub)
11661 return sub;
11662 else
11663 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
11666 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11668 tree
11669 fold_indirect_ref (tree t)
11671 tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
11673 if (sub)
11674 return sub;
11675 else
11676 return t;
11679 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11680 whose result is ignored. The type of the returned tree need not be
11681 the same as that of the original expression. */
11683 tree
11684 fold_ignored_result (tree t)
11686 if (!TREE_SIDE_EFFECTS (t))
11687 return integer_zero_node;
11689 for (;;)
11690 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11692 case tcc_unary:
11693 t = TREE_OPERAND (t, 0);
11694 break;
11696 case tcc_binary:
11697 case tcc_comparison:
11698 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11699 t = TREE_OPERAND (t, 0);
11700 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11701 t = TREE_OPERAND (t, 1);
11702 else
11703 return t;
11704 break;
11706 case tcc_expression:
11707 switch (TREE_CODE (t))
11709 case COMPOUND_EXPR:
11710 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11711 return t;
11712 t = TREE_OPERAND (t, 0);
11713 break;
11715 case COND_EXPR:
11716 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11717 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11718 return t;
11719 t = TREE_OPERAND (t, 0);
11720 break;
11722 default:
11723 return t;
11725 break;
11727 default:
11728 return t;
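/* Example: if T is the COMPOUND_EXPR "x = f (), y" and "y" has no
   side effects, the loop above strips the second operand and returns
   "x = f ()"; a T with no side effects at all is replaced by
   integer_zero_node up front.  */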
11732 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11733 This can only be applied to objects of a sizetype. */
11735 tree
11736 round_up (tree value, int divisor)
11738 tree div = NULL_TREE;
11740 gcc_assert (divisor > 0);
11741 if (divisor == 1)
11742 return value;
11744 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11745 have to do anything. Only do this check when VALUE is not a
11746 constant, because for a constant the check is more expensive than
11747 just doing the rounding. */
11748 if (TREE_CODE (value) != INTEGER_CST)
11750 div = build_int_cst (TREE_TYPE (value), divisor);
11752 if (multiple_of_p (TREE_TYPE (value), value, div))
11753 return value;
11756 /* If divisor is a power of two, simplify this to bit manipulation. */
11757 if (divisor == (divisor & -divisor))
11759 tree t;
11761 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11762 value = size_binop (PLUS_EXPR, value, t);
11763 t = build_int_cst (TREE_TYPE (value), -divisor);
11764 value = size_binop (BIT_AND_EXPR, value, t);
11766 else
11768 if (!div)
11769 div = build_int_cst (TREE_TYPE (value), divisor);
11770 value = size_binop (CEIL_DIV_EXPR, value, div);
11771 value = size_binop (MULT_EXPR, value, div);
11774 return value;
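/* Worked example: for a power-of-two divisor such as 8, the
   bit-manipulation branch above computes

     value = (value + 7) & -8;

   rounding up to the next multiple of 8 without a division.  */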
11777 /* Likewise, but round down. */
11779 tree
11780 round_down (tree value, int divisor)
11782 tree div = NULL_TREE;
11784 gcc_assert (divisor > 0);
11785 if (divisor == 1)
11786 return value;
11788 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11789 have to do anything. Only do this check when VALUE is not a
11790 constant, because for a constant the check is more expensive than
11791 just doing the rounding. */
11792 if (TREE_CODE (value) != INTEGER_CST)
11794 div = build_int_cst (TREE_TYPE (value), divisor);
11796 if (multiple_of_p (TREE_TYPE (value), value, div))
11797 return value;
11800 /* If divisor is a power of two, simplify this to bit manipulation. */
11801 if (divisor == (divisor & -divisor))
11803 tree t;
11805 t = build_int_cst (TREE_TYPE (value), -divisor);
11806 value = size_binop (BIT_AND_EXPR, value, t);
11808 else
11810 if (!div)
11811 div = build_int_cst (TREE_TYPE (value), divisor);
11812 value = size_binop (FLOOR_DIV_EXPR, value, div);
11813 value = size_binop (MULT_EXPR, value, div);
11816 return value;
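/* Worked example: for divisor == 8 the power-of-two branch reduces to

     value = value & -8;

   clearing the low bits rounds down onto a multiple of 8.  */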
11819 /* Returns the pointer to the base of the object addressed by EXP and
11820 extracts the information about the offset of the access, storing it
11821 in PBITPOS and POFFSET. */
11823 static tree
11824 split_address_to_core_and_offset (tree exp,
11825 HOST_WIDE_INT *pbitpos, tree *poffset)
11827 tree core;
11828 enum machine_mode mode;
11829 int unsignedp, volatilep;
11830 HOST_WIDE_INT bitsize;
11832 if (TREE_CODE (exp) == ADDR_EXPR)
11834 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11835 poffset, &mode, &unsignedp, &volatilep,
11836 false);
11838 if (TREE_CODE (core) == INDIRECT_REF)
11839 core = TREE_OPERAND (core, 0);
11841 else
11843 core = exp;
11844 *pbitpos = 0;
11845 *poffset = NULL_TREE;
11848 return core;
11851 /* Returns true if addresses of E1 and E2 differ by a constant, false
11852 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11854 bool
11855 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11857 tree core1, core2;
11858 HOST_WIDE_INT bitpos1, bitpos2;
11859 tree toffset1, toffset2, tdiff, type;
11861 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11862 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11864 if (bitpos1 % BITS_PER_UNIT != 0
11865 || bitpos2 % BITS_PER_UNIT != 0
11866 || !operand_equal_p (core1, core2, 0))
11867 return false;
11869 if (toffset1 && toffset2)
11871 type = TREE_TYPE (toffset1);
11872 if (type != TREE_TYPE (toffset2))
11873 toffset2 = fold_convert (type, toffset2);
11875 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11876 if (!host_integerp (tdiff, 0))
11877 return false;
11879 *diff = tree_low_cst (tdiff, 0);
11881 else if (toffset1 || toffset2)
11883 /* If only one of the offsets is non-constant, the difference cannot
11884 be a constant. */
11885 return false;
11887 else
11888 *diff = 0;
11890 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11891 return true;
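/* Sketch (hypothetical trees): for E1 == &a[4] and E2 == &a[1] over
   the same base "a" with 4-byte elements, the cores compare equal and
   *DIFF is set to 12.  If exactly one address has a non-constant
   offset part, the function returns false.  */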
11894 /* Simplify the floating point expression EXP when the sign of the
11895 result is not significant. Return NULL_TREE if no simplification
11896 is possible. */
11898 tree
11899 fold_strip_sign_ops (tree exp)
11901 tree arg0, arg1;
11903 switch (TREE_CODE (exp))
11905 case ABS_EXPR:
11906 case NEGATE_EXPR:
11907 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11908 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11910 case MULT_EXPR:
11911 case RDIV_EXPR:
11912 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11913 return NULL_TREE;
11914 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11915 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11916 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11917 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11918 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11919 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11920 break;
11922 default:
11923 break;
11925 return NULL_TREE;
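/* Example: when only the magnitude of the result matters,
   "(-x) * (-y)" strips to "x * y" via the NEGATE_EXPR and MULT_EXPR
   cases above; no stripping is done under sign-dependent rounding,
   where the sign of an intermediate can change the rounded value.  */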