/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
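
/* Illustrative example (not from the original source), using 8-bit
   arithmetic for brevity: a = 96 (0x60) and b = 80 (0x50) have equal
   signs, and their wrapped sum is 176 (0xB0), which is negative as a
   signed value.  ~(a ^ b) has the sign bit set (the operands agree in
   sign) and (a ^ sum) has the sign bit set (the result's sign differs
   from a's), so the AND is negative and OVERFLOW_SUM_SIGN reports the
   overflow.  */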
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
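
/* Worked example (illustrative, assuming HOST_BITS_PER_WIDE_INT == 32):
   BASE is then 0x10000, and for x == 0x12345678 we get
   LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234, so
   LOWPART (x) + HIGHPART (x) * BASE == 0x12345678 again.  */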
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
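
/* Illustrative round trip (again assuming HOST_BITS_PER_WIDE_INT == 32):
   encode (words, 0x89ABCDEF, 0x01234567) yields
   words == { 0xCDEF, 0x89AB, 0x4567, 0x0123 }, and decode on that
   array recovers low == 0x89ABCDEF and hi == 0x01234567.  */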
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
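
/* Example of the truncation step (illustrative): fitting the value
   0x1FF into an 8-bit unsigned type clears all bits above bit 7,
   leaving 0xFF; since the result differs from the input, the function
   returns nonzero to signal overflow.  For an 8-bit signed type the
   same input truncates to 0xFF and is then sign extended to -1.  */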
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
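
/* Illustrative behavior (not from the original source): folding
   255 + 1 in an 8-bit unsigned type reaches this function with
   LOW == 256; fit_double_type wraps it to 0 and reports overflow,
   but because the type is unsigned and OVERFLOWABLE is 1 (signed
   overflow only), a shared node for 0 is returned without
   TREE_OVERFLOW set -- unsigned wraparound is well defined.  */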
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
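
/* Example (illustrative, 32-bit HOST_WIDE_INT): adding 1 to the
   all-ones doubleword 0xFFFFFFFF:0xFFFFFFFF gives 0:0.  Interpreted
   as unsigned, the high word shrank (0 < 0xFFFFFFFF), so the carry
   out signals overflow; interpreted as signed it is just -1 + 1 == 0,
   and OVERFLOW_SUM_SIGN correctly reports no overflow.  */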
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
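
/* Example (illustrative): the most negative doubleword value has
   L1 == 0 and only the sign bit of H1 set.  Negating it takes the
   first branch, *hv = -h1 wraps back to the same sign-bit pattern,
   and (*hv & h1) < 0 flags the overflow -- the doubleword analogue
   of -INT_MIN overflowing in ordinary int arithmetic.  */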
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
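
/* Example (illustrative, 32-bit HOST_WIDE_INT): squaring the
   doubleword value 0x10000 gives the 4-word product 0:0:1:0, i.e.
   low == 0 and high == 1 with both top words zero, so neither check
   reports overflow; squaring 0x100000000 (2**64) instead leaves
   nonzero top words and is flagged as overflow.  */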
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
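
/* Example (illustrative): with ARITH nonzero and a negative input,
   SIGNMASK is all ones, so shifting the doubleword value -4 right by
   one yields -2, matching an arithmetic shift; with ARITH zero the
   vacated high bits are filled with zeros instead (logical shift).  */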
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
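
/* Both rotates use the standard decomposition into two logical
   shifts, e.g. rotate-left (x, n) == (x << n) | (x >> (prec - n)).
   Illustrative example with prec == 8: rotating 0xB4 left by 4
   gives (0x40 | 0x0B) == 0x4B.  */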
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
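
/* Rounding example (illustrative): dividing -7 by 2 gives quotient -3
   and remainder -1 under TRUNC_DIV_EXPR, quotient -4 and remainder 1
   under FLOOR_DIV_EXPR, quotient -3 and remainder -1 under
   CEIL_DIV_EXPR, and quotient -4 under ROUND_DIV_EXPR (twice the
   remainder's magnitude equals the divisor, so the quotient is bumped
   away from zero).  In every case num == quo * den + rem holds.  */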
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
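
/* Illustrative usage: with TRUNC_DIV_EXPR, constants 12 and 4 divide
   exactly and yield the constant 3, while 13 and 4 leave remainder 1
   and make the function return NULL_TREE.  */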
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
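
/* Hypothetical usage sketch (not a quote from any actual caller): a
   pass that folds speculatively brackets the fold like

     fold_defer_overflow_warnings ();
     t = fold_binary (PLUS_EXPR, type, a, b);
     ...decide whether the folded result is actually used...
     fold_undefer_overflow_warnings (result_used_p, stmt, 0);

   so that an "assuming signed overflow does not occur" warning is
   only emitted when the folded result is kept.  */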
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
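
/* Example: sin is odd, so -sin(x) may be folded to sin(-x).  The rint
   group is odd only under round-to-nearest or round-toward-zero; with
   directed rounding, rint(-x) need not equal -rint(x), hence the
   !flag_rounding_math guard above.  */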
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
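
/* Example (illustrative): in an 8-bit signed type the only value this
   rejects is -128, whose low bits are exactly 1 << (prec - 1); every
   other value has a representable negation.  */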
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
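
/* Illustrative folds performed above: -(a - b) becomes b - a when
   signed zeros and sign-dependent rounding can be ignored, -(~a)
   becomes a + 1 for integral types, and -(a / -5) becomes a / 5 when
   signed overflow is undefined.  */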
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
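
/* Illustrative decompositions with CODE == PLUS_EXPR: for IN == a + 5
   the function returns a and sets *LITP to 5; for IN == a - 5 it
   returns a and sets *MINUS_LITP to 5; with NEGATE_P set, the literal
   lands in the opposite slot and the variable part is negated.  */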
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
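
/* Illustrative sketch of the common path: for a 32-bit signed type,
   combining INT_MAX and 1 under PLUS_EXPR computes the doubleword sum
   without doubleword overflow, but force_fit_type_double then sign
   extends it back into the 32-bit type, notices the value changed,
   and returns an INT_MIN node with TREE_OVERFLOW set.  */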
1799 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1800 constant. We assume ARG1 and ARG2 have the same data type, or at least
1801 are the same kind of constant and the same machine mode. Return zero if
1802 combining the constants is not allowed in the current operating mode.
1804 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1806 static tree
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 /* Sanity check for the recursive cases. */
1810 if (!arg1 || !arg2)
1811 return NULL_TREE;
1813 STRIP_NOPS (arg1);
1814 STRIP_NOPS (arg2);
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1819 if (TREE_CODE (arg1) == REAL_CST)
1821 enum machine_mode mode;
1822 REAL_VALUE_TYPE d1;
1823 REAL_VALUE_TYPE d2;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1826 bool inexact;
1827 tree t, type;
1829 /* The following codes are handled by real_arithmetic. */
1830 switch (code)
1832 case PLUS_EXPR:
1833 case MINUS_EXPR:
1834 case MULT_EXPR:
1835 case RDIV_EXPR:
1836 case MIN_EXPR:
1837 case MAX_EXPR:
1838 break;
1840 default:
1841 return NULL_TREE;
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1850 /* Don't perform operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1854 return NULL_TREE;
1856 /* Don't perform operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1861 return NULL_TREE;
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1866 return arg1;
1867 else if (REAL_VALUE_ISNAN (d2))
1868 return arg2;
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1880 return NULL_TREE;
1882 /* Don't constant fold this floating point operation if the
1883 result may dependent upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1888 && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1890 return NULL_TREE;
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1895 return t;
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1903 tree t, type;
1904 int sat_p;
1905 bool overflow_p;
1907 /* The following codes are handled by fixed_arithmetic. */
1908 switch (code)
1910 case PLUS_EXPR:
1911 case MINUS_EXPR:
1912 case MULT_EXPR:
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1915 break;
1917 case LSHIFT_EXPR:
1918 case RSHIFT_EXPR:
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1921 f2.mode = SImode;
1922 break;
1924 default:
1925 return NULL_TREE;
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 return t;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1951 tree real, imag;
1953 switch (code)
1955 case PLUS_EXPR:
1956 case MINUS_EXPR:
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1959 break;
1961 case MULT_EXPR:
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1965 notrunc);
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1969 notrunc);
1970 break;
1972 case RDIV_EXPR:
1974 tree magsquared
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1978 notrunc);
1979 tree t1
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1983 notrunc);
1984 tree t2
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1988 notrunc);
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
1996 break;
1998 default:
1999 return NULL_TREE;
2002 if (real && imag)
2003 return build_complex (type, real, imag);
2006 return NULL_TREE;
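/* For reference, the COMPLEX_CST cases above implement the textbook
   identities; for MULT_EXPR,

     (r1 + i1*I) * (r2 + i2*I) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*I,

   and RDIV_EXPR multiplies by the conjugate and divides by
   |r2 + i2*I|^2 = r2*r2 + i2*i2, whence

     real = (r1*r2 + i1*i2) / magsquared
     imag = (i1*r2 - r1*i2) / magsquared.

   A sketch of a caller, with arg1 and arg2 standing in for two
   constants of the same type:

     tree folded = const_binop (MULT_EXPR, arg1, arg2, 0);
     if (folded)
       ... use the folded constant ...
     else
       ... fall back to emitting the runtime operation ...  */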
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2012 tree
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, as checked by int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2023 tree
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2032 TREE_TYPE (arg1)));
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2041 return arg1;
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2043 return arg0;
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 return arg0;
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2053 return arg1;
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
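/* A typical use of size_binop is compile-time size arithmetic; for
   example, the byte size of an array of COUNT elements might be
   computed (elem_type and count being illustrative placeholders) as

     tree total = size_binop (MULT_EXPR,
                              TYPE_SIZE_UNIT (elem_type),
                              size_int (count));

   When both operands are INTEGER_CSTs the result is folded through
   int_const_binop; otherwise an expression tree is built via
   fold_build2.  */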
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2067 tree
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2071 tree ctype;
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2081 ctype = ssizetype;
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2084 else
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2102 else
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2105 arg1, arg0)));
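/* For example, a difference of two sizetype values that may be
   negative must go through size_diffop rather than size_binop,
   because sizetype is unsigned in most front ends:

     tree delta = size_diffop (offset_a, offset_b);

   (offset_a and offset_b being placeholder sizetype trees); the
   result has type ssizetype and is correctly negative when offset_b
   exceeds offset_a.  */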
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2111 static tree
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2114 tree t;
2116 /* Given an integer constant, make a new constant with the new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with same signedness
2124 and the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2142 return t;
2145 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2146 to an integer type. */
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2151 int overflow = 0;
2152 tree t;
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 namely that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2167 switch (code)
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2173 default:
2174 gcc_unreachable ();
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2180 overflow = 1;
2181 high = 0;
2182 low = 0;
2185 /* See if R is less than the lower bound or greater than the
2186 upper bound. */
2188 if (! overflow)
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2194 overflow = 1;
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2200 if (! overflow)
2202 tree ut = TYPE_MAX_VALUE (type);
2203 if (ut)
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2208 overflow = 1;
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2215 if (! overflow)
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2220 return t;
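/* As a concrete illustration of the saturating semantics above,
   folding a FIX_TRUNC_EXPR to a 32-bit signed type gives

     (int) 42.75    ->  42           (truncation toward zero)
     (int) 1.0e30   ->  2147483647   (INT_MAX, TREE_OVERFLOW set)
     (int) -1.0e30  -> -2147483648   (INT_MIN, TREE_OVERFLOW set)
     (int) NaN      ->  0            (TREE_OVERFLOW set).  */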
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2226 static tree
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2229 tree t;
2230 double_int temp, temp_trunc;
2231 unsigned int mode;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2248 else
2250 temp.low = 0;
2251 temp.high = 0;
2252 temp_trunc.low = 0;
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0;
2257 if the fractional bits are not zero, add 1 to temp. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2261 double_int one;
2262 one.low = 1;
2263 one.high = 0;
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make a new constant with the new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2270 (temp.high < 0
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2275 return t;
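/* A worked example of the rounding above: converting -2.5 in a signed
   fixed-point mode with 16 fractional bits (raw data 0xfffd8000 in 32
   bits), the arithmetic right shift by FBIT yields -3; shifting back
   left gives 0xfffd0000, which differs from the original data, so the
   fraction was nonzero and 1 is added, producing -2 -- truncation
   toward zero, as the comment requires.  */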
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2281 static tree
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2285 tree t;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2291 return t;
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2297 static tree
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2301 tree t;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2309 return t;
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2315 static tree
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2319 tree t;
2320 bool overflow_p;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 return t;
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2340 static tree
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 return t;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 return t;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2393 static tree
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2397 return arg1;
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2408 else if (TREE_CODE (type) == REAL_TYPE)
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2426 return NULL_TREE;
2429 /* Construct a vector of zero elements of vector type TYPE. */
2431 static tree
2432 build_zero_vector (tree type)
2434 tree elem, list;
2435 int i, units;
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2440 list = NULL_TREE;
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2446 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2448 bool
2449 fold_convertible_p (const_tree type, const_tree arg)
2451 tree orig = TREE_TYPE (arg);
2453 if (type == orig)
2454 return true;
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2459 return false;
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2462 return true;
2464 switch (TREE_CODE (type))
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2468 case OFFSET_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2471 return true;
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2475 case REAL_TYPE:
2476 case FIXED_POINT_TYPE:
2477 case COMPLEX_TYPE:
2478 case VECTOR_TYPE:
2479 case VOID_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2482 default:
2483 return false;
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2490 tree
2491 fold_convert (tree type, tree arg)
2493 tree orig = TREE_TYPE (arg);
2494 tree tem;
2496 if (type == orig)
2497 return arg;
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2507 switch (TREE_CODE (type))
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2511 case OFFSET_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2516 return tem;
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2530 case REAL_TYPE:
2531 if (TREE_CODE (arg) == INTEGER_CST)
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2535 return tem;
2537 else if (TREE_CODE (arg) == REAL_CST)
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2541 return tem;
2543 else if (TREE_CODE (arg) == FIXED_CST)
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2547 return tem;
2550 switch (TREE_CODE (orig))
2552 case INTEGER_TYPE:
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2557 case REAL_TYPE:
2558 return fold_build1 (NOP_EXPR, type, arg);
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2563 case COMPLEX_TYPE:
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2567 default:
2568 gcc_unreachable ();
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2577 return tem;
2580 switch (TREE_CODE (orig))
2582 case FIXED_POINT_TYPE:
2583 case INTEGER_TYPE:
2584 case ENUMERAL_TYPE:
2585 case BOOLEAN_TYPE:
2586 case REAL_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2589 case COMPLEX_TYPE:
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2593 default:
2594 gcc_unreachable ();
2597 case COMPLEX_TYPE:
2598 switch (TREE_CODE (orig))
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 case REAL_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2608 case COMPLEX_TYPE:
2610 tree rpart, ipart;
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2627 default:
2628 gcc_unreachable ();
2631 case VECTOR_TYPE:
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2639 case VOID_TYPE:
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2642 return tem;
2643 return fold_build1 (NOP_EXPR, type, tem);
2645 default:
2646 gcc_unreachable ();
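/* fold_convert is the middle end's general type-adjustment entry
   point; a representative use when two operands must share a type
   (a and b standing in for arbitrary integer-typed trees) is

     tree cmp = fold_build2 (LT_EXPR, boolean_type_node,
                             fold_convert (long_integer_type_node, a),
                             fold_convert (long_integer_type_node, b));

   Constant arguments are folded immediately through fold_convert_const;
   everything else becomes a NOP_EXPR, FLOAT_EXPR, FIXED_CONVERT_EXPR
   or VIEW_CONVERT_EXPR as appropriate.  */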
2650 /* Return false if expr can be assumed not to be an lvalue, true
2651 otherwise. */
2653 static bool
2654 maybe_lvalue_p (const_tree x)
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2659 case VAR_DECL:
2660 case PARM_DECL:
2661 case RESULT_DECL:
2662 case LABEL_DECL:
2663 case FUNCTION_DECL:
2664 case SSA_NAME:
2666 case COMPONENT_REF:
2667 case INDIRECT_REF:
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2670 case ARRAY_REF:
2671 case ARRAY_RANGE_REF:
2672 case BIT_FIELD_REF:
2673 case OBJ_TYPE_REF:
2675 case REALPART_EXPR:
2676 case IMAGPART_EXPR:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2679 case SAVE_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2682 case COMPOUND_EXPR:
2683 case MODIFY_EXPR:
2684 case GIMPLE_MODIFY_STMT:
2685 case TARGET_EXPR:
2686 case COND_EXPR:
2687 case BIND_EXPR:
2688 case MIN_EXPR:
2689 case MAX_EXPR:
2690 break;
2692 default:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2695 break;
2696 return false;
2699 return true;
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2704 tree
2705 non_lvalue (tree x)
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2708 us. */
2709 if (in_gimple_form)
2710 return x;
2712 if (! maybe_lvalue_p (x))
2713 return x;
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2725 static tree
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2730 else
2731 return x;
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an honor_nans
2737 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2739 enum tree_code
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2743 return ERROR_MARK;
2745 switch (code)
2747 case EQ_EXPR:
2748 return NE_EXPR;
2749 case NE_EXPR:
2750 return EQ_EXPR;
2751 case GT_EXPR:
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2753 case GE_EXPR:
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2755 case LT_EXPR:
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2757 case LE_EXPR:
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2759 case LTGT_EXPR:
2760 return UNEQ_EXPR;
2761 case UNEQ_EXPR:
2762 return LTGT_EXPR;
2763 case UNGT_EXPR:
2764 return LE_EXPR;
2765 case UNGE_EXPR:
2766 return LT_EXPR;
2767 case UNLT_EXPR:
2768 return GE_EXPR;
2769 case UNLE_EXPR:
2770 return GT_EXPR;
2771 case ORDERED_EXPR:
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2775 default:
2776 gcc_unreachable ();
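/* For example, when NaNs are honored the logical inverse of a < b is
   not a >= b but the unordered-or-greater-equal form, since both
   LT_EXPR and GE_EXPR are false on a NaN operand:

     invert_tree_comparison (LT_EXPR, true)    returns UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false)   returns GE_EXPR

   and with both honor_nans and flag_trapping_math set the inversion
   is refused (ERROR_MARK), since swapping ordered for unordered
   comparisons would change which operands raise the IEEE invalid
   operation exception.  */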
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2783 enum tree_code
2784 swap_tree_comparison (enum tree_code code)
2786 switch (code)
2788 case EQ_EXPR:
2789 case NE_EXPR:
2790 case ORDERED_EXPR:
2791 case UNORDERED_EXPR:
2792 case LTGT_EXPR:
2793 case UNEQ_EXPR:
2794 return code;
2795 case GT_EXPR:
2796 return LT_EXPR;
2797 case GE_EXPR:
2798 return LE_EXPR;
2799 case LT_EXPR:
2800 return GT_EXPR;
2801 case LE_EXPR:
2802 return GE_EXPR;
2803 case UNGT_EXPR:
2804 return UNLT_EXPR;
2805 case UNGE_EXPR:
2806 return UNLE_EXPR;
2807 case UNLT_EXPR:
2808 return UNGT_EXPR;
2809 case UNLE_EXPR:
2810 return UNGE_EXPR;
2811 default:
2812 gcc_unreachable ();
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2824 switch (code)
2826 case LT_EXPR:
2827 return COMPCODE_LT;
2828 case EQ_EXPR:
2829 return COMPCODE_EQ;
2830 case LE_EXPR:
2831 return COMPCODE_LE;
2832 case GT_EXPR:
2833 return COMPCODE_GT;
2834 case NE_EXPR:
2835 return COMPCODE_NE;
2836 case GE_EXPR:
2837 return COMPCODE_GE;
2838 case ORDERED_EXPR:
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2842 case UNLT_EXPR:
2843 return COMPCODE_UNLT;
2844 case UNEQ_EXPR:
2845 return COMPCODE_UNEQ;
2846 case UNLE_EXPR:
2847 return COMPCODE_UNLE;
2848 case UNGT_EXPR:
2849 return COMPCODE_UNGT;
2850 case LTGT_EXPR:
2851 return COMPCODE_LTGT;
2852 case UNGE_EXPR:
2853 return COMPCODE_UNGE;
2854 default:
2855 gcc_unreachable ();
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2866 switch (code)
2868 case COMPCODE_LT:
2869 return LT_EXPR;
2870 case COMPCODE_EQ:
2871 return EQ_EXPR;
2872 case COMPCODE_LE:
2873 return LE_EXPR;
2874 case COMPCODE_GT:
2875 return GT_EXPR;
2876 case COMPCODE_NE:
2877 return NE_EXPR;
2878 case COMPCODE_GE:
2879 return GE_EXPR;
2880 case COMPCODE_ORD:
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2884 case COMPCODE_UNLT:
2885 return UNLT_EXPR;
2886 case COMPCODE_UNEQ:
2887 return UNEQ_EXPR;
2888 case COMPCODE_UNLE:
2889 return UNLE_EXPR;
2890 case COMPCODE_UNGT:
2891 return UNGT_EXPR;
2892 case COMPCODE_LTGT:
2893 return LTGT_EXPR;
2894 case COMPCODE_UNGE:
2895 return UNGE_EXPR;
2896 default:
2897 gcc_unreachable ();
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2907 tree
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2917 switch (code)
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2921 break;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2925 break;
2927 default:
2928 return NULL_TREE;
2931 if (!honor_nans)
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2963 rtrap = false;
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2967 if (rtrap && !ltrap
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2969 return NULL_TREE;
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2973 return NULL_TREE;
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2980 else
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
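/* The bit-based encoding makes the combination itself a single AND or
   OR; for example, folding (x < y) || (x == y):

     COMPCODE_LT | COMPCODE_EQ  ==  1 | 2  ==  3  ==  COMPCODE_LE

   so combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR, type,
   x, y) yields the single comparison x <= y, modulo the NaN and
   trapping checks above.  Dually, (x < y) && (x == y) gives
   COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE, a constant false.  */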
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2987 static int
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3022 int
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3027 return 0;
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3041 return 0;
3043 /* If both types don't have the same precision, then it is not safe
3044 to strip NOPs. */
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3046 return 0;
3048 STRIP_NOPS (arg0);
3049 STRIP_NOPS (arg1);
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3073 return 0;
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3085 return 1;
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3092 case INTEGER_CST:
3093 return tree_int_cst_equal (arg0, arg1);
3095 case FIXED_CST:
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3099 case REAL_CST:
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3102 return 1;
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3110 return 1;
3112 return 0;
3114 case VECTOR_CST:
3116 tree v1, v2;
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3120 while (v1 && v2)
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3123 flags))
3124 return 0;
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3129 return v1 == v2;
3132 case COMPLEX_CST:
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3134 flags)
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3136 flags));
3138 case STRING_CST:
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3144 case ADDR_EXPR:
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3146 0);
3147 default:
3148 break;
3151 if (flags & OEP_ONLY_CONST)
3152 return 0;
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, then both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3167 case tcc_unary:
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3171 CASE_CONVERT:
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3175 return 0;
3176 break;
3177 default:
3178 break;
3181 return OP_SAME (0);
3184 case tcc_comparison:
3185 case tcc_binary:
3186 if (OP_SAME (0) && OP_SAME (1))
3187 return 1;
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3196 case tcc_reference:
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3201 return 0;
3203 switch (TREE_CODE (arg0))
3205 case INDIRECT_REF:
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3208 case REALPART_EXPR:
3209 case IMAGPART_EXPR:
3210 return OP_SAME (0);
3212 case ARRAY_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 Compare the array index by value first if it is constant, since we
3216 may have different types but the same value here. */
3217 return (OP_SAME (0)
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3220 || OP_SAME (1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3224 case COMPONENT_REF:
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3228 && OP_SAME (1)
3229 && OP_SAME_WITH_NULL (2);
3231 case BIT_FIELD_REF:
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3234 default:
3235 return 0;
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3241 case ADDR_EXPR:
3242 case TRUTH_NOT_EXPR:
3243 return OP_SAME (0);
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3249 case TRUTH_AND_EXPR:
3250 case TRUTH_OR_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3253 return 1;
3255 /* Otherwise take into account this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3261 default:
3262 return 0;
3265 case tcc_vl_exp:
3266 switch (TREE_CODE (arg0))
3268 case CALL_EXPR:
3269 /* If the CALL_EXPRs call different functions, then they
3270 clearly cannot be equal. */
3271 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3272 flags))
3273 return 0;
3276 unsigned int cef = call_expr_flags (arg0);
3277 if (flags & OEP_PURE_SAME)
3278 cef &= ECF_CONST | ECF_PURE;
3279 else
3280 cef &= ECF_CONST;
3281 if (!cef)
3282 return 0;
3285 /* Now see if all the arguments are the same. */
3287 const_call_expr_arg_iterator iter0, iter1;
3288 const_tree a0, a1;
3289 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3290 a1 = first_const_call_expr_arg (arg1, &iter1);
3291 a0 && a1;
3292 a0 = next_const_call_expr_arg (&iter0),
3293 a1 = next_const_call_expr_arg (&iter1))
3294 if (! operand_equal_p (a0, a1, flags))
3295 return 0;
3297 /* If we get here and both argument lists are exhausted
3298 then the CALL_EXPRs are equal. */
3299 return ! (a0 || a1);
3301 default:
3302 return 0;
3305 case tcc_declaration:
3306 /* Consider __builtin_sqrt equal to sqrt. */
3307 return (TREE_CODE (arg0) == FUNCTION_DECL
3308 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3309 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3310 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3312 default:
3313 return 0;
3316 #undef OP_SAME
3317 #undef OP_SAME_WITH_NULL
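/* A sketch of typical callers:

     if (operand_equal_p (arg0, arg1, 0))
       ... the two subtrees compute the same value ...

     if (operand_equal_p (arg0, arg1, OEP_ONLY_CONST))
       ... equal, and moreover both are constants ...

   Note the IEEE caveats in the comment above: this asks whether the
   trees are indistinguishable, not whether their values compare equal
   at run time (a NaN is indistinguishable from itself, and yet
   NaN != NaN).  */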
3320 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3321 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3323 When in doubt, return 0. */
3325 static int
3326 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3328 int unsignedp1, unsignedpo;
3329 tree primarg0, primarg1, primother;
3330 unsigned int correct_width;
3332 if (operand_equal_p (arg0, arg1, 0))
3333 return 1;
3335 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3336 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3337 return 0;
3339 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3340 and see if the inner values are the same. This removes any
3341 signedness comparison, which doesn't matter here. */
3342 primarg0 = arg0, primarg1 = arg1;
3343 STRIP_NOPS (primarg0);
3344 STRIP_NOPS (primarg1);
3345 if (operand_equal_p (primarg0, primarg1, 0))
3346 return 1;
3348 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3349 actual comparison operand, ARG0.
3351 First throw away any conversions to wider types
3352 already present in the operands. */
3354 primarg1 = get_narrower (arg1, &unsignedp1);
3355 primother = get_narrower (other, &unsignedpo);
3357 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3358 if (unsignedp1 == unsignedpo
3359 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3360 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3362 tree type = TREE_TYPE (arg0);
3364 /* Make sure shorter operand is extended the right way
3365 to match the longer operand. */
3366 primarg1 = fold_convert (signed_or_unsigned_type_for
3367 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3369 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3370 return 1;
3373 return 0;
3376 /* See if ARG is an expression that is either a comparison or is performing
3377 arithmetic on comparisons. The comparisons must only be comparing
3378 two different values, which will be stored in *CVAL1 and *CVAL2; if
3379 they are nonzero it means that some operands have already been found.
3380 No variables may be used anywhere else in the expression except in the
3381 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3382 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3384 If this is true, return 1. Otherwise, return zero. */
3386 static int
3387 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3389 enum tree_code code = TREE_CODE (arg);
3390 enum tree_code_class class = TREE_CODE_CLASS (code);
3392 /* We can handle some of the tcc_expression cases here. */
3393 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3394 class = tcc_unary;
3395 else if (class == tcc_expression
3396 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3397 || code == COMPOUND_EXPR))
3398 class = tcc_binary;
3400 else if (class == tcc_expression && code == SAVE_EXPR
3401 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3403 /* If we've already found a CVAL1 or CVAL2, this expression is
3404 too complex to handle. */
3405 if (*cval1 || *cval2)
3406 return 0;
3408 class = tcc_unary;
3409 *save_p = 1;
3412 switch (class)
3414 case tcc_unary:
3415 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3417 case tcc_binary:
3418 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3419 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3420 cval1, cval2, save_p));
3422 case tcc_constant:
3423 return 1;
3425 case tcc_expression:
3426 if (code == COND_EXPR)
3427 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3428 cval1, cval2, save_p)
3429 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3430 cval1, cval2, save_p)
3431 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3432 cval1, cval2, save_p));
3433 return 0;
3435 case tcc_comparison:
3436 /* First see if we can handle the first operand, then the second. For
3437 the second operand, we know *CVAL1 can't be zero. It must be that
3438 one side of the comparison is each of the values; test for the
3439 case where this isn't true by failing if the two operands
3440 are the same. */
3442 if (operand_equal_p (TREE_OPERAND (arg, 0),
3443 TREE_OPERAND (arg, 1), 0))
3444 return 0;
3446 if (*cval1 == 0)
3447 *cval1 = TREE_OPERAND (arg, 0);
3448 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3450 else if (*cval2 == 0)
3451 *cval2 = TREE_OPERAND (arg, 0);
3452 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3454 else
3455 return 0;
3457 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3459 else if (*cval2 == 0)
3460 *cval2 = TREE_OPERAND (arg, 1);
3461 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3463 else
3464 return 0;
3466 return 1;
3468 default:
3469 return 0;
3473 /* ARG is a tree that is known to contain just arithmetic operations and
3474 comparisons. Evaluate the operations in the tree substituting NEW0 for
3475 any occurrence of OLD0 as an operand of a comparison and likewise for
3476 NEW1 and OLD1. */
3478 static tree
3479 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3481 tree type = TREE_TYPE (arg);
3482 enum tree_code code = TREE_CODE (arg);
3483 enum tree_code_class class = TREE_CODE_CLASS (code);
3485 /* We can handle some of the tcc_expression cases here. */
3486 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3487 class = tcc_unary;
3488 else if (class == tcc_expression
3489 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3490 class = tcc_binary;
3492 switch (class)
3494 case tcc_unary:
3495 return fold_build1 (code, type,
3496 eval_subst (TREE_OPERAND (arg, 0),
3497 old0, new0, old1, new1));
3499 case tcc_binary:
3500 return fold_build2 (code, type,
3501 eval_subst (TREE_OPERAND (arg, 0),
3502 old0, new0, old1, new1),
3503 eval_subst (TREE_OPERAND (arg, 1),
3504 old0, new0, old1, new1));
3506 case tcc_expression:
3507 switch (code)
3509 case SAVE_EXPR:
3510 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3512 case COMPOUND_EXPR:
3513 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3515 case COND_EXPR:
3516 return fold_build3 (code, type,
3517 eval_subst (TREE_OPERAND (arg, 0),
3518 old0, new0, old1, new1),
3519 eval_subst (TREE_OPERAND (arg, 1),
3520 old0, new0, old1, new1),
3521 eval_subst (TREE_OPERAND (arg, 2),
3522 old0, new0, old1, new1));
3523 default:
3524 break;
3526 /* Fall through - ??? */
3528 case tcc_comparison:
3530 tree arg0 = TREE_OPERAND (arg, 0);
3531 tree arg1 = TREE_OPERAND (arg, 1);
3533 /* We need to check both for exact equality and tree equality. The
3534 former will be true if the operand has a side-effect. In that
3535 case, we know the operand occurred exactly once. */
3537 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3538 arg0 = new0;
3539 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3540 arg0 = new1;
3542 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3543 arg1 = new0;
3544 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3545 arg1 = new1;
3547 return fold_build2 (code, type, arg0, arg1);
3550 default:
3551 return arg;
3555 /* Return a tree for the case when the result of an expression is RESULT
3556 converted to TYPE and OMITTED was previously an operand of the expression
3557 but is now not needed (e.g., we folded OMITTED * 0).
3559 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3560 the conversion of RESULT to TYPE. */
3562 tree
3563 omit_one_operand (tree type, tree result, tree omitted)
3565 tree t = fold_convert (type, result);
3567 /* If the resulting operand is an empty statement, just return the omitted
3568 statement cast to void. */
3569 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3570 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3572 if (TREE_SIDE_EFFECTS (omitted))
3573 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3575 return non_lvalue (t);
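/* For example, when folding x * 0 the multiplication disappears but
   any side effects of x must be preserved:

     omit_one_operand (type, integer_zero_node, x)

   yields plain 0 when x has no side effects, and otherwise the
   equivalent of the C comma expression (x, 0), built as a
   COMPOUND_EXPR.  */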
3578 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3580 static tree
3581 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3583 tree t = fold_convert (type, result);
3585 /* If the resulting operand is an empty statement, just return the omitted
3586 statement cast to void. */
3587 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3588 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3590 if (TREE_SIDE_EFFECTS (omitted))
3591 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3593 return pedantic_non_lvalue (t);
3596 /* Return a tree for the case when the result of an expression is RESULT
3597 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3598 of the expression but are now not needed.
3600 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3601 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3602 evaluated before OMITTED2. Otherwise, if neither has side effects,
3603 just do the conversion of RESULT to TYPE. */
3605 tree
3606 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3608 tree t = fold_convert (type, result);
3610 if (TREE_SIDE_EFFECTS (omitted2))
3611 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3612 if (TREE_SIDE_EFFECTS (omitted1))
3613 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3615 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3619 /* Return a simplified tree node for the truth-negation of ARG. This
3620 never alters ARG itself. We assume that ARG is an operation that
3621 returns a truth value (0 or 1).
3623 FIXME: one would think we would fold the result, but it causes
3624 problems with the dominator optimizer. */
3626 tree
3627 fold_truth_not_expr (tree arg)
3629 tree type = TREE_TYPE (arg);
3630 enum tree_code code = TREE_CODE (arg);
3632 /* If this is a comparison, we can simply invert it, except for
3633 floating-point non-equality comparisons, in which case we just
3634 enclose a TRUTH_NOT_EXPR around what we have. */
3636 if (TREE_CODE_CLASS (code) == tcc_comparison)
3638 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3639 if (FLOAT_TYPE_P (op_type)
3640 && flag_trapping_math
3641 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3642 && code != NE_EXPR && code != EQ_EXPR)
3643 return NULL_TREE;
3644 else
3646 code = invert_tree_comparison (code,
3647 HONOR_NANS (TYPE_MODE (op_type)));
3648 if (code == ERROR_MARK)
3649 return NULL_TREE;
3650 else
3651 return build2 (code, type,
3652 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3656 switch (code)
3658 case INTEGER_CST:
3659 return constant_boolean_node (integer_zerop (arg), type);
3661 case TRUTH_AND_EXPR:
3662 return build2 (TRUTH_OR_EXPR, type,
3663 invert_truthvalue (TREE_OPERAND (arg, 0)),
3664 invert_truthvalue (TREE_OPERAND (arg, 1)));
3666 case TRUTH_OR_EXPR:
3667 return build2 (TRUTH_AND_EXPR, type,
3668 invert_truthvalue (TREE_OPERAND (arg, 0)),
3669 invert_truthvalue (TREE_OPERAND (arg, 1)));
3671 case TRUTH_XOR_EXPR:
3672 /* Here we can invert either operand. We invert the first operand
3673 unless the second operand is a TRUTH_NOT_EXPR in which case our
3674 result is the XOR of the first operand with the inside of the
3675 negation of the second operand. */
3677 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3678 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3679 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3680 else
3681 return build2 (TRUTH_XOR_EXPR, type,
3682 invert_truthvalue (TREE_OPERAND (arg, 0)),
3683 TREE_OPERAND (arg, 1));
3685 case TRUTH_ANDIF_EXPR:
3686 return build2 (TRUTH_ORIF_EXPR, type,
3687 invert_truthvalue (TREE_OPERAND (arg, 0)),
3688 invert_truthvalue (TREE_OPERAND (arg, 1)));
3690 case TRUTH_ORIF_EXPR:
3691 return build2 (TRUTH_ANDIF_EXPR, type,
3692 invert_truthvalue (TREE_OPERAND (arg, 0)),
3693 invert_truthvalue (TREE_OPERAND (arg, 1)));
3695 case TRUTH_NOT_EXPR:
3696 return TREE_OPERAND (arg, 0);
3698 case COND_EXPR:
3700 tree arg1 = TREE_OPERAND (arg, 1);
3701 tree arg2 = TREE_OPERAND (arg, 2);
3702 /* A COND_EXPR may have a throw as one operand, which
3703 then has void type. Just leave void operands
3704 as they are. */
3705 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3706 VOID_TYPE_P (TREE_TYPE (arg1))
3707 ? arg1 : invert_truthvalue (arg1),
3708 VOID_TYPE_P (TREE_TYPE (arg2))
3709 ? arg2 : invert_truthvalue (arg2));
3712 case COMPOUND_EXPR:
3713 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3714 invert_truthvalue (TREE_OPERAND (arg, 1)));
3716 case NON_LVALUE_EXPR:
3717 return invert_truthvalue (TREE_OPERAND (arg, 0));
3719 case NOP_EXPR:
3720 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3721 return build1 (TRUTH_NOT_EXPR, type, arg);
3723 case CONVERT_EXPR:
3724 case FLOAT_EXPR:
3725 return build1 (TREE_CODE (arg), type,
3726 invert_truthvalue (TREE_OPERAND (arg, 0)));
3728 case BIT_AND_EXPR:
3729 if (!integer_onep (TREE_OPERAND (arg, 1)))
3730 break;
3731 return build2 (EQ_EXPR, type, arg,
3732 build_int_cst (type, 0));
3734 case SAVE_EXPR:
3735 return build1 (TRUTH_NOT_EXPR, type, arg);
3737 case CLEANUP_POINT_EXPR:
3738 return build1 (CLEANUP_POINT_EXPR, type,
3739 invert_truthvalue (TREE_OPERAND (arg, 0)));
3741 default:
3742 break;
3745 return NULL_TREE;
3748 /* Return a simplified tree node for the truth-negation of ARG. This
3749 never alters ARG itself. We assume that ARG is an operation that
3750 returns a truth value (0 or 1).
3752 FIXME: one would think we would fold the result, but it causes
3753 problems with the dominator optimizer. */
3755 tree
3756 invert_truthvalue (tree arg)
3758 tree tem;
3760 if (TREE_CODE (arg) == ERROR_MARK)
3761 return arg;
3763 tem = fold_truth_not_expr (arg);
3764 if (!tem)
3765 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3767 return tem;
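/* The negation is applied structurally, following De Morgan's laws;
   for instance (in source notation):

     invert_truthvalue (a && b)   gives   !a || !b
     invert_truthvalue (a < b)    gives   a >= b     (no NaNs)
     invert_truthvalue (!a)       gives   a

   When fold_truth_not_expr cannot simplify -- e.g. a floating-point
   inequality under flag_trapping_math -- the result is simply the
   argument wrapped in a TRUTH_NOT_EXPR.  */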
3770 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3771 operands are another bit-wise operation with a common input. If so,
3772 distribute the bit operations to save an operation and possibly two if
3773 constants are involved. For example, convert
3774 (A | B) & (A | C) into A | (B & C)
3775 Further simplification will occur if B and C are constants.
3777 If this optimization cannot be done, 0 will be returned. */
3779 static tree
3780 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3782 tree common;
3783 tree left, right;
3785 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3786 || TREE_CODE (arg0) == code
3787 || (TREE_CODE (arg0) != BIT_AND_EXPR
3788 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3789 return 0;
3791 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3793 common = TREE_OPERAND (arg0, 0);
3794 left = TREE_OPERAND (arg0, 1);
3795 right = TREE_OPERAND (arg1, 1);
3797 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3799 common = TREE_OPERAND (arg0, 0);
3800 left = TREE_OPERAND (arg0, 1);
3801 right = TREE_OPERAND (arg1, 0);
3803 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3805 common = TREE_OPERAND (arg0, 1);
3806 left = TREE_OPERAND (arg0, 0);
3807 right = TREE_OPERAND (arg1, 1);
3809 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3811 common = TREE_OPERAND (arg0, 1);
3812 left = TREE_OPERAND (arg0, 0);
3813 right = TREE_OPERAND (arg1, 0);
3815 else
3816 return 0;
3818 return fold_build2 (TREE_CODE (arg0), type, common,
3819 fold_build2 (code, type, left, right));
3822 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3823 with code CODE. This optimization is unsafe. */
3824 static tree
3825 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3827 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3828 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3830 /* (A / C) +- (B / C) -> (A +- B) / C. */
3831 if (mul0 == mul1
3832 && operand_equal_p (TREE_OPERAND (arg0, 1),
3833 TREE_OPERAND (arg1, 1), 0))
3834 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3835 fold_build2 (code, type,
3836 TREE_OPERAND (arg0, 0),
3837 TREE_OPERAND (arg1, 0)),
3838 TREE_OPERAND (arg0, 1));
3840 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3841 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3842 TREE_OPERAND (arg1, 0), 0)
3843 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3844 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3846 REAL_VALUE_TYPE r0, r1;
3847 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3848 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3849 if (!mul0)
3850 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3851 if (!mul1)
3852 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3853 real_arithmetic (&r0, code, &r0, &r1);
3854 return fold_build2 (MULT_EXPR, type,
3855 TREE_OPERAND (arg0, 0),
3856 build_real (type, r0));
3859 return NULL_TREE;
3862 /* Subroutine for fold_truthop: decode a field reference.
3864 If EXP is a comparison reference, we return the innermost reference.
3866 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3867 set to the starting bit number.
3869 If the innermost field can be completely contained in a mode-sized
3870 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3872 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3873 otherwise it is not changed.
3875 *PUNSIGNEDP is set to the signedness of the field.
3877 *PMASK is set to the mask used. This is either contained in a
3878 BIT_AND_EXPR or derived from the width of the field.
3880 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3882 Return 0 if this is not a component reference or is one that we can't
3883 do anything with. */
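/* As a rough illustration, given

     struct s { unsigned f : 3; } *p;

   the tree for p->f & 5 decodes with the containing object as the
   returned reference, *PBITSIZE == 3, *PAND_MASK == 5, and *PMASK the
   AND mask restricted to the 3-bit field, i.e. 5 & 7 == 5; *PBITPOS
   depends on the record layout. */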
3885 static tree
3886 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3887 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3888 int *punsignedp, int *pvolatilep,
3889 tree *pmask, tree *pand_mask)
3891 tree outer_type = 0;
3892 tree and_mask = 0;
3893 tree mask, inner, offset;
3894 tree unsigned_type;
3895 unsigned int precision;
3897 /* All the optimizations using this function assume integer fields.
3898 There are problems with FP fields since the type_for_size call
3899 below can fail for, e.g., XFmode. */
3900 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3901 return 0;
3903 /* We are interested in the bare arrangement of bits, so strip everything
3904 that doesn't affect the machine mode. However, record the type of the
3905 outermost expression if it may matter below. */
3906 if (CONVERT_EXPR_P (exp)
3907 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3908 outer_type = TREE_TYPE (exp);
3909 STRIP_NOPS (exp);
3911 if (TREE_CODE (exp) == BIT_AND_EXPR)
3913 and_mask = TREE_OPERAND (exp, 1);
3914 exp = TREE_OPERAND (exp, 0);
3915 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3916 if (TREE_CODE (and_mask) != INTEGER_CST)
3917 return 0;
3920 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3921 punsignedp, pvolatilep, false);
3922 if ((inner == exp && and_mask == 0)
3923 || *pbitsize < 0 || offset != 0
3924 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3925 return 0;
3927 /* If the number of bits in the reference is the same as the bitsize of
3928 the outer type, then the outer type gives the signedness. Otherwise
3929 (in case of a small bitfield) the signedness is unchanged. */
3930 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3931 *punsignedp = TYPE_UNSIGNED (outer_type);
3933 /* Compute the mask to access the bitfield. */
3934 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3935 precision = TYPE_PRECISION (unsigned_type);
3937 mask = build_int_cst_type (unsigned_type, -1);
3939 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3940 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3942 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3943 if (and_mask != 0)
3944 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3945 fold_convert (unsigned_type, and_mask), mask);
3947 *pmask = mask;
3948 *pand_mask = and_mask;
3949 return inner;
3952 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3953 represents the sign bit of EXP's type. If EXP represents a sign
3954 or zero extension, also test VAL against the unextended type.
3955 The return value is the (sub)expression whose sign bit is VAL,
3956 or NULL_TREE otherwise. */
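/* For instance, if EXP has type signed char, VAL == -128 (bit pattern
   0x80) is its sign bit and EXP is returned; for VAL == 0x40 the bits
   do not match and NULL_TREE is returned. */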
3958 static tree
3959 sign_bit_p (tree exp, const_tree val)
3961 unsigned HOST_WIDE_INT mask_lo, lo;
3962 HOST_WIDE_INT mask_hi, hi;
3963 int width;
3964 tree t;
3966 /* Tree EXP must have an integral type. */
3967 t = TREE_TYPE (exp);
3968 if (! INTEGRAL_TYPE_P (t))
3969 return NULL_TREE;
3971 /* Tree VAL must be an integer constant. */
3972 if (TREE_CODE (val) != INTEGER_CST
3973 || TREE_OVERFLOW (val))
3974 return NULL_TREE;
3976 width = TYPE_PRECISION (t);
3977 if (width > HOST_BITS_PER_WIDE_INT)
3979 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3980 lo = 0;
3982 mask_hi = ((unsigned HOST_WIDE_INT) -1
3983 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3984 mask_lo = -1;
3986 else
3988 hi = 0;
3989 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3991 mask_hi = 0;
3992 mask_lo = ((unsigned HOST_WIDE_INT) -1
3993 >> (HOST_BITS_PER_WIDE_INT - width));
3996 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3997 treat VAL as if it were unsigned. */
3998 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3999 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4000 return exp;
4002 /* Handle extension from a narrower type. */
4003 if (TREE_CODE (exp) == NOP_EXPR
4004 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4005 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4007 return NULL_TREE;
4010 /* Subroutine for fold_truthop: determine if an operand is simple enough
4011 to be evaluated unconditionally. */
4013 static int
4014 simple_operand_p (const_tree exp)
4016 /* Strip any conversions that don't change the machine mode. */
4017 STRIP_NOPS (exp);
4019 return (CONSTANT_CLASS_P (exp)
4020 || TREE_CODE (exp) == SSA_NAME
4021 || (DECL_P (exp)
4022 && ! TREE_ADDRESSABLE (exp)
4023 && ! TREE_THIS_VOLATILE (exp)
4024 && ! DECL_NONLOCAL (exp)
4025 /* Don't regard global variables as simple. They may be
4026 allocated in ways unknown to the compiler (shared memory,
4027 #pragma weak, etc). */
4028 && ! TREE_PUBLIC (exp)
4029 && ! DECL_EXTERNAL (exp)
4030 /* Loading a static variable is unduly expensive, but global
4031 registers aren't expensive. */
4032 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4035 /* The following functions are subroutines to fold_range_test and allow it to
4036 try to change a logical combination of comparisons into a range test.
4038 For example, both
4039 X == 2 || X == 3 || X == 4 || X == 5
4040 and
4041 X >= 2 && X <= 5
4042 are converted to
4043 (unsigned) (X - 2) <= 3
4045 We describe each set of comparisons as being either inside or outside
4046 a range, using a variable named like IN_P, and then describe the
4047 range with a lower and upper bound. If one of the bounds is omitted,
4048 it represents either the highest or lowest value of the type.
4050 In the comments below, we represent a range by two numbers in brackets
4051 preceded by a "+" to designate being inside that range, or a "-" to
4052 designate being outside that range, so the condition can be inverted by
4053 flipping the prefix. An omitted bound is represented by a "-". For
4054 example, "- [-, 10]" means being outside the range starting at the lowest
4055 possible value and ending at 10, in other words, being greater than 10.
4056 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4057 always false.
4059 We set up things so that the missing bounds are handled in a consistent
4060 manner so neither a missing bound nor "true" and "false" need to be
4061 handled using a special case. */
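/* As a worked instance of this notation: X >= 2 && X <= 5 is the range
   "+ [2, 5]"; X > 10 is "- [-, 10]"; and merging "+ [2, 3]" with
   "+ [3, 5]" for a logical AND gives "+ [3, 3]", i.e. X == 3. */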
4063 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4064 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4065 and UPPER1_P are nonzero if the respective argument is an upper bound
4066 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4067 must be specified for a comparison. ARG1 will be converted to ARG0's
4068 type if both are specified. */
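/* E.g. range_binop (LT_EXPR, type, NULL_TREE, 0, c, 1) asks whether a
   missing lower bound (conceptually minus infinity) is below the
   finite upper bound c, which is always true. */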
4070 static tree
4071 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4072 tree arg1, int upper1_p)
4074 tree tem;
4075 int result;
4076 int sgn0, sgn1;
4078 /* If neither arg represents infinity, do the normal operation.
4079 Else, if not a comparison, return infinity. Else handle the special
4080 comparison rules. Note that most of the cases below won't occur, but
4081 are handled for consistency. */
4083 if (arg0 != 0 && arg1 != 0)
4085 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4086 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4087 STRIP_NOPS (tem);
4088 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4091 if (TREE_CODE_CLASS (code) != tcc_comparison)
4092 return 0;
4094 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4095 for neither. In real maths, we cannot assume open ended ranges are
4096 the same. But, this is computer arithmetic, where numbers are finite.
4097 We can therefore model any missing bound as a value Z, Z being
4098 greater than any representable number. This permits
4099 us to treat unbounded ranges as equal. */
4100 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4101 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4102 switch (code)
4104 case EQ_EXPR:
4105 result = sgn0 == sgn1;
4106 break;
4107 case NE_EXPR:
4108 result = sgn0 != sgn1;
4109 break;
4110 case LT_EXPR:
4111 result = sgn0 < sgn1;
4112 break;
4113 case LE_EXPR:
4114 result = sgn0 <= sgn1;
4115 break;
4116 case GT_EXPR:
4117 result = sgn0 > sgn1;
4118 break;
4119 case GE_EXPR:
4120 result = sgn0 >= sgn1;
4121 break;
4122 default:
4123 gcc_unreachable ();
4126 return constant_boolean_node (result, type);
4129 /* Given EXP, a logical expression, set the range it is testing into
4130 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4131 actually being tested. *PLOW and *PHIGH will be made of the same
4132 type as the returned expression. If EXP is not a comparison, we
4133 will most likely not be returning a useful value and range. Set
4134 *STRICT_OVERFLOW_P to true if the return value is only valid
4135 because signed overflow is undefined; otherwise, do not change
4136 *STRICT_OVERFLOW_P. */
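/* For example, given EXP == (x > 5) for signed x, this returns x with
   *PIN_P == 0, *PLOW == NULL_TREE and *PHIGH == 5, i.e. the range
   "- [-, 5]"; wrapping EXP in a TRUTH_NOT_EXPR flips *PIN_P to 1. */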
4138 static tree
4139 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4140 bool *strict_overflow_p)
4142 enum tree_code code;
4143 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4144 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4145 int in_p, n_in_p;
4146 tree low, high, n_low, n_high;
4148 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4149 and see if we can refine the range. Some of the cases below may not
4150 happen, but it doesn't seem worth worrying about this. We "continue"
4151 the outer loop when we've changed something; otherwise we "break"
4152 the switch, which will "break" the while. */
4154 in_p = 0;
4155 low = high = build_int_cst (TREE_TYPE (exp), 0);
4157 while (1)
4159 code = TREE_CODE (exp);
4160 exp_type = TREE_TYPE (exp);
4162 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4164 if (TREE_OPERAND_LENGTH (exp) > 0)
4165 arg0 = TREE_OPERAND (exp, 0);
4166 if (TREE_CODE_CLASS (code) == tcc_comparison
4167 || TREE_CODE_CLASS (code) == tcc_unary
4168 || TREE_CODE_CLASS (code) == tcc_binary)
4169 arg0_type = TREE_TYPE (arg0);
4170 if (TREE_CODE_CLASS (code) == tcc_binary
4171 || TREE_CODE_CLASS (code) == tcc_comparison
4172 || (TREE_CODE_CLASS (code) == tcc_expression
4173 && TREE_OPERAND_LENGTH (exp) > 1))
4174 arg1 = TREE_OPERAND (exp, 1);
4177 switch (code)
4179 case TRUTH_NOT_EXPR:
4180 in_p = ! in_p, exp = arg0;
4181 continue;
4183 case EQ_EXPR: case NE_EXPR:
4184 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4185 /* We can only do something if the range is testing for zero
4186 and if the second operand is an integer constant. Note that
4187 saying something is "in" the range we make is done by
4188 complementing IN_P, since IN_P starts out clear for the initial
4189 case of being not equal to zero; "out" is leaving it alone.
4190 if (low == 0 || high == 0
4191 || ! integer_zerop (low) || ! integer_zerop (high)
4192 || TREE_CODE (arg1) != INTEGER_CST)
4193 break;
4195 switch (code)
4197 case NE_EXPR: /* - [c, c] */
4198 low = high = arg1;
4199 break;
4200 case EQ_EXPR: /* + [c, c] */
4201 in_p = ! in_p, low = high = arg1;
4202 break;
4203 case GT_EXPR: /* - [-, c] */
4204 low = 0, high = arg1;
4205 break;
4206 case GE_EXPR: /* + [c, -] */
4207 in_p = ! in_p, low = arg1, high = 0;
4208 break;
4209 case LT_EXPR: /* - [c, -] */
4210 low = arg1, high = 0;
4211 break;
4212 case LE_EXPR: /* + [-, c] */
4213 in_p = ! in_p, low = 0, high = arg1;
4214 break;
4215 default:
4216 gcc_unreachable ();
4219 /* If this is an unsigned comparison, we also know that EXP is
4220 greater than or equal to zero. We base the range tests we make
4221 on that fact, so we record it here so we can parse existing
4222 range tests. We test arg0_type since often the return type
4223 of, e.g. EQ_EXPR, is boolean. */
4224 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4226 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4227 in_p, low, high, 1,
4228 build_int_cst (arg0_type, 0),
4229 NULL_TREE))
4230 break;
4232 in_p = n_in_p, low = n_low, high = n_high;
4234 /* If the high bound is missing, but we have a nonzero low
4235 bound, reverse the range so it goes from zero to the low bound
4236 minus 1. */
4237 if (high == 0 && low && ! integer_zerop (low))
4239 in_p = ! in_p;
4240 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4241 integer_one_node, 0);
4242 low = build_int_cst (arg0_type, 0);
4246 exp = arg0;
4247 continue;
4249 case NEGATE_EXPR:
4250 /* (-x) IN [a,b] -> x in [-b, -a] */
4251 n_low = range_binop (MINUS_EXPR, exp_type,
4252 build_int_cst (exp_type, 0),
4253 0, high, 1);
4254 n_high = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4256 0, low, 0);
4257 low = n_low, high = n_high;
4258 exp = arg0;
4259 continue;
4261 case BIT_NOT_EXPR:
4262 /* ~ X -> -X - 1 */
4263 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4264 build_int_cst (exp_type, 1));
4265 continue;
4267 case PLUS_EXPR: case MINUS_EXPR:
4268 if (TREE_CODE (arg1) != INTEGER_CST)
4269 break;
4271 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4272 move a constant to the other side. */
4273 if (!TYPE_UNSIGNED (arg0_type)
4274 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4275 break;
4277 /* If EXP is signed, any overflow in the computation is undefined,
4278 so we don't worry about it so long as our computations on
4279 the bounds don't overflow. For unsigned, overflow is defined
4280 and this is exactly the right thing. */
4281 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4282 arg0_type, low, 0, arg1, 0);
4283 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4284 arg0_type, high, 1, arg1, 0);
4285 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4286 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4287 break;
4289 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4290 *strict_overflow_p = true;
4292 /* Check for an unsigned range which has wrapped around the maximum
4293 value thus making n_high < n_low, and normalize it. */
4294 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4296 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4297 integer_one_node, 0);
4298 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4299 integer_one_node, 0);
4301 /* If the range is of the form +/- [ x+1, x ], we won't
4302 be able to normalize it. But then, it represents the
4303 whole range or the empty set, so make it
4304 +/- [ -, - ]. */
4305 if (tree_int_cst_equal (n_low, low)
4306 && tree_int_cst_equal (n_high, high))
4307 low = high = 0;
4308 else
4309 in_p = ! in_p;
4311 else
4312 low = n_low, high = n_high;
4314 exp = arg0;
4315 continue;
4317 CASE_CONVERT: case NON_LVALUE_EXPR:
4318 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4319 break;
4321 if (! INTEGRAL_TYPE_P (arg0_type)
4322 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4323 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4324 break;
4326 n_low = low, n_high = high;
4328 if (n_low != 0)
4329 n_low = fold_convert (arg0_type, n_low);
4331 if (n_high != 0)
4332 n_high = fold_convert (arg0_type, n_high);
4335 /* If we're converting arg0 from an unsigned type to the signed
4336 type of exp, we will be doing the comparison as unsigned.
4337 The tests above have already verified that LOW and HIGH
4338 are both positive.
4340 So we have to ensure that we will handle large unsigned
4341 values the same way that the current signed bounds treat
4342 negative values. */
4344 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4346 tree high_positive;
4347 tree equiv_type;
4348 /* For fixed-point modes, we need to pass the saturating flag
4349 as the 2nd parameter. */
4350 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4351 equiv_type = lang_hooks.types.type_for_mode
4352 (TYPE_MODE (arg0_type),
4353 TYPE_SATURATING (arg0_type));
4354 else
4355 equiv_type = lang_hooks.types.type_for_mode
4356 (TYPE_MODE (arg0_type), 1);
4358 /* A range without an upper bound is, naturally, unbounded.
4359 Since convert would have cropped a very large value, use
4360 the max value for the destination type. */
4361 high_positive
4362 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4363 : TYPE_MAX_VALUE (arg0_type);
4365 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4366 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4367 fold_convert (arg0_type,
4368 high_positive),
4369 build_int_cst (arg0_type, 1));
4371 /* If the low bound is specified, "and" the range with the
4372 range for which the original unsigned value will be
4373 positive. */
4374 if (low != 0)
4376 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4377 1, n_low, n_high, 1,
4378 fold_convert (arg0_type,
4379 integer_zero_node),
4380 high_positive))
4381 break;
4383 in_p = (n_in_p == in_p);
4385 else
4387 /* Otherwise, "or" the range with the range of the input
4388 that will be interpreted as negative. */
4389 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4390 0, n_low, n_high, 1,
4391 fold_convert (arg0_type,
4392 integer_zero_node),
4393 high_positive))
4394 break;
4396 in_p = (in_p != n_in_p);
4400 exp = arg0;
4401 low = n_low, high = n_high;
4402 continue;
4404 default:
4405 break;
4408 break;
4411 /* If EXP is a constant, we can evaluate whether this is true or false. */
4412 if (TREE_CODE (exp) == INTEGER_CST)
4414 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4415 exp, 0, low, 0))
4416 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 exp, 1, high, 1)));
4418 low = high = 0;
4419 exp = 0;
4422 *pin_p = in_p, *plow = low, *phigh = high;
4423 return exp;
4426 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4427 type, TYPE, return an expression to test if EXP is in (or out of, depending
4428 on IN_P) the range. Return 0 if the test couldn't be created. */
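/* E.g. for IN_P == 1, LOW == 2 and HIGH == 5 this produces, via the
   subtraction trick below, the single test (unsigned) (EXP - 2) <= 3. */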
4430 static tree
4431 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4433 tree etype = TREE_TYPE (exp);
4434 tree value;
4436 #ifdef HAVE_canonicalize_funcptr_for_compare
4437 /* Disable this optimization for function pointer expressions
4438 on targets that require function pointer canonicalization. */
4439 if (HAVE_canonicalize_funcptr_for_compare
4440 && TREE_CODE (etype) == POINTER_TYPE
4441 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4442 return NULL_TREE;
4443 #endif
4445 if (! in_p)
4447 value = build_range_check (type, exp, 1, low, high);
4448 if (value != 0)
4449 return invert_truthvalue (value);
4451 return 0;
4454 if (low == 0 && high == 0)
4455 return build_int_cst (type, 1);
4457 if (low == 0)
4458 return fold_build2 (LE_EXPR, type, exp,
4459 fold_convert (etype, high));
4461 if (high == 0)
4462 return fold_build2 (GE_EXPR, type, exp,
4463 fold_convert (etype, low));
4465 if (operand_equal_p (low, high, 0))
4466 return fold_build2 (EQ_EXPR, type, exp,
4467 fold_convert (etype, low));
4469 if (integer_zerop (low))
4471 if (! TYPE_UNSIGNED (etype))
4473 etype = unsigned_type_for (etype);
4474 high = fold_convert (etype, high);
4475 exp = fold_convert (etype, exp);
4477 return build_range_check (type, exp, 1, 0, high);
4480 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4481 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4483 unsigned HOST_WIDE_INT lo;
4484 HOST_WIDE_INT hi;
4485 int prec;
4487 prec = TYPE_PRECISION (etype);
4488 if (prec <= HOST_BITS_PER_WIDE_INT)
4490 hi = 0;
4491 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4493 else
4495 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4496 lo = (unsigned HOST_WIDE_INT) -1;
4499 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4501 if (TYPE_UNSIGNED (etype))
4503 etype = signed_type_for (etype);
4504 exp = fold_convert (etype, exp);
4506 return fold_build2 (GT_EXPR, type, exp,
4507 build_int_cst (etype, 0));
4511 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4512 This requires wrap-around arithmetic for the type of the expression. */
4513 switch (TREE_CODE (etype))
4515 case INTEGER_TYPE:
4516 /* There is no requirement that LOW be within the range of ETYPE
4517 if the latter is a subtype. It must, however, be within the base
4518 type of ETYPE. So be sure we do the subtraction in that type. */
4519 if (TREE_TYPE (etype))
4520 etype = TREE_TYPE (etype);
4521 break;
4523 case ENUMERAL_TYPE:
4524 case BOOLEAN_TYPE:
4525 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4526 TYPE_UNSIGNED (etype));
4527 break;
4529 default:
4530 break;
4533 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4534 if (TREE_CODE (etype) == INTEGER_TYPE
4535 && !TYPE_OVERFLOW_WRAPS (etype))
4537 tree utype, minv, maxv;
4539 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4540 for the type in question, as we rely on this here. */
4541 utype = unsigned_type_for (etype);
4542 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4543 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4544 integer_one_node, 1);
4545 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4547 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4548 minv, 1, maxv, 1)))
4549 etype = utype;
4550 else
4551 return 0;
4554 high = fold_convert (etype, high);
4555 low = fold_convert (etype, low);
4556 exp = fold_convert (etype, exp);
4558 value = const_binop (MINUS_EXPR, high, low, 0);
4561 if (POINTER_TYPE_P (etype))
4563 if (value != 0 && !TREE_OVERFLOW (value))
4565 low = fold_convert (sizetype, low);
4566 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4567 return build_range_check (type,
4568 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4569 1, build_int_cst (etype, 0), value);
4571 return 0;
4574 if (value != 0 && !TREE_OVERFLOW (value))
4575 return build_range_check (type,
4576 fold_build2 (MINUS_EXPR, etype, exp, low),
4577 1, build_int_cst (etype, 0), value);
4579 return 0;
4582 /* Return the predecessor of VAL in its type, handling the infinite case. */
4584 static tree
4585 range_predecessor (tree val)
4587 tree type = TREE_TYPE (val);
4589 if (INTEGRAL_TYPE_P (type)
4590 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4591 return 0;
4592 else
4593 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4596 /* Return the successor of VAL in its type, handling the infinite case. */
4598 static tree
4599 range_successor (tree val)
4601 tree type = TREE_TYPE (val);
4603 if (INTEGRAL_TYPE_P (type)
4604 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4605 return 0;
4606 else
4607 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4610 /* Given two ranges, see if we can merge them into one. Return 1 if we
4611 can, 0 if we can't. Set the output range into the specified parameters. */
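/* E.g. "+ [2, 10]" merged with "+ [5, 20]" gives "+ [5, 10]", while
   "+ [2, 10]" merged with "- [5, 20]" gives "+ [2, 4]". */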
4613 static int
4614 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4615 tree high0, int in1_p, tree low1, tree high1)
4617 int no_overlap;
4618 int subset;
4619 int temp;
4620 tree tem;
4621 int in_p;
4622 tree low, high;
4623 int lowequal = ((low0 == 0 && low1 == 0)
4624 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4625 low0, 0, low1, 0)));
4626 int highequal = ((high0 == 0 && high1 == 0)
4627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4628 high0, 1, high1, 1)));
4630 /* Make range 0 be the range that starts first, or ends last if they
4631 start at the same value. Swap them if it isn't. */
4632 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4633 low0, 0, low1, 0))
4634 || (lowequal
4635 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4636 high1, 1, high0, 1))))
4638 temp = in0_p, in0_p = in1_p, in1_p = temp;
4639 tem = low0, low0 = low1, low1 = tem;
4640 tem = high0, high0 = high1, high1 = tem;
4643 /* Now flag two cases, whether the ranges are disjoint or whether the
4644 second range is totally subsumed in the first. Note that the tests
4645 below are simplified by the ones above. */
4646 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4647 high0, 1, low1, 0));
4648 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4649 high1, 1, high0, 1));
4651 /* We now have four cases, depending on whether we are including or
4652 excluding the two ranges. */
4653 if (in0_p && in1_p)
4655 /* If they don't overlap, the result is false. If the second range
4656 is a subset it is the result. Otherwise, the range is from the start
4657 of the second to the end of the first. */
4658 if (no_overlap)
4659 in_p = 0, low = high = 0;
4660 else if (subset)
4661 in_p = 1, low = low1, high = high1;
4662 else
4663 in_p = 1, low = low1, high = high0;
4666 else if (in0_p && ! in1_p)
4668 /* If they don't overlap, the result is the first range. If they are
4669 equal, the result is false. If the second range is a subset of the
4670 first, and the ranges begin at the same place, we go from just after
4671 the end of the second range to the end of the first. If the second
4672 range is not a subset of the first, or if it is a subset and both
4673 ranges end at the same place, the range starts at the start of the
4674 first range and ends just before the second range.
4675 Otherwise, we can't describe this as a single range. */
4676 if (no_overlap)
4677 in_p = 1, low = low0, high = high0;
4678 else if (lowequal && highequal)
4679 in_p = 0, low = high = 0;
4680 else if (subset && lowequal)
4682 low = range_successor (high1);
4683 high = high0;
4684 in_p = 1;
4685 if (low == 0)
4687 /* We are in the weird situation where high0 > high1 but
4688 high1 has no successor. Punt. */
4689 return 0;
4692 else if (! subset || highequal)
4694 low = low0;
4695 high = range_predecessor (low1);
4696 in_p = 1;
4697 if (high == 0)
4699 /* low0 < low1 but low1 has no predecessor. Punt. */
4700 return 0;
4703 else
4704 return 0;
4707 else if (! in0_p && in1_p)
4709 /* If they don't overlap, the result is the second range. If the second
4710 is a subset of the first, the result is false. Otherwise,
4711 the range starts just after the first range and ends at the
4712 end of the second. */
4713 if (no_overlap)
4714 in_p = 1, low = low1, high = high1;
4715 else if (subset || highequal)
4716 in_p = 0, low = high = 0;
4717 else
4719 low = range_successor (high0);
4720 high = high1;
4721 in_p = 1;
4722 if (low == 0)
4724 /* high1 > high0 but high0 has no successor. Punt. */
4725 return 0;
4730 else
4732 /* The case where we are excluding both ranges. Here the complex case
4733 is if they don't overlap. In that case, the only time we have a
4734 range is if they are adjacent. If the second is a subset of the
4735 first, the result is the first. Otherwise, the range to exclude
4736 starts at the beginning of the first range and ends at the end of the
4737 second. */
4738 if (no_overlap)
4740 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4741 range_successor (high0),
4742 1, low1, 0)))
4743 in_p = 0, low = low0, high = high1;
4744 else
4746 /* Canonicalize - [min, x] into - [-, x]. */
4747 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4748 switch (TREE_CODE (TREE_TYPE (low0)))
4750 case ENUMERAL_TYPE:
4751 if (TYPE_PRECISION (TREE_TYPE (low0))
4752 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4753 break;
4754 /* FALLTHROUGH */
4755 case INTEGER_TYPE:
4756 if (tree_int_cst_equal (low0,
4757 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4758 low0 = 0;
4759 break;
4760 case POINTER_TYPE:
4761 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4762 && integer_zerop (low0))
4763 low0 = 0;
4764 break;
4765 default:
4766 break;
4769 /* Canonicalize - [x, max] into - [x, -]. */
4770 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4771 switch (TREE_CODE (TREE_TYPE (high1)))
4773 case ENUMERAL_TYPE:
4774 if (TYPE_PRECISION (TREE_TYPE (high1))
4775 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4776 break;
4777 /* FALLTHROUGH */
4778 case INTEGER_TYPE:
4779 if (tree_int_cst_equal (high1,
4780 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4781 high1 = 0;
4782 break;
4783 case POINTER_TYPE:
4784 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4785 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4786 high1, 1,
4787 integer_one_node, 1)))
4788 high1 = 0;
4789 break;
4790 default:
4791 break;
4794 /* The ranges might also be adjacent between the maximum and
4795 minimum values of the given type. For
4796 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4797 return + [x + 1, y - 1]. */
4798 if (low0 == 0 && high1 == 0)
4800 low = range_successor (high0);
4801 high = range_predecessor (low1);
4802 if (low == 0 || high == 0)
4803 return 0;
4805 in_p = 1;
4807 else
4808 return 0;
4811 else if (subset)
4812 in_p = 0, low = low0, high = high0;
4813 else
4814 in_p = 0, low = low0, high = high1;
4817 *pin_p = in_p, *plow = low, *phigh = high;
4818 return 1;
4822 /* Subroutine of fold, looking inside expressions of the form
4823 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4824 of the COND_EXPR. This function is also used to optimize
4825 A op B ? C : A, by reversing the comparison first.
4827 Return a folded expression whose code is not a COND_EXPR
4828 anymore, or NULL_TREE if no folding opportunity is found. */
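/* Typical results: x > 0 ? x : -x becomes ABS_EXPR <x>, and
   x < y ? x : y becomes MIN_EXPR <x, y>, when NaNs and signed
   zeros need not be honored. */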
4830 static tree
4831 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4833 enum tree_code comp_code = TREE_CODE (arg0);
4834 tree arg00 = TREE_OPERAND (arg0, 0);
4835 tree arg01 = TREE_OPERAND (arg0, 1);
4836 tree arg1_type = TREE_TYPE (arg1);
4837 tree tem;
4839 STRIP_NOPS (arg1);
4840 STRIP_NOPS (arg2);
4842 /* If we have A op 0 ? A : -A, consider applying the following
4843 transformations:
4845 A == 0? A : -A same as -A
4846 A != 0? A : -A same as A
4847 A >= 0? A : -A same as abs (A)
4848 A > 0? A : -A same as abs (A)
4849 A <= 0? A : -A same as -abs (A)
4850 A < 0? A : -A same as -abs (A)
4852 None of these transformations work for modes with signed
4853 zeros. If A is +/-0, the first two transformations will
4854 change the sign of the result (from +0 to -0, or vice
4855 versa). The last four will fix the sign of the result,
4856 even though the original expressions could be positive or
4857 negative, depending on the sign of A.
4859 Note that all these transformations are correct if A is
4860 NaN, since the two alternatives (A and -A) are also NaNs. */
4861 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4862 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4863 ? real_zerop (arg01)
4864 : integer_zerop (arg01))
4865 && ((TREE_CODE (arg2) == NEGATE_EXPR
4866 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4867 /* In the case that A is of the form X-Y, '-A' (arg2) may
4868 have already been folded to Y-X, check for that. */
4869 || (TREE_CODE (arg1) == MINUS_EXPR
4870 && TREE_CODE (arg2) == MINUS_EXPR
4871 && operand_equal_p (TREE_OPERAND (arg1, 0),
4872 TREE_OPERAND (arg2, 1), 0)
4873 && operand_equal_p (TREE_OPERAND (arg1, 1),
4874 TREE_OPERAND (arg2, 0), 0))))
4875 switch (comp_code)
4877 case EQ_EXPR:
4878 case UNEQ_EXPR:
4879 tem = fold_convert (arg1_type, arg1);
4880 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4881 case NE_EXPR:
4882 case LTGT_EXPR:
4883 return pedantic_non_lvalue (fold_convert (type, arg1));
4884 case UNGE_EXPR:
4885 case UNGT_EXPR:
4886 if (flag_trapping_math)
4887 break;
4888 /* Fall through. */
4889 case GE_EXPR:
4890 case GT_EXPR:
4891 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4892 arg1 = fold_convert (signed_type_for
4893 (TREE_TYPE (arg1)), arg1);
4894 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4895 return pedantic_non_lvalue (fold_convert (type, tem));
4896 case UNLE_EXPR:
4897 case UNLT_EXPR:
4898 if (flag_trapping_math)
4899 break;
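/* Fall through. */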
4900 case LE_EXPR:
4901 case LT_EXPR:
4902 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4903 arg1 = fold_convert (signed_type_for
4904 (TREE_TYPE (arg1)), arg1);
4905 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4906 return negate_expr (fold_convert (type, tem));
4907 default:
4908 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4909 break;
4912 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4913 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4914 both transformations are correct when A is NaN: A != 0
4915 is then true, and A == 0 is false. */
4917 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4918 && integer_zerop (arg01) && integer_zerop (arg2))
4920 if (comp_code == NE_EXPR)
4921 return pedantic_non_lvalue (fold_convert (type, arg1));
4922 else if (comp_code == EQ_EXPR)
4923 return build_int_cst (type, 0);
4926 /* Try some transformations of A op B ? A : B.
4928 A == B? A : B same as B
4929 A != B? A : B same as A
4930 A >= B? A : B same as max (A, B)
4931 A > B? A : B same as max (B, A)
4932 A <= B? A : B same as min (A, B)
4933 A < B? A : B same as min (B, A)
4935 As above, these transformations don't work in the presence
4936 of signed zeros. For example, if A and B are zeros of
4937 opposite sign, the first two transformations will change
4938 the sign of the result. In the last four, the original
4939 expressions give different results for (A=+0, B=-0) and
4940 (A=-0, B=+0), but the transformed expressions do not.
4942 The first two transformations are correct if either A or B
4943 is a NaN. In the first transformation, the condition will
4944 be false, and B will indeed be chosen. In the case of the
4945 second transformation, the condition A != B will be true,
4946 and A will be chosen.
4948 The conversions to max() and min() are not correct if B is
4949 a number and A is not. The conditions in the original
4950 expressions will be false, so all four give B. The min()
4951 and max() versions would give a NaN instead. */
4952 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4953 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4954 /* Avoid these transformations if the COND_EXPR may be used
4955 as an lvalue in the C++ front-end. PR c++/19199. */
4956 && (in_gimple_form
4957 || (strcmp (lang_hooks.name, "GNU C++") != 0
4958 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4959 || ! maybe_lvalue_p (arg1)
4960 || ! maybe_lvalue_p (arg2)))
4962 tree comp_op0 = arg00;
4963 tree comp_op1 = arg01;
4964 tree comp_type = TREE_TYPE (comp_op0);
4966 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4967 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4969 comp_type = type;
4970 comp_op0 = arg1;
4971 comp_op1 = arg2;
4974 switch (comp_code)
4976 case EQ_EXPR:
4977 return pedantic_non_lvalue (fold_convert (type, arg2));
4978 case NE_EXPR:
4979 return pedantic_non_lvalue (fold_convert (type, arg1));
4980 case LE_EXPR:
4981 case LT_EXPR:
4982 case UNLE_EXPR:
4983 case UNLT_EXPR:
4984 /* In C++ a ?: expression can be an lvalue, so put the
4985 operand which will be used if they are equal first
4986 so that we can convert this back to the
4987 corresponding COND_EXPR. */
4988 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4990 comp_op0 = fold_convert (comp_type, comp_op0);
4991 comp_op1 = fold_convert (comp_type, comp_op1);
4992 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4993 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4994 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4995 return pedantic_non_lvalue (fold_convert (type, tem));
4997 break;
4998 case GE_EXPR:
4999 case GT_EXPR:
5000 case UNGE_EXPR:
5001 case UNGT_EXPR:
5002 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5004 comp_op0 = fold_convert (comp_type, comp_op0);
5005 comp_op1 = fold_convert (comp_type, comp_op1);
5006 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5007 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5008 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5009 return pedantic_non_lvalue (fold_convert (type, tem));
5011 break;
5012 case UNEQ_EXPR:
5013 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5014 return pedantic_non_lvalue (fold_convert (type, arg2));
5015 break;
5016 case LTGT_EXPR:
5017 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5018 return pedantic_non_lvalue (fold_convert (type, arg1));
5019 break;
5020 default:
5021 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5022 break;
5026 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5027 we might still be able to simplify this. For example,
5028 if C1 is one less or one more than C2, this might have started
5029 out as a MIN or MAX and been transformed by this function.
5030 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5032 if (INTEGRAL_TYPE_P (type)
5033 && TREE_CODE (arg01) == INTEGER_CST
5034 && TREE_CODE (arg2) == INTEGER_CST)
5035 switch (comp_code)
5037 case EQ_EXPR:
5038 /* We can replace A with C1 in this case. */
5039 arg1 = fold_convert (type, arg01);
5040 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5042 case LT_EXPR:
5043 /* If C1 is C2 + 1, this is min(A, C2). */
5044 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5045 OEP_ONLY_CONST)
5046 && operand_equal_p (arg01,
5047 const_binop (PLUS_EXPR, arg2,
5048 build_int_cst (type, 1), 0),
5049 OEP_ONLY_CONST))
5050 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5051 type,
5052 fold_convert (type, arg1),
5053 arg2));
5054 break;
5056 case LE_EXPR:
5057 /* If C1 is C2 - 1, this is min(A, C2). */
5058 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5059 OEP_ONLY_CONST)
5060 && operand_equal_p (arg01,
5061 const_binop (MINUS_EXPR, arg2,
5062 build_int_cst (type, 1), 0),
5063 OEP_ONLY_CONST))
5064 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5065 type,
5066 fold_convert (type, arg1),
5067 arg2));
5068 break;
5070 case GT_EXPR:
5071 /* If C1 is C2 - 1, this is max(A, C2). */
5072 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5073 OEP_ONLY_CONST)
5074 && operand_equal_p (arg01,
5075 const_binop (MINUS_EXPR, arg2,
5076 build_int_cst (type, 1), 0),
5077 OEP_ONLY_CONST))
5078 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5079 type,
5080 fold_convert (type, arg1),
5081 arg2));
5082 break;
5084 case GE_EXPR:
5085 /* If C1 is C2 + 1, this is max(A, C2). */
5086 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5087 OEP_ONLY_CONST)
5088 && operand_equal_p (arg01,
5089 const_binop (PLUS_EXPR, arg2,
5090 build_int_cst (type, 1), 0),
5091 OEP_ONLY_CONST))
5092 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5093 type,
5094 fold_convert (type, arg1),
5095 arg2));
5096 break;
5097 case NE_EXPR:
5098 break;
5099 default:
5100 gcc_unreachable ();
5103 return NULL_TREE;
5108 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5109 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5110 #endif
5112 /* EXP is some logical combination of boolean tests. See if we can
5113 merge it into some range test. Return the new tree if so. */
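/* E.g. ch >= '0' && ch <= '9' can be merged into the single range
   test (unsigned) (ch - '0') <= 9. */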
5115 static tree
5116 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5118 int or_op = (code == TRUTH_ORIF_EXPR
5119 || code == TRUTH_OR_EXPR);
5120 int in0_p, in1_p, in_p;
5121 tree low0, low1, low, high0, high1, high;
5122 bool strict_overflow_p = false;
5123 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5124 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5125 tree tem;
5126 const char * const warnmsg = G_("assuming signed overflow does not occur "
5127 "when simplifying range test");
5129 /* If this is an OR operation, invert both sides; we will invert
5130 again at the end. */
5131 if (or_op)
5132 in0_p = ! in0_p, in1_p = ! in1_p;
5134 /* If both expressions are the same, if we can merge the ranges, and we
5135 can build the range test, return it or it inverted. If one of the
5136 ranges is always true or always false, consider it to be the same
5137 expression as the other. */
5138 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5139 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5140 in1_p, low1, high1)
5141 && 0 != (tem = (build_range_check (type,
5142 lhs != 0 ? lhs
5143 : rhs != 0 ? rhs : integer_zero_node,
5144 in_p, low, high))))
5146 if (strict_overflow_p)
5147 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5148 return or_op ? invert_truthvalue (tem) : tem;
5151 /* On machines where the branch cost is expensive, if this is a
5152 short-circuited branch and the underlying object on both sides
5153 is the same, make a non-short-circuit operation. */
5154 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5155 && lhs != 0 && rhs != 0
5156 && (code == TRUTH_ANDIF_EXPR
5157 || code == TRUTH_ORIF_EXPR)
5158 && operand_equal_p (lhs, rhs, 0))
5160 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5161 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5162 which cases we can't do this. */
5163 if (simple_operand_p (lhs))
5164 return build2 (code == TRUTH_ANDIF_EXPR
5165 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5166 type, op0, op1);
5168 else if (lang_hooks.decls.global_bindings_p () == 0
5169 && ! CONTAINS_PLACEHOLDER_P (lhs))
5171 tree common = save_expr (lhs);
5173 if (0 != (lhs = build_range_check (type, common,
5174 or_op ? ! in0_p : in0_p,
5175 low0, high0))
5176 && (0 != (rhs = build_range_check (type, common,
5177 or_op ? ! in1_p : in1_p,
5178 low1, high1))))
5180 if (strict_overflow_p)
5181 fold_overflow_warning (warnmsg,
5182 WARN_STRICT_OVERFLOW_COMPARISON);
5183 return build2 (code == TRUTH_ANDIF_EXPR
5184 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5185 type, lhs, rhs);
5190 return 0;
5193 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5194 bit value. Arrange things so the extra bits will be set to zero if and
5195 only if C is sign-extended to its full width. If MASK is nonzero,
5196 it is an INTEGER_CST that should be AND'ed with the extra bits. */
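/* Worked example, assuming an 8-bit mode, P == 4 and MASK == 0: the
   sign-extended field value C == 0xfc (-4 in 4 bits) yields 0x0c,
   whose extra bits are zero, while the non-sign-extended C == 0x0c
   yields 0xfc, whose extra bits are set. */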
5198 static tree
5199 unextend (tree c, int p, int unsignedp, tree mask)
5201 tree type = TREE_TYPE (c);
5202 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5203 tree temp;
5205 if (p == modesize || unsignedp)
5206 return c;
5208 /* We work by getting just the sign bit into the low-order bit, then
5209 into the high-order bit, then sign-extend. We then XOR that value
5210 with C. */
5211 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5212 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5214 /* We must use a signed type in order to get an arithmetic right shift.
5215 However, we must also avoid introducing accidental overflows, so that
5216 a subsequent call to integer_zerop will work. Hence we must
5217 do the type conversion here. At this point, the constant is either
5218 zero or one, and the conversion to a signed type can never overflow.
5219 We could get an overflow if this conversion is done anywhere else. */
5220 if (TYPE_UNSIGNED (type))
5221 temp = fold_convert (signed_type_for (type), temp);
5223 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5224 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5225 if (mask != 0)
5226 temp = const_binop (BIT_AND_EXPR, temp,
5227 fold_convert (TREE_TYPE (c), mask), 0);
5228 /* If necessary, convert the type back to match the type of C. */
5229 if (TYPE_UNSIGNED (type))
5230 temp = fold_convert (type, temp);
5232 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5235 /* Find ways of folding logical expressions of LHS and RHS:
5236 Try to merge two comparisons to the same innermost item.
5237 Look for range tests like "ch >= '0' && ch <= '9'".
5238 Look for combinations of simple terms on machines with expensive branches
5239 and evaluate the RHS unconditionally.
5241 For example, if we have p->a == 2 && p->b == 4 and we can make an
5242 object large enough to span both A and B, we can do this with a comparison
5243 against the object ANDed with a mask.
5245 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5246 operations to do this with one comparison.
5248 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5249 function and the one above.
5251 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5252 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5254 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5255 two operands.
5257 We return the simplified tree or 0 if no optimization is possible. */
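/* For instance, with

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   p->a == 2 && p->b == 4 can, layout permitting, become a single
   compare of the containing byte against 0x42 on a little-endian
   target; the masks and constant depend on BYTES_BIG_ENDIAN. */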
5259 static tree
5260 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5262 /* If this is the "or" of two comparisons, we can do something if
5263 the comparisons are NE_EXPR. If this is the "and", we can do something
5264 if the comparisons are EQ_EXPR. I.e.,
5265 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5267 WANTED_CODE is this operation code. For single bit fields, we can
5268 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5269 comparison for one-bit fields. */
5271 enum tree_code wanted_code;
5272 enum tree_code lcode, rcode;
5273 tree ll_arg, lr_arg, rl_arg, rr_arg;
5274 tree ll_inner, lr_inner, rl_inner, rr_inner;
5275 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5276 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5277 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5278 HOST_WIDE_INT lnbitsize, lnbitpos;
5279 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5280 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5281 enum machine_mode lnmode;
5282 tree ll_mask, lr_mask, rl_mask, rr_mask;
5283 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5284 tree l_const, r_const;
5285 tree lntype, result;
5286 int first_bit, end_bit;
5287 int volatilep;
5288 tree orig_lhs = lhs, orig_rhs = rhs;
5289 enum tree_code orig_code = code;
5291 /* Start by getting the comparison codes. Fail if anything is volatile.
5292 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5293 it were surrounded with a NE_EXPR. */
5295 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5296 return 0;
5298 lcode = TREE_CODE (lhs);
5299 rcode = TREE_CODE (rhs);
5301 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5303 lhs = build2 (NE_EXPR, truth_type, lhs,
5304 build_int_cst (TREE_TYPE (lhs), 0));
5305 lcode = NE_EXPR;
5308 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5310 rhs = build2 (NE_EXPR, truth_type, rhs,
5311 build_int_cst (TREE_TYPE (rhs), 0));
5312 rcode = NE_EXPR;
5315 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5316 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5317 return 0;
5319 ll_arg = TREE_OPERAND (lhs, 0);
5320 lr_arg = TREE_OPERAND (lhs, 1);
5321 rl_arg = TREE_OPERAND (rhs, 0);
5322 rr_arg = TREE_OPERAND (rhs, 1);
5324 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5325 if (simple_operand_p (ll_arg)
5326 && simple_operand_p (lr_arg))
5328 tree result;
5329 if (operand_equal_p (ll_arg, rl_arg, 0)
5330 && operand_equal_p (lr_arg, rr_arg, 0))
5332 result = combine_comparisons (code, lcode, rcode,
5333 truth_type, ll_arg, lr_arg);
5334 if (result)
5335 return result;
5337 else if (operand_equal_p (ll_arg, rr_arg, 0)
5338 && operand_equal_p (lr_arg, rl_arg, 0))
5340 result = combine_comparisons (code, lcode,
5341 swap_tree_comparison (rcode),
5342 truth_type, ll_arg, lr_arg);
5343 if (result)
5344 return result;
5348 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5349 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5351 /* If the RHS can be evaluated unconditionally and its operands are
5352 simple, it wins to evaluate the RHS unconditionally on machines
5353 with expensive branches. In this case, this isn't a comparison
5354 that can be merged. Avoid doing this if the RHS is a floating-point
5355 comparison since those can trap. */
5357 if (BRANCH_COST >= 2
5358 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5359 && simple_operand_p (rl_arg)
5360 && simple_operand_p (rr_arg))
5362 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5363 if (code == TRUTH_OR_EXPR
5364 && lcode == NE_EXPR && integer_zerop (lr_arg)
5365 && rcode == NE_EXPR && integer_zerop (rr_arg)
5366 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5367 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5368 return build2 (NE_EXPR, truth_type,
5369 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5370 ll_arg, rl_arg),
5371 build_int_cst (TREE_TYPE (ll_arg), 0));
5373 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5374 if (code == TRUTH_AND_EXPR
5375 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5376 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5377 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5378 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5379 return build2 (EQ_EXPR, truth_type,
5380 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5381 ll_arg, rl_arg),
5382 build_int_cst (TREE_TYPE (ll_arg), 0));
5384 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5386 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5387 return build2 (code, truth_type, lhs, rhs);
5388 return NULL_TREE;
5392 /* See if the comparisons can be merged. Then get all the parameters for
5393 each side. */
5395 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5396 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5397 return 0;
5399 volatilep = 0;
5400 ll_inner = decode_field_reference (ll_arg,
5401 &ll_bitsize, &ll_bitpos, &ll_mode,
5402 &ll_unsignedp, &volatilep, &ll_mask,
5403 &ll_and_mask);
5404 lr_inner = decode_field_reference (lr_arg,
5405 &lr_bitsize, &lr_bitpos, &lr_mode,
5406 &lr_unsignedp, &volatilep, &lr_mask,
5407 &lr_and_mask);
5408 rl_inner = decode_field_reference (rl_arg,
5409 &rl_bitsize, &rl_bitpos, &rl_mode,
5410 &rl_unsignedp, &volatilep, &rl_mask,
5411 &rl_and_mask);
5412 rr_inner = decode_field_reference (rr_arg,
5413 &rr_bitsize, &rr_bitpos, &rr_mode,
5414 &rr_unsignedp, &volatilep, &rr_mask,
5415 &rr_and_mask);
5417 /* The inner operation on the lhs of each comparison must be the
5418 same if we are to be able to do anything.
5419 Then see if we have constants. If not, the same must be true for
5420 the rhs's. */
5421 if (volatilep || ll_inner == 0 || rl_inner == 0
5422 || ! operand_equal_p (ll_inner, rl_inner, 0))
5423 return 0;
5425 if (TREE_CODE (lr_arg) == INTEGER_CST
5426 && TREE_CODE (rr_arg) == INTEGER_CST)
5427 l_const = lr_arg, r_const = rr_arg;
5428 else if (lr_inner == 0 || rr_inner == 0
5429 || ! operand_equal_p (lr_inner, rr_inner, 0))
5430 return 0;
5431 else
5432 l_const = r_const = 0;
5434 /* If either comparison code is not correct for our logical operation,
5435 fail. However, we can convert a one-bit comparison against zero into
5436 the opposite comparison against that bit being set in the field. */
5438 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5439 if (lcode != wanted_code)
5441 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5443 /* Make the left operand unsigned, since we are only interested
5444 in the value of one bit. Otherwise we are doing the wrong
5445 thing below. */
5446 ll_unsignedp = 1;
5447 l_const = ll_mask;
5449 else
5450 return 0;
5453 /* This is analogous to the code for l_const above. */
5454 if (rcode != wanted_code)
5456 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5458 rl_unsignedp = 1;
5459 r_const = rl_mask;
5461 else
5462 return 0;
5465 /* See if we can find a mode that contains both fields being compared on
5466 the left. If we can't, fail. Otherwise, update all constants and masks
5467 to be relative to a field of that size. */
5468 first_bit = MIN (ll_bitpos, rl_bitpos);
5469 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5470 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5471 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5472 volatilep);
5473 if (lnmode == VOIDmode)
5474 return 0;
5476 lnbitsize = GET_MODE_BITSIZE (lnmode);
5477 lnbitpos = first_bit & ~ (lnbitsize - 1);
5478 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5479 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5481 if (BYTES_BIG_ENDIAN)
5483 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5484 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5487 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5488 size_int (xll_bitpos), 0);
5489 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5490 size_int (xrl_bitpos), 0);
5492 if (l_const)
5494 l_const = fold_convert (lntype, l_const);
5495 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5496 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5497 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5498 fold_build1 (BIT_NOT_EXPR,
5499 lntype, ll_mask),
5500 0)))
5502 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5504 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5507 if (r_const)
5509 r_const = fold_convert (lntype, r_const);
5510 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5511 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5512 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5513 fold_build1 (BIT_NOT_EXPR,
5514 lntype, rl_mask),
5515 0)))
5517 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5519 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5523 /* Handle the case of comparisons with constants. If there is something in
5524 common between the masks, those bits of the constants must be the same.
5525 If not, the condition is always false. Test for this to avoid generating
5526 incorrect code below. */
5527 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5528 if (! integer_zerop (result)
5529 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5530 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5532 if (wanted_code == NE_EXPR)
5534 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5535 return constant_boolean_node (true, truth_type);
5537 else
5539 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5540 return constant_boolean_node (false, truth_type);
5544 return NULL_TREE;
5547 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5548 constant. */
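/* E.g. MAX (x, 3) > 3 folds to x > 3, while MIN (x, 3) > 3 folds
   to constant false. */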
5550 static tree
5551 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5553 tree arg0 = op0;
5554 enum tree_code op_code;
5555 tree comp_const;
5556 tree minmax_const;
5557 int consts_equal, consts_lt;
5558 tree inner;
5560 STRIP_SIGN_NOPS (arg0);
5562 op_code = TREE_CODE (arg0);
5563 minmax_const = TREE_OPERAND (arg0, 1);
5564 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5565 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5566 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5567 inner = TREE_OPERAND (arg0, 0);
5569 /* If something does not permit us to optimize, return NULL_TREE. */
5570 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5571 || TREE_CODE (comp_const) != INTEGER_CST
5572 || TREE_OVERFLOW (comp_const)
5573 || TREE_CODE (minmax_const) != INTEGER_CST
5574 || TREE_OVERFLOW (minmax_const))
5575 return NULL_TREE;
5577 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5578 and GT_EXPR, doing the rest with recursive calls using logical
5579 simplifications. */
5580 switch (code)
5582 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5584 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5585 type, op0, op1);
5586 if (tem)
5587 return invert_truthvalue (tem);
5588 return NULL_TREE;
5591 case GE_EXPR:
5592 return
5593 fold_build2 (TRUTH_ORIF_EXPR, type,
5594 optimize_minmax_comparison
5595 (EQ_EXPR, type, arg0, comp_const),
5596 optimize_minmax_comparison
5597 (GT_EXPR, type, arg0, comp_const));
5599 case EQ_EXPR:
5600 if (op_code == MAX_EXPR && consts_equal)
5601 /* MAX (X, 0) == 0 -> X <= 0 */
5602 return fold_build2 (LE_EXPR, type, inner, comp_const);
5604 else if (op_code == MAX_EXPR && consts_lt)
5605 /* MAX (X, 0) == 5 -> X == 5 */
5606 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5608 else if (op_code == MAX_EXPR)
5609 /* MAX (X, 0) == -1 -> false */
5610 return omit_one_operand (type, integer_zero_node, inner);
5612 else if (consts_equal)
5613 /* MIN (X, 0) == 0 -> X >= 0 */
5614 return fold_build2 (GE_EXPR, type, inner, comp_const);
5616 else if (consts_lt)
5617 /* MIN (X, 0) == 5 -> false */
5618 return omit_one_operand (type, integer_zero_node, inner);
5620 else
5621 /* MIN (X, 0) == -1 -> X == -1 */
5622 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5624 case GT_EXPR:
5625 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5626 /* MAX (X, 0) > 0 -> X > 0
5627 MAX (X, 0) > 5 -> X > 5 */
5628 return fold_build2 (GT_EXPR, type, inner, comp_const);
5630 else if (op_code == MAX_EXPR)
5631 /* MAX (X, 0) > -1 -> true */
5632 return omit_one_operand (type, integer_one_node, inner);
5634 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5635 /* MIN (X, 0) > 0 -> false
5636 MIN (X, 0) > 5 -> false */
5637 return omit_one_operand (type, integer_zero_node, inner);
5639 else
5640 /* MIN (X, 0) > -1 -> X > -1 */
5641 return fold_build2 (GT_EXPR, type, inner, comp_const);
5643 default:
5644 return NULL_TREE;
5648 /* T is an integer expression that is being multiplied, divided, or taken a
5649 modulus (CODE says which and what kind of divide or modulus) by a
5650 constant C. See if we can eliminate that operation by folding it with
5651 other operations already in T. WIDE_TYPE, if non-null, is a type that
5652 should be used for the computation if wider than our type.
5654 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5655 (X * 2) + (Y * 4). We must, however, be assured that either the original
5656 expression would not overflow or that overflow is undefined for the type
5657 in the language in question.
5659 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5660 the machine has a multiply-accumulate insn or that this is part of an
5661 addressing calculation.
5663 If we return a non-null expression, it is an equivalent form of the
5664 original computation, but need not be in the original type.
5666 We set *STRICT_OVERFLOW_P to true if the return value depends on
5667 signed overflow being undefined. Otherwise we do not change
5668 *STRICT_OVERFLOW_P. */
5670 static tree
5671 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5672 bool *strict_overflow_p)
5674 /* To avoid exponential search depth, refuse to allow recursion past
5675 three levels. Beyond that (1) it's highly unlikely that we'll find
5676 something interesting and (2) we've probably processed it before
5677 when we built the inner expression. */
5679 static int depth;
5680 tree ret;
5682 if (depth > 3)
5683 return NULL;
5685 depth++;
5686 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5687 depth--;
5689 return ret;
5692 static tree
5693 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5694 bool *strict_overflow_p)
5696 tree type = TREE_TYPE (t);
5697 enum tree_code tcode = TREE_CODE (t);
5698 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5699 > GET_MODE_SIZE (TYPE_MODE (type)))
5700 ? wide_type : type);
5701 tree t1, t2;
5702 int same_p = tcode == code;
5703 tree op0 = NULL_TREE, op1 = NULL_TREE;
5704 bool sub_strict_overflow_p;
5706 /* Don't deal with constants of zero here; they confuse the code below. */
5707 if (integer_zerop (c))
5708 return NULL_TREE;
5710 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5711 op0 = TREE_OPERAND (t, 0);
5713 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5714 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5716 /* Note that we need not handle conditional operations here since fold
5717 already handles those cases. So just do arithmetic here. */
5718 switch (tcode)
5720 case INTEGER_CST:
5721 /* For a constant, we can always simplify if we are a multiply
5722 or (for divide and modulus) if it is a multiple of our constant. */
5723 if (code == MULT_EXPR
5724 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5725 return const_binop (code, fold_convert (ctype, t),
5726 fold_convert (ctype, c), 0);
5727 break;
5729 CASE_CONVERT: case NON_LVALUE_EXPR:
5730 /* If op0 is an expression ... */
5731 if ((COMPARISON_CLASS_P (op0)
5732 || UNARY_CLASS_P (op0)
5733 || BINARY_CLASS_P (op0)
5734 || VL_EXP_CLASS_P (op0)
5735 || EXPRESSION_CLASS_P (op0))
5736 /* ... and is unsigned, and its type is smaller than ctype,
5737 then we cannot pass through as widening. */
5738 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5739 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5740 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5741 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5742 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5743 /* ... or this is a truncation (t is narrower than op0),
5744 then we cannot pass through this narrowing. */
5745 || (GET_MODE_SIZE (TYPE_MODE (type))
5746 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5747 /* ... or signedness changes for division or modulus,
5748 then we cannot pass through this conversion. */
5749 || (code != MULT_EXPR
5750 && (TYPE_UNSIGNED (ctype)
5751 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5752 /* ... or op0's type has undefined overflow while the converted-to
5753 type has not, then we cannot do the operation in the inner type
5754 as that would introduce undefined overflow. */
5755 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5756 && !TYPE_OVERFLOW_UNDEFINED (type))))
5757 break;
5759 /* Pass the constant down and see if we can make a simplification. If
5760 we can, replace this expression with the inner simplification for
5761 possible later conversion to our or some other type. */
5762 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5763 && TREE_CODE (t2) == INTEGER_CST
5764 && !TREE_OVERFLOW (t2)
5765 && (0 != (t1 = extract_muldiv (op0, t2, code,
5766 code == MULT_EXPR
5767 ? ctype : NULL_TREE,
5768 strict_overflow_p))))
5769 return t1;
5770 break;
5772 case ABS_EXPR:
5773 /* If widening the type changes it from signed to unsigned, then we
5774 must avoid building ABS_EXPR itself as unsigned. */
5775 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5777 tree cstype = (*signed_type_for) (ctype);
5778 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5779 != 0)
5781 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5782 return fold_convert (ctype, t1);
5784 break;
5786 /* If the constant is negative, we cannot simplify this. */
5787 if (tree_int_cst_sgn (c) == -1)
5788 break;
5789 /* FALLTHROUGH */
5790 case NEGATE_EXPR:
5791 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5792 != 0)
5793 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5794 break;
5796 case MIN_EXPR: case MAX_EXPR:
5797 /* If widening the type changes the signedness, then we can't perform
5798 this optimization as that changes the result. */
5799 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5800 break;
5802 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5803 sub_strict_overflow_p = false;
5804 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5805 &sub_strict_overflow_p)) != 0
5806 && (t2 = extract_muldiv (op1, c, code, wide_type,
5807 &sub_strict_overflow_p)) != 0)
5809 if (tree_int_cst_sgn (c) < 0)
5810 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5811 if (sub_strict_overflow_p)
5812 *strict_overflow_p = true;
5813 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5814 fold_convert (ctype, t2));
5816 break;
5818 case LSHIFT_EXPR: case RSHIFT_EXPR:
5819 /* If the second operand is constant, this is a multiplication
5820 or floor division by a power of two, so we can treat it that
5821 way unless the multiplier or divisor overflows. Signed
5822 left-shift overflow is implementation-defined rather than
5823 undefined in C90, so do not convert signed left shift into
5824 multiplication. */
5825 if (TREE_CODE (op1) == INTEGER_CST
5826 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5827 /* const_binop may not detect overflow correctly,
5828 so check for it explicitly here. */
5829 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5830 && TREE_INT_CST_HIGH (op1) == 0
5831 && 0 != (t1 = fold_convert (ctype,
5832 const_binop (LSHIFT_EXPR,
5833 size_one_node,
5834 op1, 0)))
5835 && !TREE_OVERFLOW (t1))
5836 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5837 ? MULT_EXPR : FLOOR_DIV_EXPR,
5838 ctype, fold_convert (ctype, op0), t1),
5839 c, code, wide_type, strict_overflow_p);
5840 break;
5842 case PLUS_EXPR: case MINUS_EXPR:
5843 /* See if we can eliminate the operation on both sides. If we can, we
5844 can return a new PLUS or MINUS. If we can't, the only remaining
5845 cases where we can do anything are when the second operand is a
5846 constant. */
5847 sub_strict_overflow_p = false;
5848 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5849 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5850 if (t1 != 0 && t2 != 0
5851 && (code == MULT_EXPR
5852 /* If not multiplication, we can only do this if both operands
5853 are divisible by c. */
5854 || (multiple_of_p (ctype, op0, c)
5855 && multiple_of_p (ctype, op1, c))))
5857 if (sub_strict_overflow_p)
5858 *strict_overflow_p = true;
5859 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5860 fold_convert (ctype, t2));
5863 /* If this was a subtraction, negate OP1 and set it to be an addition.
5864 This simplifies the logic below. */
5865 if (tcode == MINUS_EXPR)
5866 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5868 if (TREE_CODE (op1) != INTEGER_CST)
5869 break;
5871 /* If either OP1 or C are negative, this optimization is not safe for
5872 some of the division and remainder types while for others we need
5873 to change the code. */
5874 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5876 if (code == CEIL_DIV_EXPR)
5877 code = FLOOR_DIV_EXPR;
5878 else if (code == FLOOR_DIV_EXPR)
5879 code = CEIL_DIV_EXPR;
5880 else if (code != MULT_EXPR
5881 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5882 break;
5885 /* If this is a multiply, or if OP1 is a multiple of our constant,
5886 do the operation on OP1 and verify it doesn't overflow. */
5887 if (code == MULT_EXPR
5888 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5890 op1 = const_binop (code, fold_convert (ctype, op1),
5891 fold_convert (ctype, c), 0);
5892 /* We allow the constant to overflow with wrapping semantics. */
5893 if (op1 == 0
5894 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5895 break;
5897 else
5898 break;
5900 /* If we have an unsigned type that is not a sizetype, we cannot widen
5901 the operation since it will change the result if the original
5902 computation overflowed. */
5903 if (TYPE_UNSIGNED (ctype)
5904 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5905 && ctype != type)
5906 break;
5908 /* If we were able to eliminate our operation from the first side,
5909 apply our operation to the second side and reform the PLUS. */
5910 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5911 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5913 /* The last case is if we are a multiply. In that case, we can
5914 apply the distributive law to commute the multiply and addition
5915 if the multiplication of the constants doesn't overflow. */
5916 if (code == MULT_EXPR)
5917 return fold_build2 (tcode, ctype,
5918 fold_build2 (code, ctype,
5919 fold_convert (ctype, op0),
5920 fold_convert (ctype, c)),
5921 op1);
5923 break;
5925 case MULT_EXPR:
5926 /* We have a special case here if we are doing something like
5927 (C * 8) % 4 since we know that's zero. */
5928 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5929 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5930 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5931 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5932 return omit_one_operand (type, integer_zero_node, op0);
5934 /* ... fall through ... */
5936 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5937 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5938 /* If we can extract our operation from the LHS, do so and return a
5939 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5940 do something only if the second operand is a constant. */
5941 if (same_p
5942 && (t1 = extract_muldiv (op0, c, code, wide_type,
5943 strict_overflow_p)) != 0)
5944 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5945 fold_convert (ctype, op1));
5946 else if (tcode == MULT_EXPR && code == MULT_EXPR
5947 && (t1 = extract_muldiv (op1, c, code, wide_type,
5948 strict_overflow_p)) != 0)
5949 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5950 fold_convert (ctype, t1));
5951 else if (TREE_CODE (op1) != INTEGER_CST)
5952 return 0;
5954 /* If these are the same operation types, we can associate them
5955 assuming no overflow. */
5956 if (tcode == code
5957 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5958 fold_convert (ctype, c), 1))
5959 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5960 TREE_INT_CST_HIGH (t1),
5961 (TYPE_UNSIGNED (ctype)
5962 && tcode != MULT_EXPR) ? -1 : 1,
5963 TREE_OVERFLOW (t1)))
5964 && !TREE_OVERFLOW (t1))
5965 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5967 /* If these operations "cancel" each other, we have the main
5968 optimizations of this pass, which occur when either constant is a
5969 multiple of the other, in which case we replace this with either an
5970 operation of CODE or TCODE.
5972 If we have an unsigned type that is not a sizetype, we cannot do
5973 this since it will change the result if the original computation
5974 overflowed. */
5975 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5976 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5977 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5978 || (tcode == MULT_EXPR
5979 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5980 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5981 && code != MULT_EXPR)))
5983 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5985 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5986 *strict_overflow_p = true;
5987 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5988 fold_convert (ctype,
5989 const_binop (TRUNC_DIV_EXPR,
5990 op1, c, 0)));
5992 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5994 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5995 *strict_overflow_p = true;
5996 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5997 fold_convert (ctype,
5998 const_binop (TRUNC_DIV_EXPR,
5999 c, op1, 0)));
6002 break;
6004 default:
6005 break;
6008 return 0;
6011 /* Return a node which has the indicated constant VALUE (either 0 or
6012 1), and is of the indicated TYPE. */
6014 tree
6015 constant_boolean_node (int value, tree type)
6017 if (type == integer_type_node)
6018 return value ? integer_one_node : integer_zero_node;
6019 else if (type == boolean_type_node)
6020 return value ? boolean_true_node : boolean_false_node;
6021 else
6022 return build_int_cst (type, value);
6026 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6027 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6028 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6029 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6030 COND is the first argument to CODE; otherwise (as in the example
6031 given here), it is the second argument. TYPE is the type of the
6032 original expression. Return NULL_TREE if no simplification is
6033 possible. */
6035 static tree
6036 fold_binary_op_with_conditional_arg (enum tree_code code,
6037 tree type, tree op0, tree op1,
6038 tree cond, tree arg, int cond_first_p)
6040 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6041 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6042 tree test, true_value, false_value;
6043 tree lhs = NULL_TREE;
6044 tree rhs = NULL_TREE;
6046 /* This transformation is only worthwhile if we don't have to wrap
6047 arg in a SAVE_EXPR, and the operation can be simplified on at least
6048 one of the branches once it's pushed inside the COND_EXPR. */
6049 if (!TREE_CONSTANT (arg))
6050 return NULL_TREE;
6052 if (TREE_CODE (cond) == COND_EXPR)
6054 test = TREE_OPERAND (cond, 0);
6055 true_value = TREE_OPERAND (cond, 1);
6056 false_value = TREE_OPERAND (cond, 2);
6057 /* If this arm is an expression that throws (and hence has void
6058 type), it does not make sense to try to perform a logical or
6059 arithmetic operation involving it. */
6060 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6061 lhs = true_value;
6062 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6063 rhs = false_value;
6065 else
6067 tree testtype = TREE_TYPE (cond);
6068 test = cond;
6069 true_value = constant_boolean_node (true, testtype);
6070 false_value = constant_boolean_node (false, testtype);
6073 arg = fold_convert (arg_type, arg);
6074 if (lhs == 0)
6076 true_value = fold_convert (cond_type, true_value);
6077 if (cond_first_p)
6078 lhs = fold_build2 (code, type, true_value, arg);
6079 else
6080 lhs = fold_build2 (code, type, arg, true_value);
6082 if (rhs == 0)
6084 false_value = fold_convert (cond_type, false_value);
6085 if (cond_first_p)
6086 rhs = fold_build2 (code, type, false_value, arg);
6087 else
6088 rhs = fold_build2 (code, type, arg, false_value);
6091 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6092 return fold_convert (type, test);
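/* An illustrative trace: with CODE == PLUS_EXPR, ARG == 2 and
   COND == (b ? 1 : 0), we build b ? (2 + 1) : (2 + 0), which the
   recursive fold_build2 calls reduce to b ? 3 : 2.  Note that ARG
   must be TREE_CONSTANT for the transformation to fire at all.  */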
6096 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6098 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6099 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6100 ADDEND is the same as X.
6102 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6103 and finite. The problematic cases are when X is zero, and its mode
6104 has signed zeros. In the case of rounding towards -infinity,
6105 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6106 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6108 bool
6109 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6111 if (!real_zerop (addend))
6112 return false;
6114 /* Don't allow the fold with -fsignaling-nans. */
6115 if (HONOR_SNANS (TYPE_MODE (type)))
6116 return false;
6118 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6119 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6120 return true;
6122 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6123 if (TREE_CODE (addend) == REAL_CST
6124 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6125 negate = !negate;
6127 /* The mode has signed zeros, and we have to honor their sign.
6128 In this situation, there is only one case we can return true for.
6129 X - 0 is the same as X unless rounding towards -infinity is
6130 supported. */
6131 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
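/* A worked consequence of the checks above: for X + -0.0 the sign
   flip turns NEGATE on, so the addend may be dropped exactly when
   sign-dependent rounding is not honored.  Under rounding towards
   -infinity it must stay, since there +0.0 + -0.0 yields -0.0, not
   +0.0.  */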
6134 /* Subroutine of fold() that checks comparisons of built-in math
6135 functions against real constants.
6137 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6138 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6139 is the type of the result and ARG0 and ARG1 are the operands of the
6140 comparison. ARG1 must be a TREE_REAL_CST.
6142 The function returns the constant folded tree if a simplification
6143 can be made, and NULL_TREE otherwise. */
6145 static tree
6146 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6147 tree type, tree arg0, tree arg1)
6149 REAL_VALUE_TYPE c;
6151 if (BUILTIN_SQRT_P (fcode))
6153 tree arg = CALL_EXPR_ARG (arg0, 0);
6154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6156 c = TREE_REAL_CST (arg1);
6157 if (REAL_VALUE_NEGATIVE (c))
6159 /* sqrt(x) < y is always false, if y is negative. */
6160 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6161 return omit_one_operand (type, integer_zero_node, arg);
6163 /* sqrt(x) > y is always true, if y is negative and we
6164 don't care about NaNs, i.e. negative values of x. */
6165 if (code == NE_EXPR || !HONOR_NANS (mode))
6166 return omit_one_operand (type, integer_one_node, arg);
6168 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6169 return fold_build2 (GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg), dconst0));
6172 else if (code == GT_EXPR || code == GE_EXPR)
6174 REAL_VALUE_TYPE c2;
6176 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6177 real_convert (&c2, mode, &c2);
6179 if (REAL_VALUE_ISINF (c2))
6181 /* sqrt(x) > y is x == +Inf, when y is very large. */
6182 if (HONOR_INFINITIES (mode))
6183 return fold_build2 (EQ_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg), c2));
6186 /* sqrt(x) > y is always false, when y is very large
6187 and we don't care about infinities. */
6188 return omit_one_operand (type, integer_zero_node, arg);
6191 /* sqrt(x) > c is the same as x > c*c. */
6192 return fold_build2 (code, type, arg,
6193 build_real (TREE_TYPE (arg), c2));
6195 else if (code == LT_EXPR || code == LE_EXPR)
6197 REAL_VALUE_TYPE c2;
6199 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6200 real_convert (&c2, mode, &c2);
6202 if (REAL_VALUE_ISINF (c2))
6204 /* sqrt(x) < y is always true, when y is a very large
6205 value and we don't care about NaNs or Infinities. */
6206 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6207 return omit_one_operand (type, integer_one_node, arg);
6209 /* sqrt(x) < y is x != +Inf when y is very large and we
6210 don't care about NaNs. */
6211 if (! HONOR_NANS (mode))
6212 return fold_build2 (NE_EXPR, type, arg,
6213 build_real (TREE_TYPE (arg), c2));
6215 /* sqrt(x) < y is x >= 0 when y is very large and we
6216 don't care about Infinities. */
6217 if (! HONOR_INFINITIES (mode))
6218 return fold_build2 (GE_EXPR, type, arg,
6219 build_real (TREE_TYPE (arg), dconst0));
6221 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6222 if (lang_hooks.decls.global_bindings_p () != 0
6223 || CONTAINS_PLACEHOLDER_P (arg))
6224 return NULL_TREE;
6226 arg = save_expr (arg);
6227 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6228 fold_build2 (GE_EXPR, type, arg,
6229 build_real (TREE_TYPE (arg),
6230 dconst0)),
6231 fold_build2 (NE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6233 c2)));
6236 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6237 if (! HONOR_NANS (mode))
6238 return fold_build2 (code, type, arg,
6239 build_real (TREE_TYPE (arg), c2));
6241 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6242 if (lang_hooks.decls.global_bindings_p () == 0
6243 && ! CONTAINS_PLACEHOLDER_P (arg))
6245 arg = save_expr (arg);
6246 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6247 fold_build2 (GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg),
6249 dconst0)),
6250 fold_build2 (code, type, arg,
6251 build_real (TREE_TYPE (arg),
6252 c2)));
6257 return NULL_TREE;
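/* Illustrative traces: with -ffast-math, where neither NaNs nor
   infinities are honored, sqrt(x) < 2.0 takes the LT_EXPR path above
   with c2 == 4.0 and folds to x < 4.0; with NaNs honored it instead
   becomes x >= 0.0 && x < 4.0, provided the argument can be wrapped
   in a SAVE_EXPR.  */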
6260 /* Subroutine of fold() that optimizes comparisons against Infinities,
6261 either +Inf or -Inf.
6263 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6264 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6265 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6267 The function returns the constant folded tree if a simplification
6268 can be made, and NULL_TREE otherwise. */
6270 static tree
6271 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6273 enum machine_mode mode;
6274 REAL_VALUE_TYPE max;
6275 tree temp;
6276 bool neg;
6278 mode = TYPE_MODE (TREE_TYPE (arg0));
6280 /* For negative infinity swap the sense of the comparison. */
6281 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6282 if (neg)
6283 code = swap_tree_comparison (code);
6285 switch (code)
6287 case GT_EXPR:
6288 /* x > +Inf is always false, if we ignore sNaNs. */
6289 if (HONOR_SNANS (mode))
6290 return NULL_TREE;
6291 return omit_one_operand (type, integer_zero_node, arg0);
6293 case LE_EXPR:
6294 /* x <= +Inf is always true, if we don't care about NaNs. */
6295 if (! HONOR_NANS (mode))
6296 return omit_one_operand (type, integer_one_node, arg0);
6298 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6299 if (lang_hooks.decls.global_bindings_p () == 0
6300 && ! CONTAINS_PLACEHOLDER_P (arg0))
6302 arg0 = save_expr (arg0);
6303 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6305 break;
6307 case EQ_EXPR:
6308 case GE_EXPR:
6309 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6310 real_maxval (&max, neg, mode);
6311 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6312 arg0, build_real (TREE_TYPE (arg0), max));
6314 case LT_EXPR:
6315 /* x < +Inf is always equal to x <= DBL_MAX. */
6316 real_maxval (&max, neg, mode);
6317 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6318 arg0, build_real (TREE_TYPE (arg0), max));
6320 case NE_EXPR:
6321 /* x != +Inf is always equal to !(x > DBL_MAX). */
6322 real_maxval (&max, neg, mode);
6323 if (! HONOR_NANS (mode))
6324 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6325 arg0, build_real (TREE_TYPE (arg0), max));
6327 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6329 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6331 default:
6332 break;
6335 return NULL_TREE;
6338 /* Subroutine of fold() that optimizes comparisons of a division by
6339 a nonzero integer constant against an integer constant, i.e.
6340 X/C1 op C2.
6342 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6343 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6344 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6346 The function returns the constant folded tree if a simplification
6347 can be made, and NULL_TREE otherwise. */
6349 static tree
6350 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6352 tree prod, tmp, hi, lo;
6353 tree arg00 = TREE_OPERAND (arg0, 0);
6354 tree arg01 = TREE_OPERAND (arg0, 1);
6355 unsigned HOST_WIDE_INT lpart;
6356 HOST_WIDE_INT hpart;
6357 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6358 bool neg_overflow;
6359 int overflow;
6361 /* We have to do this the hard way to detect unsigned overflow.
6362 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6363 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6364 TREE_INT_CST_HIGH (arg01),
6365 TREE_INT_CST_LOW (arg1),
6366 TREE_INT_CST_HIGH (arg1),
6367 &lpart, &hpart, unsigned_p);
6368 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6369 -1, overflow);
6370 neg_overflow = false;
6372 if (unsigned_p)
6374 tmp = int_const_binop (MINUS_EXPR, arg01,
6375 build_int_cst (TREE_TYPE (arg01), 1), 0);
6376 lo = prod;
6378 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6379 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6380 TREE_INT_CST_HIGH (prod),
6381 TREE_INT_CST_LOW (tmp),
6382 TREE_INT_CST_HIGH (tmp),
6383 &lpart, &hpart, unsigned_p);
6384 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6385 -1, overflow | TREE_OVERFLOW (prod));
6387 else if (tree_int_cst_sgn (arg01) >= 0)
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1), 0);
6391 switch (tree_int_cst_sgn (arg1))
6393 case -1:
6394 neg_overflow = true;
6395 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6396 hi = prod;
6397 break;
6399 case 0:
6400 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6401 hi = tmp;
6402 break;
6404 case 1:
6405 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6406 lo = prod;
6407 break;
6409 default:
6410 gcc_unreachable ();
6413 else
6415 /* A negative divisor reverses the relational operators. */
6416 code = swap_tree_comparison (code);
6418 tmp = int_const_binop (PLUS_EXPR, arg01,
6419 build_int_cst (TREE_TYPE (arg01), 1), 0);
6420 switch (tree_int_cst_sgn (arg1))
6422 case -1:
6423 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6424 lo = prod;
6425 break;
6427 case 0:
6428 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6429 lo = tmp;
6430 break;
6432 case 1:
6433 neg_overflow = true;
6434 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6435 hi = prod;
6436 break;
6438 default:
6439 gcc_unreachable ();
6443 switch (code)
6445 case EQ_EXPR:
6446 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6447 return omit_one_operand (type, integer_zero_node, arg00);
6448 if (TREE_OVERFLOW (hi))
6449 return fold_build2 (GE_EXPR, type, arg00, lo);
6450 if (TREE_OVERFLOW (lo))
6451 return fold_build2 (LE_EXPR, type, arg00, hi);
6452 return build_range_check (type, arg00, 1, lo, hi);
6454 case NE_EXPR:
6455 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6456 return omit_one_operand (type, integer_one_node, arg00);
6457 if (TREE_OVERFLOW (hi))
6458 return fold_build2 (LT_EXPR, type, arg00, lo);
6459 if (TREE_OVERFLOW (lo))
6460 return fold_build2 (GT_EXPR, type, arg00, hi);
6461 return build_range_check (type, arg00, 0, lo, hi);
6463 case LT_EXPR:
6464 if (TREE_OVERFLOW (lo))
6466 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6467 return omit_one_operand (type, tmp, arg00);
6469 return fold_build2 (LT_EXPR, type, arg00, lo);
6471 case LE_EXPR:
6472 if (TREE_OVERFLOW (hi))
6474 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6475 return omit_one_operand (type, tmp, arg00);
6477 return fold_build2 (LE_EXPR, type, arg00, hi);
6479 case GT_EXPR:
6480 if (TREE_OVERFLOW (hi))
6482 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6483 return omit_one_operand (type, tmp, arg00);
6485 return fold_build2 (GT_EXPR, type, arg00, hi);
6487 case GE_EXPR:
6488 if (TREE_OVERFLOW (lo))
6490 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6491 return omit_one_operand (type, tmp, arg00);
6493 return fold_build2 (GE_EXPR, type, arg00, lo);
6495 default:
6496 break;
6499 return NULL_TREE;
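/* Worked example: for unsigned X, X / 3 == 2 computes prod = 6,
   tmp = 2, lo = 6 and hi = 8, none of which overflow, so the
   EQ_EXPR case folds the comparison into the range check
   6 <= X && X <= 8.  */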
6503 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6504 equality/inequality test, then return a simplified form of the test
6505 using a sign test. Otherwise return NULL. TYPE is the desired
6506 result type. */
6508 static tree
6509 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6510 tree result_type)
6512 /* If this is testing a single bit, we can optimize the test. */
6513 if ((code == NE_EXPR || code == EQ_EXPR)
6514 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6515 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6517 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6518 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6519 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6521 if (arg00 != NULL_TREE
6522 /* This is only a win if casting to a signed type is cheap,
6523 i.e. when arg00's type is not a partial mode. */
6524 && TYPE_PRECISION (TREE_TYPE (arg00))
6525 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6527 tree stype = signed_type_for (TREE_TYPE (arg00));
6528 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6529 result_type, fold_convert (stype, arg00),
6530 build_int_cst (stype, 0));
6534 return NULL_TREE;
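/* For example (assuming an 8-bit signed char): if A has type
   signed char, (A & 0x80) masks exactly the sign bit, so sign_bit_p
   succeeds and (A & 0x80) != 0 folds to A < 0, while (A & 0x80) == 0
   folds to A >= 0.  */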
6537 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6538 equality/inequality test, then return a simplified form of
6539 the test using shifts and logical operations. Otherwise return
6540 NULL. TYPE is the desired result type. */
6542 tree
6543 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6544 tree result_type)
6546 /* If this is testing a single bit, we can optimize the test. */
6547 if ((code == NE_EXPR || code == EQ_EXPR)
6548 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6549 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6551 tree inner = TREE_OPERAND (arg0, 0);
6552 tree type = TREE_TYPE (arg0);
6553 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6554 enum machine_mode operand_mode = TYPE_MODE (type);
6555 int ops_unsigned;
6556 tree signed_type, unsigned_type, intermediate_type;
6557 tree tem, one;
6559 /* First, see if we can fold the single bit test into a sign-bit
6560 test. */
6561 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6562 result_type);
6563 if (tem)
6564 return tem;
6566 /* Otherwise we have (A & C) != 0 where C is a single bit,
6567 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6568 Similarly for (A & C) == 0. */
6570 /* If INNER is a right shift of a constant and it plus BITNUM does
6571 not overflow, adjust BITNUM and INNER. */
6572 if (TREE_CODE (inner) == RSHIFT_EXPR
6573 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6574 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6575 && bitnum < TYPE_PRECISION (type)
6576 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6577 TYPE_PRECISION (type) - bitnum))
6579 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6580 inner = TREE_OPERAND (inner, 0);
6583 /* If we are going to be able to omit the AND below, we must do our
6584 operations as unsigned. If we must use the AND, we have a choice.
6585 Normally unsigned is faster, but for some machines signed is. */
6586 #ifdef LOAD_EXTEND_OP
6587 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6588 && !flag_syntax_only) ? 0 : 1;
6589 #else
6590 ops_unsigned = 1;
6591 #endif
6593 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6594 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6595 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6596 inner = fold_convert (intermediate_type, inner);
6598 if (bitnum != 0)
6599 inner = build2 (RSHIFT_EXPR, intermediate_type,
6600 inner, size_int (bitnum));
6602 one = build_int_cst (intermediate_type, 1);
6604 if (code == EQ_EXPR)
6605 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6607 /* Put the AND last so it can combine with more things. */
6608 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6610 /* Make sure to return the proper type. */
6611 inner = fold_convert (result_type, inner);
6613 return inner;
6615 return NULL_TREE;
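/* For example: (A & 4) != 0 becomes ((A >> 2) & 1) via the shift
   above, and (A & 4) == 0 becomes (((A >> 2) ^ 1) & 1), with the XOR
   inserted by the EQ_EXPR case.  The intermediate type depends on
   LOAD_EXTEND_OP as noted above.  */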
6618 /* Check whether we are allowed to reorder operands arg0 and arg1,
6619 such that the evaluation of arg1 occurs before arg0. */
6621 static bool
6622 reorder_operands_p (const_tree arg0, const_tree arg1)
6624 if (! flag_evaluation_order)
6625 return true;
6626 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6627 return true;
6628 return ! TREE_SIDE_EFFECTS (arg0)
6629 && ! TREE_SIDE_EFFECTS (arg1);
6632 /* Test whether it is preferable to swap two operands, ARG0 and
6633 ARG1, for example because ARG0 is an integer constant and ARG1
6634 isn't. If REORDER is true, only recommend swapping if we can
6635 evaluate the operands in reverse order. */
6637 bool
6638 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6640 STRIP_SIGN_NOPS (arg0);
6641 STRIP_SIGN_NOPS (arg1);
6643 if (TREE_CODE (arg1) == INTEGER_CST)
6644 return 0;
6645 if (TREE_CODE (arg0) == INTEGER_CST)
6646 return 1;
6648 if (TREE_CODE (arg1) == REAL_CST)
6649 return 0;
6650 if (TREE_CODE (arg0) == REAL_CST)
6651 return 1;
6653 if (TREE_CODE (arg1) == FIXED_CST)
6654 return 0;
6655 if (TREE_CODE (arg0) == FIXED_CST)
6656 return 1;
6658 if (TREE_CODE (arg1) == COMPLEX_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == COMPLEX_CST)
6661 return 1;
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6668 if (optimize_size)
6669 return 0;
6671 if (reorder && flag_evaluation_order
6672 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6673 return 0;
6675 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6676 for commutative and comparison operators. Ensuring a canonical
6677 form allows the optimizers to find additional redundancies without
6678 having to explicitly check for both orderings. */
6679 if (TREE_CODE (arg0) == SSA_NAME
6680 && TREE_CODE (arg1) == SSA_NAME
6681 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6682 return 1;
6684 /* Put SSA_NAMEs last. */
6685 if (TREE_CODE (arg1) == SSA_NAME)
6686 return 0;
6687 if (TREE_CODE (arg0) == SSA_NAME)
6688 return 1;
6690 /* Put variables last. */
6691 if (DECL_P (arg1))
6692 return 0;
6693 if (DECL_P (arg0))
6694 return 1;
6696 return 0;
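/* Callers use this predicate to canonicalize commutative operations
   and comparisons; e.g. for `5 < x' it returns 1 because ARG0 is an
   INTEGER_CST, and the caller then rewrites the test as `x > 5'
   (an illustrative sketch; the swap itself is done by the caller).  */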
6699 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6700 ARG0 is extended to a wider type. */
6702 static tree
6703 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6705 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6706 tree arg1_unw;
6707 tree shorter_type, outer_type;
6708 tree min, max;
6709 bool above, below;
6711 if (arg0_unw == arg0)
6712 return NULL_TREE;
6713 shorter_type = TREE_TYPE (arg0_unw);
6715 #ifdef HAVE_canonicalize_funcptr_for_compare
6716 /* Disable this optimization if we're casting a function pointer
6717 type on targets that require function pointer canonicalization. */
6718 if (HAVE_canonicalize_funcptr_for_compare
6719 && TREE_CODE (shorter_type) == POINTER_TYPE
6720 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6721 return NULL_TREE;
6722 #endif
6724 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6725 return NULL_TREE;
6727 arg1_unw = get_unwidened (arg1, NULL_TREE);
6729 /* If possible, express the comparison in the shorter mode. */
6730 if ((code == EQ_EXPR || code == NE_EXPR
6731 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6732 && (TREE_TYPE (arg1_unw) == shorter_type
6733 || (TYPE_PRECISION (shorter_type)
6734 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6735 || ((TYPE_PRECISION (shorter_type)
6736 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6737 && (TYPE_UNSIGNED (shorter_type)
6738 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6739 || (TREE_CODE (arg1_unw) == INTEGER_CST
6740 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6741 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6742 && int_fits_type_p (arg1_unw, shorter_type))))
6743 return fold_build2 (code, type, arg0_unw,
6744 fold_convert (shorter_type, arg1_unw));
6746 if (TREE_CODE (arg1_unw) != INTEGER_CST
6747 || TREE_CODE (shorter_type) != INTEGER_TYPE
6748 || int_fits_type_p (arg1_unw, shorter_type))
6749 return NULL_TREE;
6751 /* If we are comparing with an integer that does not fit into the range
6752 of the shorter type, the result is known. */
6753 outer_type = TREE_TYPE (arg1_unw);
6754 min = lower_bound_in_type (outer_type, shorter_type);
6755 max = upper_bound_in_type (outer_type, shorter_type);
6757 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6758 max, arg1_unw));
6759 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6760 arg1_unw, min));
6762 switch (code)
6764 case EQ_EXPR:
6765 if (above || below)
6766 return omit_one_operand (type, integer_zero_node, arg0);
6767 break;
6769 case NE_EXPR:
6770 if (above || below)
6771 return omit_one_operand (type, integer_one_node, arg0);
6772 break;
6774 case LT_EXPR:
6775 case LE_EXPR:
6776 if (above)
6777 return omit_one_operand (type, integer_one_node, arg0);
6778 else if (below)
6779 return omit_one_operand (type, integer_zero_node, arg0);
6781 case GT_EXPR:
6782 case GE_EXPR:
6783 if (above)
6784 return omit_one_operand (type, integer_zero_node, arg0);
6785 else if (below)
6786 return omit_one_operand (type, integer_one_node, arg0);
6788 default:
6789 break;
6792 return NULL_TREE;
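/* For example (assuming a 16-bit short): for short s, the comparison
   (int) s == 100000 compares against a value above the bound from
   upper_bound_in_type, so ABOVE is set and the EQ_EXPR case folds
   the whole test to 0.  */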
6795 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6796 ARG0 just the signedness is changed. */
6798 static tree
6799 fold_sign_changed_comparison (enum tree_code code, tree type,
6800 tree arg0, tree arg1)
6802 tree arg0_inner;
6803 tree inner_type, outer_type;
6805 if (!CONVERT_EXPR_P (arg0))
6806 return NULL_TREE;
6808 outer_type = TREE_TYPE (arg0);
6809 arg0_inner = TREE_OPERAND (arg0, 0);
6810 inner_type = TREE_TYPE (arg0_inner);
6812 #ifdef HAVE_canonicalize_funcptr_for_compare
6813 /* Disable this optimization if we're casting a function pointer
6814 type on targets that require function pointer canonicalization. */
6815 if (HAVE_canonicalize_funcptr_for_compare
6816 && TREE_CODE (inner_type) == POINTER_TYPE
6817 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6818 return NULL_TREE;
6819 #endif
6821 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6822 return NULL_TREE;
6824 /* If the conversion is from an integral subtype to its basetype
6825 leave it alone. */
6826 if (TREE_TYPE (inner_type) == outer_type)
6827 return NULL_TREE;
6829 if (TREE_CODE (arg1) != INTEGER_CST
6830 && !(CONVERT_EXPR_P (arg1)
6831 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6832 return NULL_TREE;
6834 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6835 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6836 && code != NE_EXPR
6837 && code != EQ_EXPR)
6838 return NULL_TREE;
6840 if (TREE_CODE (arg1) == INTEGER_CST)
6841 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6842 TREE_INT_CST_HIGH (arg1), 0,
6843 TREE_OVERFLOW (arg1));
6844 else
6845 arg1 = fold_convert (inner_type, arg1);
6847 return fold_build2 (code, type, arg0_inner, arg1);
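/* For example: with unsigned int u, the test (int) u == 5 has equal
   inner and outer precision, so the conversion is stripped and the
   test folds to u == 5U, the constant having been refitted to the
   inner type by force_fit_type_double (an illustrative trace).  */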
6850 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6851 the step of the array. Reconstructs s and delta in the case of s * delta
6852 being an integer constant (and thus already folded).
6853 ADDR is the address. OP1 is the multiplicative expression.
6854 If the function succeeds, the new address expression is returned. Otherwise
6855 NULL_TREE is returned. */
6857 static tree
6858 try_move_mult_to_index (tree addr, tree op1)
6860 tree s, delta, step;
6861 tree ref = TREE_OPERAND (addr, 0), pref;
6862 tree ret, pos;
6863 tree itype;
6864 bool mdim = false;
6866 /* Strip the nops that might be added when converting op1 to sizetype. */
6867 STRIP_NOPS (op1);
6869 /* Canonicalize op1 into a possibly non-constant delta
6870 and an INTEGER_CST s. */
6871 if (TREE_CODE (op1) == MULT_EXPR)
6873 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6878 if (TREE_CODE (arg0) == INTEGER_CST)
6880 s = arg0;
6881 delta = arg1;
6883 else if (TREE_CODE (arg1) == INTEGER_CST)
6885 s = arg1;
6886 delta = arg0;
6888 else
6889 return NULL_TREE;
6891 else if (TREE_CODE (op1) == INTEGER_CST)
6893 delta = op1;
6894 s = NULL_TREE;
6896 else
6898 /* Treat op1 as delta * 1. */
6899 delta = op1;
6900 s = integer_one_node;
6903 for (;; ref = TREE_OPERAND (ref, 0))
6905 if (TREE_CODE (ref) == ARRAY_REF)
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6909 mdim = true;
6911 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6912 if (! itype)
6913 continue;
6915 step = array_ref_element_size (ref);
6916 if (TREE_CODE (step) != INTEGER_CST)
6917 continue;
6919 if (s)
6921 if (! tree_int_cst_equal (step, s))
6922 continue;
6924 else
6926 /* Check whether delta is a multiple of step. */
6927 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6928 if (! tmp)
6929 continue;
6930 delta = tmp;
6933 /* Only fold here if we can verify we do not overflow one
6934 dimension of a multi-dimensional array. */
6935 if (mdim)
6937 tree tmp;
6939 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6940 || !INTEGRAL_TYPE_P (itype)
6941 || !TYPE_MAX_VALUE (itype)
6942 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6943 continue;
6945 tmp = fold_binary (PLUS_EXPR, itype,
6946 fold_convert (itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert (itype, delta));
6949 if (!tmp
6950 || TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6952 continue;
6955 break;
6957 else
6958 mdim = false;
6960 if (!handled_component_p (ref))
6961 return NULL_TREE;
6964 /* We found a suitable array reference. So copy everything up to it,
6965 and replace the index. */
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6969 pos = ret;
6971 while (pref != ref)
6973 pref = TREE_OPERAND (pref, 0);
6974 TREE_OPERAND (pos, 0) = copy_node (pref);
6975 pos = TREE_OPERAND (pos, 0);
6978 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6979 fold_convert (itype,
6980 TREE_OPERAND (pos, 1)),
6981 fold_convert (itype, delta));
6983 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
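/* For example (assuming 4-byte int and the one-dimensional array
   int a[10]): for &a[2] p+ j * 4, s == 4 matches the element size,
   so the index is rewritten and the result is &a[2 + j]; the
   multi-dimensional overflow check is skipped because mdim stays
   false.  */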
6987 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6988 means A >= Y && A != MAX, but in this case we know that
6989 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6991 static tree
6992 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6994 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6996 if (TREE_CODE (bound) == LT_EXPR)
6997 a = TREE_OPERAND (bound, 0);
6998 else if (TREE_CODE (bound) == GT_EXPR)
6999 a = TREE_OPERAND (bound, 1);
7000 else
7001 return NULL_TREE;
7003 typea = TREE_TYPE (a);
7004 if (!INTEGRAL_TYPE_P (typea)
7005 && !POINTER_TYPE_P (typea))
7006 return NULL_TREE;
7008 if (TREE_CODE (ineq) == LT_EXPR)
7010 a1 = TREE_OPERAND (ineq, 1);
7011 y = TREE_OPERAND (ineq, 0);
7013 else if (TREE_CODE (ineq) == GT_EXPR)
7015 a1 = TREE_OPERAND (ineq, 0);
7016 y = TREE_OPERAND (ineq, 1);
7018 else
7019 return NULL_TREE;
7021 if (TREE_TYPE (a1) != typea)
7022 return NULL_TREE;
7024 if (POINTER_TYPE_P (typea))
7027 /* Convert the pointers to integers before taking the difference. */
7027 tree ta = fold_convert (ssizetype, a);
7028 tree ta1 = fold_convert (ssizetype, a1);
7029 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7031 else
7032 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7034 if (!diff || !integer_onep (diff))
7035 return NULL_TREE;
7037 return fold_build2 (GE_EXPR, type, a, y);
7040 /* Fold a sum or difference of at least one multiplication.
7041 Returns the folded tree or NULL if no simplification could be made. */
7043 static tree
7044 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7046 tree arg00, arg01, arg10, arg11;
7047 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7049 /* (A * C) +- (B * C) -> (A+-B) * C.
7050 (A * C) +- A -> A * (C+-1).
7051 We are most concerned about the case where C is a constant,
7052 but other combinations show up during loop reduction. Since
7053 it is not difficult, try all four possibilities. */
7055 if (TREE_CODE (arg0) == MULT_EXPR)
7057 arg00 = TREE_OPERAND (arg0, 0);
7058 arg01 = TREE_OPERAND (arg0, 1);
7060 else if (TREE_CODE (arg0) == INTEGER_CST)
7062 arg00 = build_one_cst (type);
7063 arg01 = arg0;
7065 else
7067 /* We cannot generate constant 1 for fract. */
7068 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7069 return NULL_TREE;
7070 arg00 = arg0;
7071 arg01 = build_one_cst (type);
7073 if (TREE_CODE (arg1) == MULT_EXPR)
7075 arg10 = TREE_OPERAND (arg1, 0);
7076 arg11 = TREE_OPERAND (arg1, 1);
7078 else if (TREE_CODE (arg1) == INTEGER_CST)
7080 arg10 = build_one_cst (type);
7081 arg11 = arg1;
7083 else
7085 /* We cannot generate constant 1 for fract. */
7086 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7087 return NULL_TREE;
7088 arg10 = arg1;
7089 arg11 = build_one_cst (type);
7091 same = NULL_TREE;
7093 if (operand_equal_p (arg01, arg11, 0))
7094 same = arg01, alt0 = arg00, alt1 = arg10;
7095 else if (operand_equal_p (arg00, arg10, 0))
7096 same = arg00, alt0 = arg01, alt1 = arg11;
7097 else if (operand_equal_p (arg00, arg11, 0))
7098 same = arg00, alt0 = arg01, alt1 = arg10;
7099 else if (operand_equal_p (arg01, arg10, 0))
7100 same = arg01, alt0 = arg00, alt1 = arg11;
7102 /* No identical multiplicands; see if we can find a common
7103 power-of-two factor in non-power-of-two multiplies. This
7104 can help in multi-dimensional array access. */
7105 else if (host_integerp (arg01, 0)
7106 && host_integerp (arg11, 0))
7108 HOST_WIDE_INT int01, int11, tmp;
7109 bool swap = false;
7110 tree maybe_same;
7111 int01 = TREE_INT_CST_LOW (arg01);
7112 int11 = TREE_INT_CST_LOW (arg11);
7114 /* Move min of absolute values to int11. */
7115 if ((int01 >= 0 ? int01 : -int01)
7116 < (int11 >= 0 ? int11 : -int11))
7118 tmp = int01, int01 = int11, int11 = tmp;
7119 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7120 maybe_same = arg01;
7121 swap = true;
7123 else
7124 maybe_same = arg11;
7126 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7128 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7129 build_int_cst (TREE_TYPE (arg00),
7130 int01 / int11));
7131 alt1 = arg10;
7132 same = maybe_same;
7133 if (swap)
7134 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7138 if (same)
7139 return fold_build2 (MULT_EXPR, type,
7140 fold_build2 (code, type,
7141 fold_convert (type, alt0),
7142 fold_convert (type, alt1)),
7143 fold_convert (type, same));
7145 return NULL_TREE;
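/* Two illustrative folds: x*3 + x*5 has equal first multiplicands,
   so SAME == x and the result is (3 + 5) * x; i*12 + j*4 has no
   identical multiplicand, but 4 is a power of two dividing 12, so
   the common-factor case yields (i*3 + j) * 4.  */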
7148 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7151 upon failure. */
7153 static int
7154 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7156 tree type = TREE_TYPE (expr);
7157 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7158 int byte, offset, word, words;
7159 unsigned char value;
7161 if (total_bytes > len)
7162 return 0;
7163 words = total_bytes / UNITS_PER_WORD;
7165 for (byte = 0; byte < total_bytes; byte++)
7167 int bitpos = byte * BITS_PER_UNIT;
7168 if (bitpos < HOST_BITS_PER_WIDE_INT)
7169 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7170 else
7171 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7172 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7174 if (total_bytes > UNITS_PER_WORD)
7176 word = byte / UNITS_PER_WORD;
7177 if (WORDS_BIG_ENDIAN)
7178 word = (words - 1) - word;
7179 offset = word * UNITS_PER_WORD;
7180 if (BYTES_BIG_ENDIAN)
7181 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7182 else
7183 offset += byte % UNITS_PER_WORD;
7185 else
7186 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7187 ptr[offset] = value;
7189 return total_bytes;
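/* Usage sketch (illustrative; assumes a 32-bit int type and a
   little-endian target, where bytes land in increasing order):  */
#if 0
  unsigned char buf[4];
  int n = native_encode_int (build_int_cst (integer_type_node,
                                            0x12345678), buf, 4);
  /* Here n == 4 and buf holds { 0x78, 0x56, 0x34, 0x12 }.  */
#endif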
7193 /* Subroutine of native_encode_expr. Encode the REAL_CST
7194 specified by EXPR into the buffer PTR of length LEN bytes.
7195 Return the number of bytes placed in the buffer, or zero
7196 upon failure. */
7198 static int
7199 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7201 tree type = TREE_TYPE (expr);
7202 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7203 int byte, offset, word, words, bitpos;
7204 unsigned char value;
7206 /* There are always 32 bits in each long, no matter the size of
7207 the host's long. We handle floating-point representations with
7208 up to 192 bits. */
7209 long tmp[6];
7211 if (total_bytes > len)
7212 return 0;
7213 words = 32 / UNITS_PER_WORD;
7215 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7217 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7218 bitpos += BITS_PER_UNIT)
7220 byte = (bitpos / BITS_PER_UNIT) & 3;
7221 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7223 if (UNITS_PER_WORD < 4)
7225 word = byte / UNITS_PER_WORD;
7226 if (WORDS_BIG_ENDIAN)
7227 word = (words - 1) - word;
7228 offset = word * UNITS_PER_WORD;
7229 if (BYTES_BIG_ENDIAN)
7230 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7231 else
7232 offset += byte % UNITS_PER_WORD;
7234 else
7235 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7236 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7238 return total_bytes;
7241 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
7246 static int
7247 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7249 int rsize, isize;
7250 tree part;
7252 part = TREE_REALPART (expr);
7253 rsize = native_encode_expr (part, ptr, len);
7254 if (rsize == 0)
7255 return 0;
7256 part = TREE_IMAGPART (expr);
7257 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7258 if (isize != rsize)
7259 return 0;
7260 return rsize + isize;
7264 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7265 specified by EXPR into the buffer PTR of length LEN bytes.
7266 Return the number of bytes placed in the buffer, or zero
7267 upon failure. */
7269 static int
7270 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7272 int i, size, offset, count;
7273 tree itype, elem, elements;
7275 offset = 0;
7276 elements = TREE_VECTOR_CST_ELTS (expr);
7277 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7278 itype = TREE_TYPE (TREE_TYPE (expr));
7279 size = GET_MODE_SIZE (TYPE_MODE (itype));
7280 for (i = 0; i < count; i++)
7282 if (elements)
7284 elem = TREE_VALUE (elements);
7285 elements = TREE_CHAIN (elements);
7287 else
7288 elem = NULL_TREE;
7290 if (elem)
7292 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7293 return 0;
7295 else
7297 if (offset + size > len)
7298 return 0;
7299 memset (ptr+offset, 0, size);
7301 offset += size;
7303 return offset;
7307 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7308 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7309 buffer PTR of length LEN bytes. Return the number of bytes
7310 placed in the buffer, or zero upon failure. */
7312 int
7313 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7315 switch (TREE_CODE (expr))
7317 case INTEGER_CST:
7318 return native_encode_int (expr, ptr, len);
7320 case REAL_CST:
7321 return native_encode_real (expr, ptr, len);
7323 case COMPLEX_CST:
7324 return native_encode_complex (expr, ptr, len);
7326 case VECTOR_CST:
7327 return native_encode_vector (expr, ptr, len);
7329 default:
7330 return 0;
7335 /* Subroutine of native_interpret_expr. Interpret the contents of
7336 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7337 If the buffer cannot be interpreted, return NULL_TREE. */
7339 static tree
7340 native_interpret_int (tree type, const unsigned char *ptr, int len)
7342 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7343 int byte, offset, word, words;
7344 unsigned char value;
7345 unsigned HOST_WIDE_INT lo = 0;
7346 HOST_WIDE_INT hi = 0;
7348 if (total_bytes > len)
7349 return NULL_TREE;
7350 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7351 return NULL_TREE;
7352 words = total_bytes / UNITS_PER_WORD;
7354 for (byte = 0; byte < total_bytes; byte++)
7356 int bitpos = byte * BITS_PER_UNIT;
7357 if (total_bytes > UNITS_PER_WORD)
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7365 else
7366 offset += byte % UNITS_PER_WORD;
7368 else
7369 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7370 value = ptr[offset];
7372 if (bitpos < HOST_BITS_PER_WIDE_INT)
7373 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7374 else
7375 hi |= (unsigned HOST_WIDE_INT) value
7376 << (bitpos - HOST_BITS_PER_WIDE_INT);
7379 return build_int_cst_wide_type (type, lo, hi);
7383 /* Subroutine of native_interpret_expr. Interpret the contents of
7384 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7385 If the buffer cannot be interpreted, return NULL_TREE. */
7387 static tree
7388 native_interpret_real (tree type, const unsigned char *ptr, int len)
7390 enum machine_mode mode = TYPE_MODE (type);
7391 int total_bytes = GET_MODE_SIZE (mode);
7392 int byte, offset, word, words, bitpos;
7393 unsigned char value;
7394 /* There are always 32 bits in each long, no matter the size of
7395 the host's long. We handle floating-point representations with
7396 up to 192 bits. */
7397 REAL_VALUE_TYPE r;
7398 long tmp[6];
7400 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7401 if (total_bytes > len || total_bytes > 24)
7402 return NULL_TREE;
7403 words = 32 / UNITS_PER_WORD;
7405 memset (tmp, 0, sizeof (tmp));
7406 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7407 bitpos += BITS_PER_UNIT)
7409 byte = (bitpos / BITS_PER_UNIT) & 3;
7410 if (UNITS_PER_WORD < 4)
7412 word = byte / UNITS_PER_WORD;
7413 if (WORDS_BIG_ENDIAN)
7414 word = (words - 1) - word;
7415 offset = word * UNITS_PER_WORD;
7416 if (BYTES_BIG_ENDIAN)
7417 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7418 else
7419 offset += byte % UNITS_PER_WORD;
7421 else
7422 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7423 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7425 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7428 real_from_target (&r, tmp, mode);
7429 return build_real (type, r);
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7437 static tree
7438 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7440 tree etype, rpart, ipart;
7441 int size;
7443 etype = TREE_TYPE (type);
7444 size = GET_MODE_SIZE (TYPE_MODE (etype));
7445 if (size * 2 > len)
7446 return NULL_TREE;
7447 rpart = native_interpret_expr (etype, ptr, size);
7448 if (!rpart)
7449 return NULL_TREE;
7450 ipart = native_interpret_expr (etype, ptr+size, size);
7451 if (!ipart)
7452 return NULL_TREE;
7453 return build_complex (type, rpart, ipart);
7457 /* Subroutine of native_interpret_expr. Interpret the contents of
7458 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7459 If the buffer cannot be interpreted, return NULL_TREE. */
7461 static tree
7462 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7464 tree etype, elem, elements;
7465 int i, size, count;
7467 etype = TREE_TYPE (type);
7468 size = GET_MODE_SIZE (TYPE_MODE (etype));
7469 count = TYPE_VECTOR_SUBPARTS (type);
7470 if (size * count > len)
7471 return NULL_TREE;
7473 elements = NULL_TREE;
7474 for (i = count - 1; i >= 0; i--)
7476 elem = native_interpret_expr (etype, ptr+(i*size), size);
7477 if (!elem)
7478 return NULL_TREE;
7479 elements = tree_cons (NULL_TREE, elem, elements);
7481 return build_vector (type, elements);
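/* Editorial sketch, not part of the original source: vector elements are
   laid out back to back, element i at byte offset i * size. The loop
   walks from the last element down so the tree_cons chain ends up in
   element order. E.g. a 4 x 16-bit vector consumes bytes 0-1, 2-3, 4-5
   and 6-7 for elements 0 through 3, and fails (NULL_TREE) if LEN < 8.  */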
7485 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7486 the buffer PTR of length LEN as a constant of type TYPE. For
7487 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7488 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7489 return NULL_TREE. */
7491 tree
7492 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7494 switch (TREE_CODE (type))
7496 case INTEGER_TYPE:
7497 case ENUMERAL_TYPE:
7498 case BOOLEAN_TYPE:
7499 return native_interpret_int (type, ptr, len);
7501 case REAL_TYPE:
7502 return native_interpret_real (type, ptr, len);
7504 case COMPLEX_TYPE:
7505 return native_interpret_complex (type, ptr, len);
7507 case VECTOR_TYPE:
7508 return native_interpret_vector (type, ptr, len);
7510 default:
7511 return NULL_TREE;
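/* Editorial sketch, not part of the original source: native_encode_expr
   and native_interpret_expr are designed to round-trip a constant
   through its target byte representation:

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     if (len != 0)
       {
         tree back = native_interpret_expr (TREE_TYPE (cst), buf, len);
         /* back, if non-NULL, is equivalent to cst.  */
       }

   This is exactly the pairing fold_view_convert_expr below relies on,
   except that it reinterprets the bytes with a different type.  */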
7516 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7517 TYPE at compile-time. If we're unable to perform the conversion
7518 return NULL_TREE. */
7520 static tree
7521 fold_view_convert_expr (tree type, tree expr)
7523 /* We support up to 512-bit values (for V8DFmode). */
7524 unsigned char buffer[64];
7525 int len;
7527 /* Check that the host and target are sane. */
7528 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7529 return NULL_TREE;
7531 len = native_encode_expr (expr, buffer, sizeof (buffer));
7532 if (len == 0)
7533 return NULL_TREE;
7535 return native_interpret_expr (type, buffer, len);
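/* Editorial sketch, not part of the original source: on a typical IEEE
   target this lets a type pun such as

     VIEW_CONVERT_EXPR<float>(0x3f800000)

   fold to the REAL_CST 1.0f at compile time, by encoding the integer
   constant into its target bytes and reinterpreting them as a float.
   The 64-byte buffer bounds the constant size; anything larger simply
   fails to fold here and is left to the expanders.  */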
7538 /* Build an expression for the address of T. Folds away INDIRECT_REF
7539 to avoid confusing the gimplify process. When IN_FOLD is true
7540 avoid modifications of T. */
7542 static tree
7543 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7545 /* The size of the object is not relevant when talking about its address. */
7546 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7547 t = TREE_OPERAND (t, 0);
7549 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7550 if (TREE_CODE (t) == INDIRECT_REF
7551 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7553 t = TREE_OPERAND (t, 0);
7555 if (TREE_TYPE (t) != ptrtype)
7556 t = build1 (NOP_EXPR, ptrtype, t);
7558 else if (!in_fold)
7560 tree base = t;
7562 while (handled_component_p (base))
7563 base = TREE_OPERAND (base, 0);
7565 if (DECL_P (base))
7566 TREE_ADDRESSABLE (base) = 1;
7568 t = build1 (ADDR_EXPR, ptrtype, t);
7570 else
7571 t = build1 (ADDR_EXPR, ptrtype, t);
7573 return t;
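/* Editorial note, not part of the original source: the INDIRECT_REF
   case above folds the common pattern &*p back to p (possibly with a
   NOP_EXPR to adjust the pointer type) instead of building
   ADDR_EXPR <INDIRECT_REF <p>>, which would needlessly confuse the
   gimplifier.  */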
7576 /* Build an expression for the address of T with type PTRTYPE. This
7577 function modifies the input parameter 'T' by sometimes setting the
7578 TREE_ADDRESSABLE flag. */
7580 tree
7581 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7583 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7586 /* Build an expression for the address of T. This function modifies
7587 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7588 flag. When called from fold functions, use fold_addr_expr instead. */
7590 tree
7591 build_fold_addr_expr (tree t)
7593 return build_fold_addr_expr_with_type_1 (t,
7594 build_pointer_type (TREE_TYPE (t)),
7595 false);
7598 /* Same as build_fold_addr_expr, builds an expression for the address
7599 of T, but avoids touching the input node 't'. Fold functions
7600 should use this version. */
7602 static tree
7603 fold_addr_expr (tree t)
7605 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7607 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7610 /* Fold a unary expression of code CODE and type TYPE with operand
7611 OP0. Return the folded expression if folding is successful.
7612 Otherwise, return NULL_TREE. */
7614 tree
7615 fold_unary (enum tree_code code, tree type, tree op0)
7617 tree tem;
7618 tree arg0;
7619 enum tree_code_class kind = TREE_CODE_CLASS (code);
7621 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7622 && TREE_CODE_LENGTH (code) == 1);
7624 arg0 = op0;
7625 if (arg0)
7627 if (code == NOP_EXPR || code == CONVERT_EXPR
7628 || code == FLOAT_EXPR || code == ABS_EXPR)
7630 /* Don't use STRIP_NOPS, because signedness of argument type
7631 matters. */
7632 STRIP_SIGN_NOPS (arg0);
7634 else
7636 /* Strip any conversions that don't change the mode. This
7637 is safe for every expression, except for a comparison
7638 expression because its signedness is derived from its
7639 operands.
7641 Note that this is done as an internal manipulation within
7642 the constant folder, in order to find the simplest
7643 representation of the arguments so that their form can be
7644 studied. In any case, the appropriate type conversions
7645 should be put back in the tree that will get out of the
7646 constant folder. */
7647 STRIP_NOPS (arg0);
7651 if (TREE_CODE_CLASS (code) == tcc_unary)
7653 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7654 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7655 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7656 else if (TREE_CODE (arg0) == COND_EXPR)
7658 tree arg01 = TREE_OPERAND (arg0, 1);
7659 tree arg02 = TREE_OPERAND (arg0, 2);
7660 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7661 arg01 = fold_build1 (code, type, arg01);
7662 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7663 arg02 = fold_build1 (code, type, arg02);
7664 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7665 arg01, arg02);
7667 /* If this was a conversion, and all we did was to move it
7668 inside the COND_EXPR, bring it back out. But leave it if
7669 it is a conversion from integer to integer and the
7670 result precision is no wider than a word since such a
7671 conversion is cheap and may be optimized away by combine,
7672 while it couldn't if it were outside the COND_EXPR. Then return
7673 so we don't get into an infinite recursion loop taking the
7674 conversion out and then back in. */
7676 if ((code == NOP_EXPR || code == CONVERT_EXPR
7677 || code == NON_LVALUE_EXPR)
7678 && TREE_CODE (tem) == COND_EXPR
7679 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7680 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7681 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7682 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7683 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7684 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7685 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7686 && (INTEGRAL_TYPE_P
7687 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7688 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7689 || flag_syntax_only))
7690 tem = build1 (code, type,
7691 build3 (COND_EXPR,
7692 TREE_TYPE (TREE_OPERAND
7693 (TREE_OPERAND (tem, 1), 0)),
7694 TREE_OPERAND (tem, 0),
7695 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7696 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7697 return tem;
7699 else if (COMPARISON_CLASS_P (arg0))
7701 if (TREE_CODE (type) == BOOLEAN_TYPE)
7703 arg0 = copy_node (arg0);
7704 TREE_TYPE (arg0) = type;
7705 return arg0;
7707 else if (TREE_CODE (type) != INTEGER_TYPE)
7708 return fold_build3 (COND_EXPR, type, arg0,
7709 fold_build1 (code, type,
7710 integer_one_node),
7711 fold_build1 (code, type,
7712 integer_zero_node));
7716 switch (code)
7718 case PAREN_EXPR:
7719 /* Re-association barriers around constants and other re-association
7720 barriers can be removed. */
7721 if (CONSTANT_CLASS_P (op0)
7722 || TREE_CODE (op0) == PAREN_EXPR)
7723 return fold_convert (type, op0);
7724 return NULL_TREE;
7726 CASE_CONVERT:
7727 case FLOAT_EXPR:
7728 case FIX_TRUNC_EXPR:
7729 if (TREE_TYPE (op0) == type)
7730 return op0;
7732 /* If we have (type) (a CMP b) and type is an integral type, return
7733 new expression involving the new type. */
7734 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7735 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7736 TREE_OPERAND (op0, 1));
7738 /* Handle cases of two conversions in a row. */
7739 if (CONVERT_EXPR_P (op0))
7741 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7742 tree inter_type = TREE_TYPE (op0);
7743 int inside_int = INTEGRAL_TYPE_P (inside_type);
7744 int inside_ptr = POINTER_TYPE_P (inside_type);
7745 int inside_float = FLOAT_TYPE_P (inside_type);
7746 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7747 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7748 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7749 int inter_int = INTEGRAL_TYPE_P (inter_type);
7750 int inter_ptr = POINTER_TYPE_P (inter_type);
7751 int inter_float = FLOAT_TYPE_P (inter_type);
7752 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7753 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7754 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7755 int final_int = INTEGRAL_TYPE_P (type);
7756 int final_ptr = POINTER_TYPE_P (type);
7757 int final_float = FLOAT_TYPE_P (type);
7758 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7759 unsigned int final_prec = TYPE_PRECISION (type);
7760 int final_unsignedp = TYPE_UNSIGNED (type);
7762 /* In addition to the cases of two conversions in a row
7763 handled below, if we are converting something to its own
7764 type via an object of identical or wider precision, neither
7765 conversion is needed. */
7766 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7767 && (((inter_int || inter_ptr) && final_int)
7768 || (inter_float && final_float))
7769 && inter_prec >= final_prec)
7770 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7772 /* Likewise, if the intermediate and final types are either both
7773 float or both integer, we don't need the middle conversion if
7774 it is wider than the final type and doesn't change the signedness
7775 (for integers). Avoid this if the final type is a pointer
7776 since then we sometimes need the inner conversion. Likewise if
7777 the outer has a precision not equal to the size of its mode. */
7778 if (((inter_int && inside_int)
7779 || (inter_float && inside_float)
7780 || (inter_vec && inside_vec))
7781 && inter_prec >= inside_prec
7782 && (inter_float || inter_vec
7783 || inter_unsignedp == inside_unsignedp)
7784 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7785 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7786 && ! final_ptr
7787 && (! final_vec || inter_prec == inside_prec))
7788 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7790 /* If we have a sign-extension of a zero-extended value, we can
7791 replace that by a single zero-extension. */
7792 if (inside_int && inter_int && final_int
7793 && inside_prec < inter_prec && inter_prec < final_prec
7794 && inside_unsignedp && !inter_unsignedp)
7795 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7797 /* Two conversions in a row are not needed unless:
7798 - some conversion is floating-point (overstrict for now), or
7799 - some conversion is a vector (overstrict for now), or
7800 - the intermediate type is narrower than both initial and
7801 final, or
7802 - the intermediate type and innermost type differ in signedness,
7803 and the outermost type is wider than the intermediate, or
7804 - the initial type is a pointer type and the precisions of the
7805 intermediate and final types differ, or
7806 - the final type is a pointer type and the precisions of the
7807 initial and intermediate types differ. */
7808 if (! inside_float && ! inter_float && ! final_float
7809 && ! inside_vec && ! inter_vec && ! final_vec
7810 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7811 && ! (inside_int && inter_int
7812 && inter_unsignedp != inside_unsignedp
7813 && inter_prec < final_prec)
7814 && ((inter_unsignedp && inter_prec > inside_prec)
7815 == (final_unsignedp && final_prec > inter_prec))
7816 && ! (inside_ptr && inter_prec != final_prec)
7817 && ! (final_ptr && inside_prec != inter_prec)
7818 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7819 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7820 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
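/* Editorial sketch, not part of the original source: concrete instances
   of the rules above, assuming 8-bit char, 32-bit int and 64-bit long:

     (int) (long) x_int      -> x_int     (widen then convert back:
                                           both conversions dropped)
     (int) (char) x_int      -> unchanged (intermediate type is narrower
                                           than both initial and final)
     (long) (unsigned) x_int -> unchanged (intermediate and innermost
                                           differ in signedness and the
                                           outermost type is wider)  */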
7823 /* Handle (T *)&A.B.C for A being of type T and B and C
7824 living at offset zero. This occurs frequently in
7825 C++ upcasting and then accessing the base. */
7826 if (TREE_CODE (op0) == ADDR_EXPR
7827 && POINTER_TYPE_P (type)
7828 && handled_component_p (TREE_OPERAND (op0, 0)))
7830 HOST_WIDE_INT bitsize, bitpos;
7831 tree offset;
7832 enum machine_mode mode;
7833 int unsignedp, volatilep;
7834 tree base = TREE_OPERAND (op0, 0);
7835 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7836 &mode, &unsignedp, &volatilep, false);
7837 /* If the reference was to a (constant) zero offset, we can use
7838 the address of the base if it has the same base type
7839 as the result type. */
7840 if (! offset && bitpos == 0
7841 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7842 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7843 return fold_convert (type, fold_addr_expr (base));
7846 if ((TREE_CODE (op0) == MODIFY_EXPR
7847 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7848 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7849 /* Detect assigning a bitfield. */
7850 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7851 && DECL_BIT_FIELD
7852 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7854 /* Don't leave an assignment inside a conversion
7855 unless assigning a bitfield. */
7856 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7857 /* First do the assignment, then return converted constant. */
7858 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7859 TREE_NO_WARNING (tem) = 1;
7860 TREE_USED (tem) = 1;
7861 return tem;
7864 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7865 constant (if x has signed type, the sign bit cannot be set
7866 in c). This folds extension into the BIT_AND_EXPR. */
7867 if (INTEGRAL_TYPE_P (type)
7868 && TREE_CODE (type) != BOOLEAN_TYPE
7869 && TREE_CODE (op0) == BIT_AND_EXPR
7870 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7872 tree and = op0;
7873 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7874 int change = 0;
7876 if (TYPE_UNSIGNED (TREE_TYPE (and))
7877 || (TYPE_PRECISION (type)
7878 <= TYPE_PRECISION (TREE_TYPE (and))))
7879 change = 1;
7880 else if (TYPE_PRECISION (TREE_TYPE (and1))
7881 <= HOST_BITS_PER_WIDE_INT
7882 && host_integerp (and1, 1))
7884 unsigned HOST_WIDE_INT cst;
7886 cst = tree_low_cst (and1, 1);
7887 cst &= (HOST_WIDE_INT) -1
7888 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7889 change = (cst == 0);
7890 #ifdef LOAD_EXTEND_OP
7891 if (change
7892 && !flag_syntax_only
7893 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7894 == ZERO_EXTEND))
7896 tree uns = unsigned_type_for (TREE_TYPE (and0));
7897 and0 = fold_convert (uns, and0);
7898 and1 = fold_convert (uns, and1);
7900 #endif
7902 if (change)
7904 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7905 TREE_INT_CST_HIGH (and1), 0,
7906 TREE_OVERFLOW (and1));
7907 return fold_build2 (BIT_AND_EXPR, type,
7908 fold_convert (type, and0), tem);
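/* Editorial sketch, not part of the original source: with 32-bit int
   and 64-bit long, the fold above turns

     (long) (x_int & 0xff)   into   (long) x_int & 0xff

   The mask 0xff has no bits at or above the sign bit of int, so zero-
   and sign-extension of the masked value agree and the extension can
   safely be pushed inside the BIT_AND_EXPR.  */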
7912 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7913 when one of the new casts will fold away. Conservatively we assume
7914 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7915 if (POINTER_TYPE_P (type)
7916 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7917 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7918 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7919 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7921 tree arg00 = TREE_OPERAND (arg0, 0);
7922 tree arg01 = TREE_OPERAND (arg0, 1);
7924 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7925 fold_convert (sizetype, arg01));
7928 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7929 of the same precision, and X has an integer type not narrower than
7930 T1 or T2, i.e. the cast (T2)X isn't an extension. */
7931 if (INTEGRAL_TYPE_P (type)
7932 && TREE_CODE (op0) == BIT_NOT_EXPR
7933 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7934 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7935 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7937 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7938 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7939 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7940 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7943 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7944 type of X and Y (integer types only). */
7945 if (INTEGRAL_TYPE_P (type)
7946 && TREE_CODE (op0) == MULT_EXPR
7947 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7948 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7950 /* Be careful not to introduce new overflows. */
7951 tree mult_type;
7952 if (TYPE_OVERFLOW_WRAPS (type))
7953 mult_type = type;
7954 else
7955 mult_type = unsigned_type_for (type);
7957 tem = fold_build2 (MULT_EXPR, mult_type,
7958 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7959 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7960 return fold_convert (type, tem);
7963 tem = fold_convert_const (code, type, op0);
7964 return tem ? tem : NULL_TREE;
7966 case FIXED_CONVERT_EXPR:
7967 tem = fold_convert_const (code, type, arg0);
7968 return tem ? tem : NULL_TREE;
7970 case VIEW_CONVERT_EXPR:
7971 if (TREE_TYPE (op0) == type)
7972 return op0;
7973 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7974 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7976 /* For integral conversions with the same precision or pointer
7977 conversions use a NOP_EXPR instead. */
7978 if ((INTEGRAL_TYPE_P (type)
7979 || POINTER_TYPE_P (type))
7980 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7981 || POINTER_TYPE_P (TREE_TYPE (op0)))
7982 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7983 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7984 a sub-type to its base type as generated by the Ada FE. */
7985 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7986 && TREE_TYPE (TREE_TYPE (op0))))
7987 return fold_convert (type, op0);
7989 /* Strip inner integral conversions that do not change the precision. */
7990 if (CONVERT_EXPR_P (op0)
7991 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7992 || POINTER_TYPE_P (TREE_TYPE (op0)))
7993 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7994 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7995 && (TYPE_PRECISION (TREE_TYPE (op0))
7996 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7997 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7999 return fold_view_convert_expr (type, op0);
8001 case NEGATE_EXPR:
8002 tem = fold_negate_expr (arg0);
8003 if (tem)
8004 return fold_convert (type, tem);
8005 return NULL_TREE;
8007 case ABS_EXPR:
8008 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8009 return fold_abs_const (arg0, type);
8010 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8011 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8012 /* Convert fabs((double)float) into (double)fabsf(float). */
8013 else if (TREE_CODE (arg0) == NOP_EXPR
8014 && TREE_CODE (type) == REAL_TYPE)
8016 tree targ0 = strip_float_extensions (arg0);
8017 if (targ0 != arg0)
8018 return fold_convert (type, fold_build1 (ABS_EXPR,
8019 TREE_TYPE (targ0),
8020 targ0));
8022 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8023 else if (TREE_CODE (arg0) == ABS_EXPR)
8024 return arg0;
8025 else if (tree_expr_nonnegative_p (arg0))
8026 return arg0;
8028 /* Strip sign ops from argument. */
8029 if (TREE_CODE (type) == REAL_TYPE)
8031 tem = fold_strip_sign_ops (arg0);
8032 if (tem)
8033 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8035 return NULL_TREE;
8037 case CONJ_EXPR:
8038 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8039 return fold_convert (type, arg0);
8040 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8042 tree itype = TREE_TYPE (type);
8043 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8044 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8045 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8047 if (TREE_CODE (arg0) == COMPLEX_CST)
8049 tree itype = TREE_TYPE (type);
8050 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8051 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8052 return build_complex (type, rpart, negate_expr (ipart));
8054 if (TREE_CODE (arg0) == CONJ_EXPR)
8055 return fold_convert (type, TREE_OPERAND (arg0, 0));
8056 return NULL_TREE;
8058 case BIT_NOT_EXPR:
8059 if (TREE_CODE (arg0) == INTEGER_CST)
8060 return fold_not_const (arg0, type);
8061 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8062 return fold_convert (type, TREE_OPERAND (arg0, 0));
8063 /* Convert ~ (-A) to A - 1. */
8064 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8065 return fold_build2 (MINUS_EXPR, type,
8066 fold_convert (type, TREE_OPERAND (arg0, 0)),
8067 build_int_cst (type, 1));
8068 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8069 else if (INTEGRAL_TYPE_P (type)
8070 && ((TREE_CODE (arg0) == MINUS_EXPR
8071 && integer_onep (TREE_OPERAND (arg0, 1)))
8072 || (TREE_CODE (arg0) == PLUS_EXPR
8073 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8074 return fold_build1 (NEGATE_EXPR, type,
8075 fold_convert (type, TREE_OPERAND (arg0, 0)));
8076 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8077 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8078 && (tem = fold_unary (BIT_NOT_EXPR, type,
8079 fold_convert (type,
8080 TREE_OPERAND (arg0, 0)))))
8081 return fold_build2 (BIT_XOR_EXPR, type, tem,
8082 fold_convert (type, TREE_OPERAND (arg0, 1)));
8083 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8084 && (tem = fold_unary (BIT_NOT_EXPR, type,
8085 fold_convert (type,
8086 TREE_OPERAND (arg0, 1)))))
8087 return fold_build2 (BIT_XOR_EXPR, type,
8088 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8089 /* Perform BIT_NOT_EXPR on each element individually. */
8090 else if (TREE_CODE (arg0) == VECTOR_CST)
8092 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8093 int count = TYPE_VECTOR_SUBPARTS (type), i;
8095 for (i = 0; i < count; i++)
8097 if (elements)
8099 elem = TREE_VALUE (elements);
8100 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8101 if (elem == NULL_TREE)
8102 break;
8103 elements = TREE_CHAIN (elements);
8105 else
8106 elem = build_int_cst (TREE_TYPE (type), -1);
8107 list = tree_cons (NULL_TREE, elem, list);
8109 if (i == count)
8110 return build_vector (type, nreverse (list));
8113 return NULL_TREE;
8115 case TRUTH_NOT_EXPR:
8116 /* The argument to invert_truthvalue must have Boolean type. */
8117 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8118 arg0 = fold_convert (boolean_type_node, arg0);
8120 /* Note that the operand of this must be an int
8121 and its value must be 0 or 1.
8122 ("true" is a fixed value, perhaps depending on the language,
8123 but we don't handle values other than 1 correctly yet.) */
8124 tem = fold_truth_not_expr (arg0);
8125 if (!tem)
8126 return NULL_TREE;
8127 return fold_convert (type, tem);
8129 case REALPART_EXPR:
8130 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8131 return fold_convert (type, arg0);
8132 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8133 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8134 TREE_OPERAND (arg0, 1));
8135 if (TREE_CODE (arg0) == COMPLEX_CST)
8136 return fold_convert (type, TREE_REALPART (arg0));
8137 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8139 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8140 tem = fold_build2 (TREE_CODE (arg0), itype,
8141 fold_build1 (REALPART_EXPR, itype,
8142 TREE_OPERAND (arg0, 0)),
8143 fold_build1 (REALPART_EXPR, itype,
8144 TREE_OPERAND (arg0, 1)));
8145 return fold_convert (type, tem);
8147 if (TREE_CODE (arg0) == CONJ_EXPR)
8149 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8150 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8151 return fold_convert (type, tem);
8153 if (TREE_CODE (arg0) == CALL_EXPR)
8155 tree fn = get_callee_fndecl (arg0);
8156 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8157 switch (DECL_FUNCTION_CODE (fn))
8159 CASE_FLT_FN (BUILT_IN_CEXPI):
8160 fn = mathfn_built_in (type, BUILT_IN_COS);
8161 if (fn)
8162 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8163 break;
8165 default:
8166 break;
8169 return NULL_TREE;
8171 case IMAGPART_EXPR:
8172 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8173 return fold_convert (type, integer_zero_node);
8174 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8175 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8176 TREE_OPERAND (arg0, 0));
8177 if (TREE_CODE (arg0) == COMPLEX_CST)
8178 return fold_convert (type, TREE_IMAGPART (arg0));
8179 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8181 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8182 tem = fold_build2 (TREE_CODE (arg0), itype,
8183 fold_build1 (IMAGPART_EXPR, itype,
8184 TREE_OPERAND (arg0, 0)),
8185 fold_build1 (IMAGPART_EXPR, itype,
8186 TREE_OPERAND (arg0, 1)));
8187 return fold_convert (type, tem);
8189 if (TREE_CODE (arg0) == CONJ_EXPR)
8191 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8192 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8193 return fold_convert (type, negate_expr (tem));
8195 if (TREE_CODE (arg0) == CALL_EXPR)
8197 tree fn = get_callee_fndecl (arg0);
8198 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8199 switch (DECL_FUNCTION_CODE (fn))
8201 CASE_FLT_FN (BUILT_IN_CEXPI):
8202 fn = mathfn_built_in (type, BUILT_IN_SIN);
8203 if (fn)
8204 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8205 break;
8207 default:
8208 break;
8211 return NULL_TREE;
8213 default:
8214 return NULL_TREE;
8215 } /* switch (code) */
8218 /* Fold a binary expression of code CODE and type TYPE with operands
8219 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8220 Return the folded expression if folding is successful. Otherwise,
8221 return NULL_TREE. */
8223 static tree
8224 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8226 enum tree_code compl_code;
8228 if (code == MIN_EXPR)
8229 compl_code = MAX_EXPR;
8230 else if (code == MAX_EXPR)
8231 compl_code = MIN_EXPR;
8232 else
8233 gcc_unreachable ();
8235 /* MIN (MAX (a, b), b) == b. */
8236 if (TREE_CODE (op0) == compl_code
8237 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8238 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8240 /* MIN (MAX (b, a), b) == b. */
8241 if (TREE_CODE (op0) == compl_code
8242 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8243 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8244 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8246 /* MIN (a, MAX (a, b)) == a. */
8247 if (TREE_CODE (op1) == compl_code
8248 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8249 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8250 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8252 /* MIN (a, MAX (b, a)) == a. */
8253 if (TREE_CODE (op1) == compl_code
8254 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8255 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8256 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8258 return NULL_TREE;
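/* Editorial sketch, not part of the original source: with
   code == MIN_EXPR and therefore compl_code == MAX_EXPR, the first
   rule above rewrites

     MIN_EXPR <MAX_EXPR <a, b>, b>   ->   b

   which is valid because max(a,b) >= b, so the outer min is always b.
   omit_one_operand keeps a's side effects alive while discarding its
   value.  */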
8261 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8262 by changing CODE to reduce the magnitude of constants involved in
8263 ARG0 of the comparison.
8264 Returns a canonicalized comparison tree if a simplification was
8265 possible, otherwise returns NULL_TREE.
8266 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8267 valid if signed overflow is undefined. */
8269 static tree
8270 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8271 tree arg0, tree arg1,
8272 bool *strict_overflow_p)
8274 enum tree_code code0 = TREE_CODE (arg0);
8275 tree t, cst0 = NULL_TREE;
8276 int sgn0;
8277 bool swap = false;
8279 /* Match A +- CST code arg1 and CST code arg1. */
8280 if (!(((code0 == MINUS_EXPR
8281 || code0 == PLUS_EXPR)
8282 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8283 || code0 == INTEGER_CST))
8284 return NULL_TREE;
8286 /* Identify the constant in arg0 and its sign. */
8287 if (code0 == INTEGER_CST)
8288 cst0 = arg0;
8289 else
8290 cst0 = TREE_OPERAND (arg0, 1);
8291 sgn0 = tree_int_cst_sgn (cst0);
8293 /* Overflowed constants and zero will cause problems. */
8294 if (integer_zerop (cst0)
8295 || TREE_OVERFLOW (cst0))
8296 return NULL_TREE;
8298 /* See if we can reduce the magnitude of the constant in
8299 arg0 by changing the comparison code. */
8300 if (code0 == INTEGER_CST)
8302 /* CST <= arg1 -> CST-1 < arg1. */
8303 if (code == LE_EXPR && sgn0 == 1)
8304 code = LT_EXPR;
8305 /* -CST < arg1 -> -CST-1 <= arg1. */
8306 else if (code == LT_EXPR && sgn0 == -1)
8307 code = LE_EXPR;
8308 /* CST > arg1 -> CST-1 >= arg1. */
8309 else if (code == GT_EXPR && sgn0 == 1)
8310 code = GE_EXPR;
8311 /* -CST >= arg1 -> -CST-1 > arg1. */
8312 else if (code == GE_EXPR && sgn0 == -1)
8313 code = GT_EXPR;
8314 else
8315 return NULL_TREE;
8316 /* arg1 code' CST' might be more canonical. */
8317 swap = true;
8319 else
8321 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8322 if (code == LT_EXPR
8323 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8324 code = LE_EXPR;
8325 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8326 else if (code == GT_EXPR
8327 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8328 code = GE_EXPR;
8329 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8330 else if (code == LE_EXPR
8331 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8332 code = LT_EXPR;
8333 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8334 else if (code == GE_EXPR
8335 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8336 code = GT_EXPR;
8337 else
8338 return NULL_TREE;
8339 *strict_overflow_p = true;
8342 /* Now build the constant reduced in magnitude. */
8343 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8344 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8345 if (code0 != INTEGER_CST)
8346 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8348 /* If swapping might yield a more canonical form, do so. */
8349 if (swap)
8350 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8351 else
8352 return fold_build2 (code, type, t, arg1);
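/* Editorial sketch, not part of the original source: for signed x with
   undefined overflow, the A - CST case above rewrites

     x - 5 < y   ->   x - 4 <= y

   reducing the magnitude of the constant by one while flipping the
   comparison from strict to non-strict. The INTEGER_CST case instead
   swaps the operands, e.g. 5 <= y becomes y > 4.  */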
8355 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8356 overflow further. Try to decrease the magnitude of constants involved
8357 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8358 and put sole constants at the second argument position.
8359 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8361 static tree
8362 maybe_canonicalize_comparison (enum tree_code code, tree type,
8363 tree arg0, tree arg1)
8365 tree t;
8366 bool strict_overflow_p;
8367 const char * const warnmsg = G_("assuming signed overflow does not occur "
8368 "when reducing constant in comparison");
8370 /* In principle pointers also have undefined overflow behavior,
8371 but that causes problems elsewhere. */
8372 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8373 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8374 return NULL_TREE;
8376 /* Try canonicalization by simplifying arg0. */
8377 strict_overflow_p = false;
8378 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8379 &strict_overflow_p);
8380 if (t)
8382 if (strict_overflow_p)
8383 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8384 return t;
8387 /* Try canonicalization by simplifying arg1 using the swapped
8388 comparison. */
8389 code = swap_tree_comparison (code);
8390 strict_overflow_p = false;
8391 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8392 &strict_overflow_p);
8393 if (t && strict_overflow_p)
8394 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8395 return t;
8398 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8399 space. This is used to avoid issuing overflow warnings for
8400 expressions like &p->x which cannot wrap. */
8402 static bool
8403 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8405 unsigned HOST_WIDE_INT offset_low, total_low;
8406 HOST_WIDE_INT size, offset_high, total_high;
8408 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8409 return true;
8411 if (bitpos < 0)
8412 return true;
8414 if (offset == NULL_TREE)
8416 offset_low = 0;
8417 offset_high = 0;
8419 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8420 return true;
8421 else
8423 offset_low = TREE_INT_CST_LOW (offset);
8424 offset_high = TREE_INT_CST_HIGH (offset);
8427 if (add_double_with_sign (offset_low, offset_high,
8428 bitpos / BITS_PER_UNIT, 0,
8429 &total_low, &total_high,
8430 true))
8431 return true;
8433 if (total_high != 0)
8434 return true;
8436 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8437 if (size <= 0)
8438 return true;
8440 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8441 array. */
8442 if (TREE_CODE (base) == ADDR_EXPR)
8444 HOST_WIDE_INT base_size;
8446 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8447 if (base_size > 0 && size < base_size)
8448 size = base_size;
8451 return total_low > (unsigned HOST_WIDE_INT) size;
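/* Editorial sketch, not part of the original source: for a pointer
   expression like &p->x with p of type struct S *, BASE is p, OFFSET
   is the byte offset of x and BITPOS its bit position. The sum
   offset + bitpos / BITS_PER_UNIT cannot wrap around the address space
   as long as it stays within the size of *p (or of the underlying
   object for an ADDR_EXPR), which is what the final comparison
   checks.  */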
8454 /* Subroutine of fold_binary. This routine performs all of the
8455 transformations that are common to the equality/inequality
8456 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8457 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8458 fold_binary itself should go through fold_binary. Fold a comparison with
8459 tree code CODE and type TYPE with operands OP0 and OP1. Return
8460 the folded comparison or NULL_TREE. */
8462 static tree
8463 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8465 tree arg0, arg1, tem;
8467 arg0 = op0;
8468 arg1 = op1;
8470 STRIP_SIGN_NOPS (arg0);
8471 STRIP_SIGN_NOPS (arg1);
8473 tem = fold_relational_const (code, type, arg0, arg1);
8474 if (tem != NULL_TREE)
8475 return tem;
8477 /* If one arg is a real or integer constant, put it last. */
8478 if (tree_swap_operands_p (arg0, arg1, true))
8479 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8481 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8482 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8483 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8484 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8485 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8486 && (TREE_CODE (arg1) == INTEGER_CST
8487 && !TREE_OVERFLOW (arg1)))
8489 tree const1 = TREE_OPERAND (arg0, 1);
8490 tree const2 = arg1;
8491 tree variable = TREE_OPERAND (arg0, 0);
8492 tree lhs;
8493 int lhs_add;
8494 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8496 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8497 TREE_TYPE (arg1), const2, const1);
8499 /* If the constant operation overflowed this can be
8500 simplified as a comparison against INT_MAX/INT_MIN. */
8501 if (TREE_CODE (lhs) == INTEGER_CST
8502 && TREE_OVERFLOW (lhs))
8504 int const1_sgn = tree_int_cst_sgn (const1);
8505 enum tree_code code2 = code;
8507 /* Get the sign of the constant on the lhs if the
8508 operation were VARIABLE + CONST1. */
8509 if (TREE_CODE (arg0) == MINUS_EXPR)
8510 const1_sgn = -const1_sgn;
8512 /* The sign of the constant determines if we overflowed
8513 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8514 Canonicalize to the INT_MIN overflow by swapping the comparison
8515 if necessary. */
8516 if (const1_sgn == -1)
8517 code2 = swap_tree_comparison (code);
8519 /* We now can look at the canonicalized case
8520 VARIABLE + 1 CODE2 INT_MIN
8521 and decide on the result. */
8522 if (code2 == LT_EXPR
8523 || code2 == LE_EXPR
8524 || code2 == EQ_EXPR)
8525 return omit_one_operand (type, boolean_false_node, variable);
8526 else if (code2 == NE_EXPR
8527 || code2 == GE_EXPR
8528 || code2 == GT_EXPR)
8529 return omit_one_operand (type, boolean_true_node, variable);
8532 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8533 && (TREE_CODE (lhs) != INTEGER_CST
8534 || !TREE_OVERFLOW (lhs)))
8536 fold_overflow_warning (("assuming signed overflow does not occur "
8537 "when changing X +- C1 cmp C2 to "
8538 "X cmp C1 +- C2"),
8539 WARN_STRICT_OVERFLOW_COMPARISON);
8540 return fold_build2 (code, type, variable, lhs);
8544 /* For comparisons of pointers we can decompose it to a compile time
8545 comparison of the base objects and the offsets into the object.
8546 This requires at least one operand being an ADDR_EXPR or a
8547 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8548 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8549 && (TREE_CODE (arg0) == ADDR_EXPR
8550 || TREE_CODE (arg1) == ADDR_EXPR
8551 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8552 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8554 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8555 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8556 enum machine_mode mode;
8557 int volatilep, unsignedp;
8558 bool indirect_base0 = false, indirect_base1 = false;
8560 /* Get base and offset for the access. Strip ADDR_EXPR for
8561 get_inner_reference, but put it back by stripping INDIRECT_REF
8562 off the base object if possible. indirect_baseN will be true
8563 if baseN is not an address but refers to the object itself. */
8564 base0 = arg0;
8565 if (TREE_CODE (arg0) == ADDR_EXPR)
8567 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8568 &bitsize, &bitpos0, &offset0, &mode,
8569 &unsignedp, &volatilep, false);
8570 if (TREE_CODE (base0) == INDIRECT_REF)
8571 base0 = TREE_OPERAND (base0, 0);
8572 else
8573 indirect_base0 = true;
8575 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8577 base0 = TREE_OPERAND (arg0, 0);
8578 offset0 = TREE_OPERAND (arg0, 1);
8581 base1 = arg1;
8582 if (TREE_CODE (arg1) == ADDR_EXPR)
8584 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8585 &bitsize, &bitpos1, &offset1, &mode,
8586 &unsignedp, &volatilep, false);
8587 if (TREE_CODE (base1) == INDIRECT_REF)
8588 base1 = TREE_OPERAND (base1, 0);
8589 else
8590 indirect_base1 = true;
8592 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8594 base1 = TREE_OPERAND (arg1, 0);
8595 offset1 = TREE_OPERAND (arg1, 1);
8598 /* If we have equivalent bases we might be able to simplify. */
8599 if (indirect_base0 == indirect_base1
8600 && operand_equal_p (base0, base1, 0))
8602 /* We can fold this expression to a constant if the non-constant
8603 offset parts are equal. */
8604 if ((offset0 == offset1
8605 || (offset0 && offset1
8606 && operand_equal_p (offset0, offset1, 0)))
8607 && (code == EQ_EXPR
8608 || code == NE_EXPR
8609 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8612 if (code != EQ_EXPR
8613 && code != NE_EXPR
8614 && bitpos0 != bitpos1
8615 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8616 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8617 fold_overflow_warning (("assuming pointer wraparound does not "
8618 "occur when comparing P +- C1 with "
8619 "P +- C2"),
8620 WARN_STRICT_OVERFLOW_CONDITIONAL);
8622 switch (code)
8624 case EQ_EXPR:
8625 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8626 case NE_EXPR:
8627 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8628 case LT_EXPR:
8629 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8630 case LE_EXPR:
8631 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8632 case GE_EXPR:
8633 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8634 case GT_EXPR:
8635 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8636 default:;
8639 /* We can simplify the comparison to a comparison of the variable
8640 offset parts if the constant offset parts are equal.
8641 Be careful to use signed size type here because otherwise we
8642 mess with array offsets in the wrong way. This is possible
8643 because pointer arithmetic is restricted to remain within an
8644 object, and overflow on pointer differences is undefined as of
8645 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8646 else if (bitpos0 == bitpos1
8647 && ((code == EQ_EXPR || code == NE_EXPR)
8648 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8650 tree signed_size_type_node;
8651 signed_size_type_node = signed_type_for (size_type_node);
8653 /* By converting to the signed size type we cover middle-end pointer
8654 arithmetic, which operates on unsigned pointer types of size
8655 type size, and ARRAY_REF offsets, which are properly sign- or
8656 zero-extended from their type in case it is narrower than
8657 size type. */
8658 if (offset0 == NULL_TREE)
8659 offset0 = build_int_cst (signed_size_type_node, 0);
8660 else
8661 offset0 = fold_convert (signed_size_type_node, offset0);
8662 if (offset1 == NULL_TREE)
8663 offset1 = build_int_cst (signed_size_type_node, 0);
8664 else
8665 offset1 = fold_convert (signed_size_type_node, offset1);
8667 if (code != EQ_EXPR
8668 && code != NE_EXPR
8669 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8670 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8671 fold_overflow_warning (("assuming pointer wraparound does not "
8672 "occur when comparing P +- C1 with "
8673 "P +- C2"),
8674 WARN_STRICT_OVERFLOW_COMPARISON);
8676 return fold_build2 (code, type, offset0, offset1);
8679 /* For non-equal bases we can simplify if they are addresses
8680 of local binding decls or constants. */
8681 else if (indirect_base0 && indirect_base1
8682 /* We know that !operand_equal_p (base0, base1, 0)
8683 because the if condition was false. But make
8684 sure two decls are not the same. */
8685 && base0 != base1
8686 && TREE_CODE (arg0) == ADDR_EXPR
8687 && TREE_CODE (arg1) == ADDR_EXPR
8688 && (((TREE_CODE (base0) == VAR_DECL
8689 || TREE_CODE (base0) == PARM_DECL)
8690 && (targetm.binds_local_p (base0)
8691 || CONSTANT_CLASS_P (base1)))
8692 || CONSTANT_CLASS_P (base0))
8693 && (((TREE_CODE (base1) == VAR_DECL
8694 || TREE_CODE (base1) == PARM_DECL)
8695 && (targetm.binds_local_p (base1)
8696 || CONSTANT_CLASS_P (base0)))
8697 || CONSTANT_CLASS_P (base1)))
8699 if (code == EQ_EXPR)
8700 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8701 else if (code == NE_EXPR)
8702 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8704 /* For equal offsets we can simplify to a comparison of the
8705 base addresses. */
8706 else if (bitpos0 == bitpos1
8707 && (indirect_base0
8708 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8709 && (indirect_base1
8710 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8711 && ((offset0 == offset1)
8712 || (offset0 && offset1
8713 && operand_equal_p (offset0, offset1, 0))))
8715 if (indirect_base0)
8716 base0 = fold_addr_expr (base0);
8717 if (indirect_base1)
8718 base1 = fold_addr_expr (base1);
8719 return fold_build2 (code, type, base0, base1);
8723 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8724 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8725 the resulting offset is smaller in absolute value than the
8726 original one. */
8727 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8728 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8729 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8730 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8731 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8732 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8733 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8735 tree const1 = TREE_OPERAND (arg0, 1);
8736 tree const2 = TREE_OPERAND (arg1, 1);
8737 tree variable1 = TREE_OPERAND (arg0, 0);
8738 tree variable2 = TREE_OPERAND (arg1, 0);
8739 tree cst;
8740 const char * const warnmsg = G_("assuming signed overflow does not "
8741 "occur when combining constants around "
8742 "a comparison");
8744 /* Put the constant on the side where it doesn't overflow and is
8745 of lower absolute value than before. */
8746 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8747 ? MINUS_EXPR : PLUS_EXPR,
8748 const2, const1, 0);
8749 if (!TREE_OVERFLOW (cst)
8750 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8752 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8753 return fold_build2 (code, type,
8754 variable1,
8755 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8756 variable2, cst));
8759 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8760 ? MINUS_EXPR : PLUS_EXPR,
8761 const1, const2, 0);
8762 if (!TREE_OVERFLOW (cst)
8763 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8765 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8766 return fold_build2 (code, type,
8767 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8768 variable1, cst),
8769 variable2);
8773 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8774 signed arithmetic case. That form is created by the compiler
8775 often enough for folding it to be of value. One example is in
8776 computing loop trip counts after Operator Strength Reduction. */
8777 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8778 && TREE_CODE (arg0) == MULT_EXPR
8779 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8780 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8781 && integer_zerop (arg1))
8783 tree const1 = TREE_OPERAND (arg0, 1);
8784 tree const2 = arg1; /* zero */
8785 tree variable1 = TREE_OPERAND (arg0, 0);
8786 enum tree_code cmp_code = code;
8788 gcc_assert (!integer_zerop (const1));
8790 fold_overflow_warning (("assuming signed overflow does not occur when "
8791 "eliminating multiplication in comparison "
8792 "with zero"),
8793 WARN_STRICT_OVERFLOW_COMPARISON);
8795 /* If const1 is negative we swap the sense of the comparison. */
8796 if (tree_int_cst_sgn (const1) < 0)
8797 cmp_code = swap_tree_comparison (cmp_code);
8799 return fold_build2 (cmp_code, type, variable1, const2);
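/* Editorial sketch, not part of the original source: under undefined
   signed overflow the transform above gives

     x * 4 < 0    ->   x < 0
     x * -4 < 0   ->   x > 0

   multiplying by a nonzero constant cannot change whether the product
   compares against zero, only (for a negative constant) the direction
   of the comparison.  */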
8802 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8803 if (tem)
8804 return tem;
8806 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8808 tree targ0 = strip_float_extensions (arg0);
8809 tree targ1 = strip_float_extensions (arg1);
8810 tree newtype = TREE_TYPE (targ0);
8812 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8813 newtype = TREE_TYPE (targ1);
8815 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8816 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8817 return fold_build2 (code, type, fold_convert (newtype, targ0),
8818 fold_convert (newtype, targ1));
8820 /* (-a) CMP (-b) -> b CMP a */
8821 if (TREE_CODE (arg0) == NEGATE_EXPR
8822 && TREE_CODE (arg1) == NEGATE_EXPR)
8823 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8824 TREE_OPERAND (arg0, 0));
8826 if (TREE_CODE (arg1) == REAL_CST)
8828 REAL_VALUE_TYPE cst;
8829 cst = TREE_REAL_CST (arg1);
8831 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8832 if (TREE_CODE (arg0) == NEGATE_EXPR)
8833 return fold_build2 (swap_tree_comparison (code), type,
8834 TREE_OPERAND (arg0, 0),
8835 build_real (TREE_TYPE (arg1),
8836 REAL_VALUE_NEGATE (cst)));
8838 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8839 /* a CMP (-0) -> a CMP 0 */
8840 if (REAL_VALUE_MINUS_ZERO (cst))
8841 return fold_build2 (code, type, arg0,
8842 build_real (TREE_TYPE (arg1), dconst0));
8844 /* x != NaN is always true, other ops are always false. */
8845 if (REAL_VALUE_ISNAN (cst)
8846 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8848 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8849 return omit_one_operand (type, tem, arg0);
8852 /* Fold comparisons against infinity. */
8853 if (REAL_VALUE_ISINF (cst))
8855 tem = fold_inf_compare (code, type, arg0, arg1);
8856 if (tem != NULL_TREE)
8857 return tem;
8861 /* If this is a comparison of a real constant with a PLUS_EXPR
8862 or a MINUS_EXPR of a real constant, we can convert it into a
8863 comparison with a revised real constant as long as no overflow
8864 occurs when unsafe_math_optimizations are enabled. */
8865 if (flag_unsafe_math_optimizations
8866 && TREE_CODE (arg1) == REAL_CST
8867 && (TREE_CODE (arg0) == PLUS_EXPR
8868 || TREE_CODE (arg0) == MINUS_EXPR)
8869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8870 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8871 ? MINUS_EXPR : PLUS_EXPR,
8872 arg1, TREE_OPERAND (arg0, 1), 0))
8873 && !TREE_OVERFLOW (tem))
8874 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8876 /* Likewise, we can simplify a comparison of a real constant with
8877 a MINUS_EXPR whose first operand is also a real constant, i.e.
8878 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8879 floating-point types only if -fassociative-math is set. */
8880 if (flag_associative_math
8881 && TREE_CODE (arg1) == REAL_CST
8882 && TREE_CODE (arg0) == MINUS_EXPR
8883 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8884 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8885 arg1, 0))
8886 && !TREE_OVERFLOW (tem))
8887 return fold_build2 (swap_tree_comparison (code), type,
8888 TREE_OPERAND (arg0, 1), tem);
8890 /* Fold comparisons against built-in math functions. */
8891 if (TREE_CODE (arg1) == REAL_CST
8892 && flag_unsafe_math_optimizations
8893 && ! flag_errno_math)
8895 enum built_in_function fcode = builtin_mathfn_code (arg0);
8897 if (fcode != END_BUILTINS)
8899 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8900 if (tem != NULL_TREE)
8901 return tem;
8906 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8907 && CONVERT_EXPR_P (arg0))
8909 /* If we are widening one operand of an integer comparison,
8910 see if the other operand is similarly being widened. Perhaps we
8911 can do the comparison in the narrower type. */
8912 tem = fold_widened_comparison (code, type, arg0, arg1);
8913 if (tem)
8914 return tem;
8916 /* Or if we are changing signedness. */
8917 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8918 if (tem)
8919 return tem;
8922 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8923 constant, we can simplify it. */
8924 if (TREE_CODE (arg1) == INTEGER_CST
8925 && (TREE_CODE (arg0) == MIN_EXPR
8926 || TREE_CODE (arg0) == MAX_EXPR)
8927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8929 tem = optimize_minmax_comparison (code, type, op0, op1);
8930 if (tem)
8931 return tem;
8934 /* Simplify comparison of something with itself. (For IEEE
8935 floating-point, we can only do some of these simplifications.) */
8936 if (operand_equal_p (arg0, arg1, 0))
8938 switch (code)
8940 case EQ_EXPR:
8941 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8942 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8943 return constant_boolean_node (1, type);
8944 break;
8946 case GE_EXPR:
8947 case LE_EXPR:
8948 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8949 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8950 return constant_boolean_node (1, type);
8951 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8953 case NE_EXPR:
8954 /* For NE, we can only do this simplification if integer
8955 or we don't honor IEEE floating point NaNs. */
8956 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8957 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8958 break;
8959 /* ... fall through ... */
8960 case GT_EXPR:
8961 case LT_EXPR:
8962 return constant_boolean_node (0, type);
8963 default:
8964 gcc_unreachable ();
8968 /* If we are comparing an expression that just has comparisons
8969 of two integer values, arithmetic expressions of those comparisons,
8970 and constants, we can simplify it. There are only three cases
8971 to check: the two values can either be equal, the first can be
8972 greater, or the second can be greater. Fold the expression for
8973 those three values. Since each value must be 0 or 1, we have
8974 eight possibilities, each of which corresponds to the constant 0
8975 or 1 or one of the six possible comparisons.
8977 This handles common cases like (a > b) == 0 but also handles
8978 expressions like ((x > y) - (y > x)) > 0, which supposedly
8979 occur in macroized code. */
8981 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8983 tree cval1 = 0, cval2 = 0;
8984 int save_p = 0;
8986 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8987 /* Don't handle degenerate cases here; they should already
8988 have been handled anyway. */
8989 && cval1 != 0 && cval2 != 0
8990 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8991 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8992 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8993 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8994 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8995 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8996 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8998 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8999 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9001 /* We can't just pass T to eval_subst in case cval1 or cval2
9002 was the same as ARG1. */
9004 tree high_result
9005 = fold_build2 (code, type,
9006 eval_subst (arg0, cval1, maxval,
9007 cval2, minval),
9008 arg1);
9009 tree equal_result
9010 = fold_build2 (code, type,
9011 eval_subst (arg0, cval1, maxval,
9012 cval2, maxval),
9013 arg1);
9014 tree low_result
9015 = fold_build2 (code, type,
9016 eval_subst (arg0, cval1, minval,
9017 cval2, maxval),
9018 arg1);
9020 /* All three of these results should be 0 or 1. Confirm they are.
9021 Then use those values to select the proper code to use. */
9023 if (TREE_CODE (high_result) == INTEGER_CST
9024 && TREE_CODE (equal_result) == INTEGER_CST
9025 && TREE_CODE (low_result) == INTEGER_CST)
9027 /* Make a 3-bit mask with the high-order bit being the
9028 value for `>', the next for `=', and the low for `<'. */
9029 switch ((integer_onep (high_result) * 4)
9030 + (integer_onep (equal_result) * 2)
9031 + integer_onep (low_result))
9033 case 0:
9034 /* Always false. */
9035 return omit_one_operand (type, integer_zero_node, arg0);
9036 case 1:
9037 code = LT_EXPR;
9038 break;
9039 case 2:
9040 code = EQ_EXPR;
9041 break;
9042 case 3:
9043 code = LE_EXPR;
9044 break;
9045 case 4:
9046 code = GT_EXPR;
9047 break;
9048 case 5:
9049 code = NE_EXPR;
9050 break;
9051 case 6:
9052 code = GE_EXPR;
9053 break;
9054 case 7:
9055 /* Always true. */
9056 return omit_one_operand (type, integer_one_node, arg0);
9059 if (save_p)
9060 return save_expr (build2 (code, type, cval1, cval2));
9061 return fold_build2 (code, type, cval1, cval2);
9066 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9067 into a single range test. */
9068 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9069 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9070 && TREE_CODE (arg1) == INTEGER_CST
9071 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9072 && !integer_zerop (TREE_OPERAND (arg0, 1))
9073 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9074 && !TREE_OVERFLOW (arg1))
9076 tem = fold_div_compare (code, type, arg0, arg1);
9077 if (tem != NULL_TREE)
9078 return tem;
9081 /* Fold ~X op ~Y as Y op X. */
9082 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9083 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9085 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9086 return fold_build2 (code, type,
9087 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9088 TREE_OPERAND (arg0, 0));
9091 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9092 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9093 && TREE_CODE (arg1) == INTEGER_CST)
9095 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9096 return fold_build2 (swap_tree_comparison (code), type,
9097 TREE_OPERAND (arg0, 0),
9098 fold_build1 (BIT_NOT_EXPR, cmp_type,
9099 fold_convert (cmp_type, arg1)));
9102 return NULL_TREE;
9106 /* Subroutine of fold_binary. Optimize complex multiplications of the
9107 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9108 argument EXPR represents the expression "z" of type TYPE. */
9110 static tree
9111 fold_mult_zconjz (tree type, tree expr)
9113 tree itype = TREE_TYPE (type);
9114 tree rpart, ipart, tem;
9116 if (TREE_CODE (expr) == COMPLEX_EXPR)
9118 rpart = TREE_OPERAND (expr, 0);
9119 ipart = TREE_OPERAND (expr, 1);
9121 else if (TREE_CODE (expr) == COMPLEX_CST)
9123 rpart = TREE_REALPART (expr);
9124 ipart = TREE_IMAGPART (expr);
9126 else
9128 expr = save_expr (expr);
9129 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9130 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9133 rpart = save_expr (rpart);
9134 ipart = save_expr (ipart);
9135 tem = fold_build2 (PLUS_EXPR, itype,
9136 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9137 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9138 return fold_build2 (COMPLEX_EXPR, type, tem,
9139 fold_convert (itype, integer_zero_node));
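/* Editorial sketch, not part of the original source: for z = a + bi,

     z * conj(z) = (a + bi)(a - bi) = a*a + b*b + 0i

   so the imaginary part is exactly zero and the real part is the sum
   of squares built above. save_expr guards against evaluating the
   operands of z twice.  */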
9143 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9144 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9145 guarantees that P and N have the same least significant log2(M) bits.
9146 N is not otherwise constrained. In particular, N is not normalized to
9147 0 <= N < M as is common. In general, the precise value of P is unknown.
9148 M is chosen as large as possible such that constant N can be determined.
9150 Returns M and sets *RESIDUE to N. */
9152 static unsigned HOST_WIDE_INT
9153 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9155 enum tree_code code;
9157 *residue = 0;
9159 code = TREE_CODE (expr);
9160 if (code == ADDR_EXPR)
9162 expr = TREE_OPERAND (expr, 0);
9163 if (handled_component_p (expr))
9165 HOST_WIDE_INT bitsize, bitpos;
9166 tree offset;
9167 enum machine_mode mode;
9168 int unsignedp, volatilep;
9170 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9171 &mode, &unsignedp, &volatilep, false);
9172 *residue = bitpos / BITS_PER_UNIT;
9173 if (offset)
9175 if (TREE_CODE (offset) == INTEGER_CST)
9176 *residue += TREE_INT_CST_LOW (offset);
9177 else
9178 /* We don't handle more complicated offset expressions. */
9179 return 1;
9183 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9184 return DECL_ALIGN_UNIT (expr);
9186 else if (code == POINTER_PLUS_EXPR)
9188 tree op0, op1;
9189 unsigned HOST_WIDE_INT modulus;
9190 enum tree_code inner_code;
9192 op0 = TREE_OPERAND (expr, 0);
9193 STRIP_NOPS (op0);
9194 modulus = get_pointer_modulus_and_residue (op0, residue);
9196 op1 = TREE_OPERAND (expr, 1);
9197 STRIP_NOPS (op1);
9198 inner_code = TREE_CODE (op1);
9199 if (inner_code == INTEGER_CST)
9201 *residue += TREE_INT_CST_LOW (op1);
9202 return modulus;
9204 else if (inner_code == MULT_EXPR)
9206 op1 = TREE_OPERAND (op1, 1);
9207 if (TREE_CODE (op1) == INTEGER_CST)
9209 unsigned HOST_WIDE_INT align;
9211 /* Compute the greatest power-of-2 divisor of op1. */
9212 align = TREE_INT_CST_LOW (op1);
9213 align &= -align;
9215 /* If align is non-zero and less than modulus, replace
9216 modulus with align. If align is 0, then either op1 is 0
9217 or the greatest power-of-2 divisor of op1 doesn't fit in an
9218 unsigned HOST_WIDE_INT. In either case, no additional
9219 constraint is imposed. */
9220 if (align)
9221 modulus = MIN (modulus, align);
9223 return modulus;
9228 /* If we get here, we were unable to determine anything useful about the
9229 expression. */
9230 return 1;
9234 /* Fold a binary expression of code CODE and type TYPE with operands
9235 OP0 and OP1. Return the folded expression if folding is
9236 successful. Otherwise, return NULL_TREE. */
9238 tree
9239 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9241 enum tree_code_class kind = TREE_CODE_CLASS (code);
9242 tree arg0, arg1, tem;
9243 tree t1 = NULL_TREE;
9244 bool strict_overflow_p;
9246 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9247 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9248 && TREE_CODE_LENGTH (code) == 2
9249 && op0 != NULL_TREE
9250 && op1 != NULL_TREE);
9252 arg0 = op0;
9253 arg1 = op1;
9255 /* Strip any conversions that don't change the mode. This is
9256 safe for every expression, except for a comparison expression
9257 because its signedness is derived from its operands. So, in
9258 the latter case, only strip conversions that don't change the
9259 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9260 preserved.
9262 Note that this is done as an internal manipulation within the
9263 constant folder, in order to find the simplest representation
9264 of the arguments so that their form can be studied. In any
9265 cases, the appropriate type conversions should be put back in
9266 the tree that will get out of the constant folder. */
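/* For example, with signed chars c1 and c2, stripping the casts from
(unsigned char) c1 < (unsigned char) c2 would turn an unsigned
comparison into a signed one and could change its result, so only
sign-preserving conversions are removed from comparison operands. */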
9268 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9270 STRIP_SIGN_NOPS (arg0);
9271 STRIP_SIGN_NOPS (arg1);
9273 else
9275 STRIP_NOPS (arg0);
9276 STRIP_NOPS (arg1);
9279 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9280 constant but we can't do arithmetic on them. */
9281 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9282 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9283 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9284 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9285 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9286 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9288 if (kind == tcc_binary)
9290 /* Make sure type and arg0 have the same saturating flag. */
9291 gcc_assert (TYPE_SATURATING (type)
9292 == TYPE_SATURATING (TREE_TYPE (arg0)));
9293 tem = const_binop (code, arg0, arg1, 0);
9295 else if (kind == tcc_comparison)
9296 tem = fold_relational_const (code, type, arg0, arg1);
9297 else
9298 tem = NULL_TREE;
9300 if (tem != NULL_TREE)
9302 if (TREE_TYPE (tem) != type)
9303 tem = fold_convert (type, tem);
9304 return tem;
9308 /* If this is a commutative operation, and ARG0 is a constant, move it
9309 to ARG1 to reduce the number of tests below. */
9310 if (commutative_tree_code (code)
9311 && tree_swap_operands_p (arg0, arg1, true))
9312 return fold_build2 (code, type, op1, op0);
9314 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9316 First check for cases where an arithmetic operation is applied to a
9317 compound, conditional, or comparison operation. Push the arithmetic
9318 operation inside the compound or conditional to see if any folding
9319 can then be done. Convert comparison to conditional for this purpose.
9320 This also optimizes non-constant cases that used to be done in
9321 expand_expr.
9323 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9324 where one operand is a truth value and the other is a truth value
9325 or a BIT_AND_EXPR with the constant 1. In that case, the
9326 code below would make the expression more complex. Change it to a
9327 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9328 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
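/* For example, (a < b) | (c == d) is turned into a TRUTH_OR_EXPR of
the two comparisons, and (a < b) != (c == d) into a TRUTH_XOR_EXPR. */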
9330 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9331 || code == EQ_EXPR || code == NE_EXPR)
9332 && ((truth_value_p (TREE_CODE (arg0))
9333 && (truth_value_p (TREE_CODE (arg1))
9334 || (TREE_CODE (arg1) == BIT_AND_EXPR
9335 && integer_onep (TREE_OPERAND (arg1, 1)))))
9336 || (truth_value_p (TREE_CODE (arg1))
9337 && (truth_value_p (TREE_CODE (arg0))
9338 || (TREE_CODE (arg0) == BIT_AND_EXPR
9339 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9341 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9342 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9343 : TRUTH_XOR_EXPR,
9344 boolean_type_node,
9345 fold_convert (boolean_type_node, arg0),
9346 fold_convert (boolean_type_node, arg1));
9348 if (code == EQ_EXPR)
9349 tem = invert_truthvalue (tem);
9351 return fold_convert (type, tem);
9354 if (TREE_CODE_CLASS (code) == tcc_binary
9355 || TREE_CODE_CLASS (code) == tcc_comparison)
9357 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9358 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9359 fold_build2 (code, type,
9360 fold_convert (TREE_TYPE (op0),
9361 TREE_OPERAND (arg0, 1)),
9362 op1));
9363 if (TREE_CODE (arg1) == COMPOUND_EXPR
9364 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9365 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9366 fold_build2 (code, type, op0,
9367 fold_convert (TREE_TYPE (op1),
9368 TREE_OPERAND (arg1, 1))));
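/* For example, (a ? b : c) + d can be rewritten as a ? b + d : c + d,
which may let either arm fold further. */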
9370 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9372 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9373 arg0, arg1,
9374 /*cond_first_p=*/1);
9375 if (tem != NULL_TREE)
9376 return tem;
9379 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9381 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9382 arg1, arg0,
9383 /*cond_first_p=*/0);
9384 if (tem != NULL_TREE)
9385 return tem;
9389 switch (code)
9391 case POINTER_PLUS_EXPR:
9392 /* 0 +p index -> (type)index */
9393 if (integer_zerop (arg0))
9394 return non_lvalue (fold_convert (type, arg1));
9396 /* PTR +p 0 -> PTR */
9397 if (integer_zerop (arg1))
9398 return non_lvalue (fold_convert (type, arg0));
9400 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9401 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9402 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9403 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9404 fold_convert (sizetype, arg1),
9405 fold_convert (sizetype, arg0)));
9407 /* index +p PTR -> PTR +p index */
9408 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9409 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9410 return fold_build2 (POINTER_PLUS_EXPR, type,
9411 fold_convert (type, arg1),
9412 fold_convert (sizetype, arg0));
9414 /* (PTR +p B) +p A -> PTR +p (B + A) */
9415 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9417 tree inner;
9418 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9419 tree arg00 = TREE_OPERAND (arg0, 0);
9420 inner = fold_build2 (PLUS_EXPR, sizetype,
9421 arg01, fold_convert (sizetype, arg1));
9422 return fold_convert (type,
9423 fold_build2 (POINTER_PLUS_EXPR,
9424 TREE_TYPE (arg00), arg00, inner));
9427 /* PTR_CST +p CST -> CST1 */
9428 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9429 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9431 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], where c is the
9432 step (element size) of the array. The loop optimizer sometimes
9433 produces this kind of expression. */
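/* E.g. with 4-byte elements, &a[1] +p 4 * i becomes &a[1 + i]. */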
9434 if (TREE_CODE (arg0) == ADDR_EXPR)
9436 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9437 if (tem)
9438 return fold_convert (type, tem);
9441 return NULL_TREE;
9443 case PLUS_EXPR:
9444 /* PTR + INT -> (INT)(PTR p+ INT) */
9445 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9446 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9447 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9448 TREE_TYPE (arg0),
9449 arg0,
9450 fold_convert (sizetype, arg1)));
9451 /* INT + PTR -> (INT)(PTR p+ INT) */
9452 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9453 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9454 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9455 TREE_TYPE (arg1),
9456 arg1,
9457 fold_convert (sizetype, arg0)));
9458 /* A + (-B) -> A - B */
9459 if (TREE_CODE (arg1) == NEGATE_EXPR)
9460 return fold_build2 (MINUS_EXPR, type,
9461 fold_convert (type, arg0),
9462 fold_convert (type, TREE_OPERAND (arg1, 0)));
9463 /* (-A) + B -> B - A */
9464 if (TREE_CODE (arg0) == NEGATE_EXPR
9465 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9466 return fold_build2 (MINUS_EXPR, type,
9467 fold_convert (type, arg1),
9468 fold_convert (type, TREE_OPERAND (arg0, 0)));
9470 if (INTEGRAL_TYPE_P (type))
9472 /* Convert ~A + 1 to -A. */
9473 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9474 && integer_onep (arg1))
9475 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9477 /* ~X + X is -1. */
9478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9479 && !TYPE_OVERFLOW_TRAPS (type))
9481 tree tem = TREE_OPERAND (arg0, 0);
9483 STRIP_NOPS (tem);
9484 if (operand_equal_p (tem, arg1, 0))
9486 t1 = build_int_cst_type (type, -1);
9487 return omit_one_operand (type, t1, arg1);
9491 /* X + ~X is -1. */
9492 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9493 && !TYPE_OVERFLOW_TRAPS (type))
9495 tree tem = TREE_OPERAND (arg1, 0);
9497 STRIP_NOPS (tem);
9498 if (operand_equal_p (arg0, tem, 0))
9500 t1 = build_int_cst_type (type, -1);
9501 return omit_one_operand (type, t1, arg0);
9505 /* X + (X / CST) * -CST is X % CST. */
9506 if (TREE_CODE (arg1) == MULT_EXPR
9507 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9508 && operand_equal_p (arg0,
9509 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9511 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9512 tree cst1 = TREE_OPERAND (arg1, 1);
9513 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9514 if (sum && integer_zerop (sum))
9515 return fold_convert (type,
9516 fold_build2 (TRUNC_MOD_EXPR,
9517 TREE_TYPE (arg0), arg0, cst0));
9521 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9522 same or one. Make sure type is not saturating.
9523 fold_plusminus_mult_expr will re-associate. */
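/* E.g. x*3 + x*5 can become x*8, and x*3 + x can become x*4. */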
9524 if ((TREE_CODE (arg0) == MULT_EXPR
9525 || TREE_CODE (arg1) == MULT_EXPR)
9526 && !TYPE_SATURATING (type)
9527 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9529 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9530 if (tem)
9531 return tem;
9534 if (! FLOAT_TYPE_P (type))
9536 if (integer_zerop (arg1))
9537 return non_lvalue (fold_convert (type, arg0));
9539 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9540 with a constant, and the two constants have no bits in common,
9541 we should treat this as a BIT_IOR_EXPR since this may produce more
9542 simplifications. */
9543 if (TREE_CODE (arg0) == BIT_AND_EXPR
9544 && TREE_CODE (arg1) == BIT_AND_EXPR
9545 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9546 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9547 && integer_zerop (const_binop (BIT_AND_EXPR,
9548 TREE_OPERAND (arg0, 1),
9549 TREE_OPERAND (arg1, 1), 0)))
9551 code = BIT_IOR_EXPR;
9552 goto bit_ior;
9555 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9556 (plus (plus (mult) (mult)) (foo)) so that we can
9557 take advantage of the factoring cases below. */
9558 if (((TREE_CODE (arg0) == PLUS_EXPR
9559 || TREE_CODE (arg0) == MINUS_EXPR)
9560 && TREE_CODE (arg1) == MULT_EXPR)
9561 || ((TREE_CODE (arg1) == PLUS_EXPR
9562 || TREE_CODE (arg1) == MINUS_EXPR)
9563 && TREE_CODE (arg0) == MULT_EXPR))
9565 tree parg0, parg1, parg, marg;
9566 enum tree_code pcode;
9568 if (TREE_CODE (arg1) == MULT_EXPR)
9569 parg = arg0, marg = arg1;
9570 else
9571 parg = arg1, marg = arg0;
9572 pcode = TREE_CODE (parg);
9573 parg0 = TREE_OPERAND (parg, 0);
9574 parg1 = TREE_OPERAND (parg, 1);
9575 STRIP_NOPS (parg0);
9576 STRIP_NOPS (parg1);
9578 if (TREE_CODE (parg0) == MULT_EXPR
9579 && TREE_CODE (parg1) != MULT_EXPR)
9580 return fold_build2 (pcode, type,
9581 fold_build2 (PLUS_EXPR, type,
9582 fold_convert (type, parg0),
9583 fold_convert (type, marg)),
9584 fold_convert (type, parg1));
9585 if (TREE_CODE (parg0) != MULT_EXPR
9586 && TREE_CODE (parg1) == MULT_EXPR)
9587 return fold_build2 (PLUS_EXPR, type,
9588 fold_convert (type, parg0),
9589 fold_build2 (pcode, type,
9590 fold_convert (type, marg),
9591 fold_convert (type,
9592 parg1)));
9595 else
9597 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9598 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9599 return non_lvalue (fold_convert (type, arg0));
9601 /* Likewise if the operands are reversed. */
9602 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9603 return non_lvalue (fold_convert (type, arg1));
9605 /* Convert X + -C into X - C. */
9606 if (TREE_CODE (arg1) == REAL_CST
9607 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9609 tem = fold_negate_const (arg1, type);
9610 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9611 return fold_build2 (MINUS_EXPR, type,
9612 fold_convert (type, arg0),
9613 fold_convert (type, tem));
9616 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9617 to __complex__ ( x, y ). This is not the same for SNaNs or
9618 if signed zeros are involved. */
9619 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9621 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9623 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9624 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9625 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9626 bool arg0rz = false, arg0iz = false;
9627 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9628 || (arg0i && (arg0iz = real_zerop (arg0i))))
9630 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9631 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9632 if (arg0rz && arg1i && real_zerop (arg1i))
9634 tree rp = arg1r ? arg1r
9635 : build1 (REALPART_EXPR, rtype, arg1);
9636 tree ip = arg0i ? arg0i
9637 : build1 (IMAGPART_EXPR, rtype, arg0);
9638 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9640 else if (arg0iz && arg1r && real_zerop (arg1r))
9642 tree rp = arg0r ? arg0r
9643 : build1 (REALPART_EXPR, rtype, arg0);
9644 tree ip = arg1i ? arg1i
9645 : build1 (IMAGPART_EXPR, rtype, arg1);
9646 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9651 if (flag_unsafe_math_optimizations
9652 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9653 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9654 && (tem = distribute_real_division (code, type, arg0, arg1)))
9655 return tem;
9657 /* Convert x+x into x*2.0. */
9658 if (operand_equal_p (arg0, arg1, 0)
9659 && SCALAR_FLOAT_TYPE_P (type))
9660 return fold_build2 (MULT_EXPR, type, arg0,
9661 build_real (type, dconst2));
9663 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9664 We associate floats only if the user has specified
9665 -fassociative-math. */
9666 if (flag_associative_math
9667 && TREE_CODE (arg1) == PLUS_EXPR
9668 && TREE_CODE (arg0) != MULT_EXPR)
9670 tree tree10 = TREE_OPERAND (arg1, 0);
9671 tree tree11 = TREE_OPERAND (arg1, 1);
9672 if (TREE_CODE (tree11) == MULT_EXPR
9673 && TREE_CODE (tree10) == MULT_EXPR)
9675 tree tree0;
9676 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9677 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9680 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9681 We associate floats only if the user has specified
9682 -fassociative-math. */
9683 if (flag_associative_math
9684 && TREE_CODE (arg0) == PLUS_EXPR
9685 && TREE_CODE (arg1) != MULT_EXPR)
9687 tree tree00 = TREE_OPERAND (arg0, 0);
9688 tree tree01 = TREE_OPERAND (arg0, 1);
9689 if (TREE_CODE (tree01) == MULT_EXPR
9690 && TREE_CODE (tree00) == MULT_EXPR)
9692 tree tree0;
9693 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9694 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9699 bit_rotate:
9700 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9701 is a rotate of A by C1 bits. */
9702 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9703 is a rotate of A by B bits. */
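/* E.g. for unsigned 32-bit A, (A << 3) + (A >> 29) is A rotated
left by 3 bits. */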
9705 enum tree_code code0, code1;
9706 tree rtype;
9707 code0 = TREE_CODE (arg0);
9708 code1 = TREE_CODE (arg1);
9709 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9710 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9711 && operand_equal_p (TREE_OPERAND (arg0, 0),
9712 TREE_OPERAND (arg1, 0), 0)
9713 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9714 TYPE_UNSIGNED (rtype))
9715 /* Only create rotates in complete modes. Other cases are not
9716 expanded properly. */
9717 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9719 tree tree01, tree11;
9720 enum tree_code code01, code11;
9722 tree01 = TREE_OPERAND (arg0, 1);
9723 tree11 = TREE_OPERAND (arg1, 1);
9724 STRIP_NOPS (tree01);
9725 STRIP_NOPS (tree11);
9726 code01 = TREE_CODE (tree01);
9727 code11 = TREE_CODE (tree11);
9728 if (code01 == INTEGER_CST
9729 && code11 == INTEGER_CST
9730 && TREE_INT_CST_HIGH (tree01) == 0
9731 && TREE_INT_CST_HIGH (tree11) == 0
9732 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9733 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9734 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9735 code0 == LSHIFT_EXPR ? tree01 : tree11);
9736 else if (code11 == MINUS_EXPR)
9738 tree tree110, tree111;
9739 tree110 = TREE_OPERAND (tree11, 0);
9740 tree111 = TREE_OPERAND (tree11, 1);
9741 STRIP_NOPS (tree110);
9742 STRIP_NOPS (tree111);
9743 if (TREE_CODE (tree110) == INTEGER_CST
9744 && 0 == compare_tree_int (tree110,
9745 TYPE_PRECISION
9746 (TREE_TYPE (TREE_OPERAND
9747 (arg0, 0))))
9748 && operand_equal_p (tree01, tree111, 0))
9749 return build2 ((code0 == LSHIFT_EXPR
9750 ? LROTATE_EXPR
9751 : RROTATE_EXPR),
9752 type, TREE_OPERAND (arg0, 0), tree01);
9754 else if (code01 == MINUS_EXPR)
9756 tree tree010, tree011;
9757 tree010 = TREE_OPERAND (tree01, 0);
9758 tree011 = TREE_OPERAND (tree01, 1);
9759 STRIP_NOPS (tree010);
9760 STRIP_NOPS (tree011);
9761 if (TREE_CODE (tree010) == INTEGER_CST
9762 && 0 == compare_tree_int (tree010,
9763 TYPE_PRECISION
9764 (TREE_TYPE (TREE_OPERAND
9765 (arg0, 0))))
9766 && operand_equal_p (tree11, tree011, 0))
9767 return build2 ((code0 != LSHIFT_EXPR
9768 ? LROTATE_EXPR
9769 : RROTATE_EXPR),
9770 type, TREE_OPERAND (arg0, 0), tree11);
9775 associate:
9776 /* In most languages, we can't associate operations on floats
9777 through parentheses. Rather than remember where they were, we
9778 don't associate floats at all, unless the user has specified
9779 -fassociative-math.
9780 And, we need to make sure type is not saturating. */
9782 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9783 && !TYPE_SATURATING (type))
9785 tree var0, con0, lit0, minus_lit0;
9786 tree var1, con1, lit1, minus_lit1;
9787 bool ok = true;
9789 /* Split both trees into variables, constants, and literals. Then
9790 associate each group together, the constants with literals,
9791 then the result with variables. This increases the chances of
9792 literals being recombined later and of generating relocatable
9793 expressions for the sum of a constant and literal. */
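/* E.g. for (x + 3) + (y + 5), the variable parts x and y and the
literal parts 3 and 5 are regrouped as (x + y) + 8. */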
9794 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9795 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9796 code == MINUS_EXPR);
9798 /* With undefined overflow we can only associate constants
9799 with one variable. */
9800 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9801 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9802 && var0 && var1)
9804 tree tmp0 = var0;
9805 tree tmp1 = var1;
9807 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9808 tmp0 = TREE_OPERAND (tmp0, 0);
9809 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9810 tmp1 = TREE_OPERAND (tmp1, 0);
9811 /* The only case we can still associate with two variables
9812 is if they are the same, modulo negation. */
9813 if (!operand_equal_p (tmp0, tmp1, 0))
9814 ok = false;
9817 /* Only do something if we found more than two objects. Otherwise,
9818 nothing has changed and we risk infinite recursion. */
9819 if (ok
9820 && (2 < ((var0 != 0) + (var1 != 0)
9821 + (con0 != 0) + (con1 != 0)
9822 + (lit0 != 0) + (lit1 != 0)
9823 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9825 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9826 if (code == MINUS_EXPR)
9827 code = PLUS_EXPR;
9829 var0 = associate_trees (var0, var1, code, type);
9830 con0 = associate_trees (con0, con1, code, type);
9831 lit0 = associate_trees (lit0, lit1, code, type);
9832 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9834 /* Preserve the MINUS_EXPR if the negative part of the literal is
9835 greater than the positive part. Otherwise, the multiplicative
9836 folding code (i.e. extract_muldiv) may be fooled when
9837 unsigned constants are subtracted, as in the following
9838 example: ((X*2 + 4) - 8U)/2. */
9839 if (minus_lit0 && lit0)
9841 if (TREE_CODE (lit0) == INTEGER_CST
9842 && TREE_CODE (minus_lit0) == INTEGER_CST
9843 && tree_int_cst_lt (lit0, minus_lit0))
9845 minus_lit0 = associate_trees (minus_lit0, lit0,
9846 MINUS_EXPR, type);
9847 lit0 = 0;
9849 else
9851 lit0 = associate_trees (lit0, minus_lit0,
9852 MINUS_EXPR, type);
9853 minus_lit0 = 0;
9856 if (minus_lit0)
9858 if (con0 == 0)
9859 return fold_convert (type,
9860 associate_trees (var0, minus_lit0,
9861 MINUS_EXPR, type));
9862 else
9864 con0 = associate_trees (con0, minus_lit0,
9865 MINUS_EXPR, type);
9866 return fold_convert (type,
9867 associate_trees (var0, con0,
9868 PLUS_EXPR, type));
9872 con0 = associate_trees (con0, lit0, code, type);
9873 return fold_convert (type, associate_trees (var0, con0,
9874 code, type));
9878 return NULL_TREE;
9880 case MINUS_EXPR:
9881 /* Pointer simplifications for subtraction, simple reassociations. */
9882 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9884 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9885 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9886 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9888 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9889 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9890 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9891 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9892 return fold_build2 (PLUS_EXPR, type,
9893 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9894 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9896 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9897 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9899 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9900 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9901 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9902 if (tmp)
9903 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9906 /* A - (-B) -> A + B */
9907 if (TREE_CODE (arg1) == NEGATE_EXPR)
9908 return fold_build2 (PLUS_EXPR, type, op0,
9909 fold_convert (type, TREE_OPERAND (arg1, 0)));
9910 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9911 if (TREE_CODE (arg0) == NEGATE_EXPR
9912 && (FLOAT_TYPE_P (type)
9913 || INTEGRAL_TYPE_P (type))
9914 && negate_expr_p (arg1)
9915 && reorder_operands_p (arg0, arg1))
9916 return fold_build2 (MINUS_EXPR, type,
9917 fold_convert (type, negate_expr (arg1)),
9918 fold_convert (type, TREE_OPERAND (arg0, 0)));
9919 /* Convert -A - 1 to ~A. */
9920 if (INTEGRAL_TYPE_P (type)
9921 && TREE_CODE (arg0) == NEGATE_EXPR
9922 && integer_onep (arg1)
9923 && !TYPE_OVERFLOW_TRAPS (type))
9924 return fold_build1 (BIT_NOT_EXPR, type,
9925 fold_convert (type, TREE_OPERAND (arg0, 0)));
9927 /* Convert -1 - A to ~A. */
9928 if (INTEGRAL_TYPE_P (type)
9929 && integer_all_onesp (arg0))
9930 return fold_build1 (BIT_NOT_EXPR, type, op1);
9933 /* X - (X / CST) * CST is X % CST. */
9934 if (INTEGRAL_TYPE_P (type)
9935 && TREE_CODE (arg1) == MULT_EXPR
9936 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9937 && operand_equal_p (arg0,
9938 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9939 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9940 TREE_OPERAND (arg1, 1), 0))
9941 return fold_convert (type,
9942 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9943 arg0, TREE_OPERAND (arg1, 1)));
9945 if (! FLOAT_TYPE_P (type))
9947 if (integer_zerop (arg0))
9948 return negate_expr (fold_convert (type, arg1));
9949 if (integer_zerop (arg1))
9950 return non_lvalue (fold_convert (type, arg0));
9952 /* Fold A - (A & B) into ~B & A. */
9953 if (!TREE_SIDE_EFFECTS (arg0)
9954 && TREE_CODE (arg1) == BIT_AND_EXPR)
9956 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9958 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9959 return fold_build2 (BIT_AND_EXPR, type,
9960 fold_build1 (BIT_NOT_EXPR, type, arg10),
9961 fold_convert (type, arg0));
9963 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9965 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9966 return fold_build2 (BIT_AND_EXPR, type,
9967 fold_build1 (BIT_NOT_EXPR, type, arg11),
9968 fold_convert (type, arg0));
9972 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9973 any power of 2 minus 1. */
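/* E.g. with B == 7 and A == 13: (13 & ~7) - (13 & 7) is 8 - 5 == 3,
and (13 ^ 7) - 7 is 10 - 7 == 3. */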
9974 if (TREE_CODE (arg0) == BIT_AND_EXPR
9975 && TREE_CODE (arg1) == BIT_AND_EXPR
9976 && operand_equal_p (TREE_OPERAND (arg0, 0),
9977 TREE_OPERAND (arg1, 0), 0))
9979 tree mask0 = TREE_OPERAND (arg0, 1);
9980 tree mask1 = TREE_OPERAND (arg1, 1);
9981 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9983 if (operand_equal_p (tem, mask1, 0))
9985 tem = fold_build2 (BIT_XOR_EXPR, type,
9986 TREE_OPERAND (arg0, 0), mask1);
9987 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9992 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9993 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9994 return non_lvalue (fold_convert (type, arg0));
9996 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9997 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9998 (-ARG1 + ARG0) reduces to -ARG1. */
9999 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10000 return negate_expr (fold_convert (type, arg1));
10002 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10003 __complex__ ( x, -y ). This is not the same for SNaNs or if
10004 signed zeros are involved. */
10005 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10006 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10007 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10009 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10010 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10011 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10012 bool arg0rz = false, arg0iz = false;
10013 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10014 || (arg0i && (arg0iz = real_zerop (arg0i))))
10016 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10017 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10018 if (arg0rz && arg1i && real_zerop (arg1i))
10020 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10021 arg1r ? arg1r
10022 : build1 (REALPART_EXPR, rtype, arg1));
10023 tree ip = arg0i ? arg0i
10024 : build1 (IMAGPART_EXPR, rtype, arg0);
10025 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10027 else if (arg0iz && arg1r && real_zerop (arg1r))
10029 tree rp = arg0r ? arg0r
10030 : build1 (REALPART_EXPR, rtype, arg0);
10031 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10032 arg1i ? arg1i
10033 : build1 (IMAGPART_EXPR, rtype, arg1));
10034 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10039 /* Fold &x - &x. This can happen from &x.foo - &x.
10040 This is unsafe for certain floats even in non-IEEE formats.
10041 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10042 Also note that operand_equal_p is always false if an operand
10043 is volatile. */
10045 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10046 && operand_equal_p (arg0, arg1, 0))
10047 return fold_convert (type, integer_zero_node);
10049 /* A - B -> A + (-B) if B is easily negatable. */
10050 if (negate_expr_p (arg1)
10051 && ((FLOAT_TYPE_P (type)
10052 /* Avoid this transformation if B is a positive REAL_CST. */
10053 && (TREE_CODE (arg1) != REAL_CST
10054 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10055 || INTEGRAL_TYPE_P (type)))
10056 return fold_build2 (PLUS_EXPR, type,
10057 fold_convert (type, arg0),
10058 fold_convert (type, negate_expr (arg1)));
10060 /* Try folding difference of addresses. */
10062 HOST_WIDE_INT diff;
10064 if ((TREE_CODE (arg0) == ADDR_EXPR
10065 || TREE_CODE (arg1) == ADDR_EXPR)
10066 && ptr_difference_const (arg0, arg1, &diff))
10067 return build_int_cst_type (type, diff);
10070 /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]). */
10071 if (TREE_CODE (arg0) == ADDR_EXPR
10072 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10073 && TREE_CODE (arg1) == ADDR_EXPR
10074 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10076 tree aref0 = TREE_OPERAND (arg0, 0);
10077 tree aref1 = TREE_OPERAND (arg1, 0);
10078 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10079 TREE_OPERAND (aref1, 0), 0))
10081 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10082 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10083 tree esz = array_ref_element_size (aref0);
10084 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10085 return fold_build2 (MULT_EXPR, type, diff,
10086 fold_convert (type, esz));
10091 if (flag_unsafe_math_optimizations
10092 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10093 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10094 && (tem = distribute_real_division (code, type, arg0, arg1)))
10095 return tem;
10097 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10098 same or one. Make sure type is not saturating.
10099 fold_plusminus_mult_expr will re-associate. */
10100 if ((TREE_CODE (arg0) == MULT_EXPR
10101 || TREE_CODE (arg1) == MULT_EXPR)
10102 && !TYPE_SATURATING (type)
10103 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10105 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10106 if (tem)
10107 return tem;
10110 goto associate;
10112 case MULT_EXPR:
10113 /* (-A) * (-B) -> A * B */
10114 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10115 return fold_build2 (MULT_EXPR, type,
10116 fold_convert (type, TREE_OPERAND (arg0, 0)),
10117 fold_convert (type, negate_expr (arg1)));
10118 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10119 return fold_build2 (MULT_EXPR, type,
10120 fold_convert (type, negate_expr (arg0)),
10121 fold_convert (type, TREE_OPERAND (arg1, 0)));
10123 if (! FLOAT_TYPE_P (type))
10125 if (integer_zerop (arg1))
10126 return omit_one_operand (type, arg1, arg0);
10127 if (integer_onep (arg1))
10128 return non_lvalue (fold_convert (type, arg0));
10129 /* Transform x * -1 into -x. Make sure to do the negation
10130 on the original operand with conversions not stripped
10131 because we can only strip non-sign-changing conversions. */
10132 if (integer_all_onesp (arg1))
10133 return fold_convert (type, negate_expr (op0));
10134 /* Transform x * -C into -x * C if x is easily negatable. */
10135 if (TREE_CODE (arg1) == INTEGER_CST
10136 && tree_int_cst_sgn (arg1) == -1
10137 && negate_expr_p (arg0)
10138 && (tem = negate_expr (arg1)) != arg1
10139 && !TREE_OVERFLOW (tem))
10140 return fold_build2 (MULT_EXPR, type,
10141 fold_convert (type, negate_expr (arg0)), tem);
10143 /* (a * (1 << b)) is (a << b) */
10144 if (TREE_CODE (arg1) == LSHIFT_EXPR
10145 && integer_onep (TREE_OPERAND (arg1, 0)))
10146 return fold_build2 (LSHIFT_EXPR, type, op0,
10147 TREE_OPERAND (arg1, 1));
10148 if (TREE_CODE (arg0) == LSHIFT_EXPR
10149 && integer_onep (TREE_OPERAND (arg0, 0)))
10150 return fold_build2 (LSHIFT_EXPR, type, op1,
10151 TREE_OPERAND (arg0, 1));
10153 /* (A + A) * C -> A * 2 * C */
10154 if (TREE_CODE (arg0) == PLUS_EXPR
10155 && TREE_CODE (arg1) == INTEGER_CST
10156 && operand_equal_p (TREE_OPERAND (arg0, 0),
10157 TREE_OPERAND (arg0, 1), 0))
10158 return fold_build2 (MULT_EXPR, type,
10159 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10160 TREE_OPERAND (arg0, 1)),
10161 fold_build2 (MULT_EXPR, type,
10162 build_int_cst (type, 2) , arg1));
10164 strict_overflow_p = false;
10165 if (TREE_CODE (arg1) == INTEGER_CST
10166 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10167 &strict_overflow_p)))
10169 if (strict_overflow_p)
10170 fold_overflow_warning (("assuming signed overflow does not "
10171 "occur when simplifying "
10172 "multiplication"),
10173 WARN_STRICT_OVERFLOW_MISC);
10174 return fold_convert (type, tem);
10177 /* Optimize z * conj(z) for integer complex numbers. */
10178 if (TREE_CODE (arg0) == CONJ_EXPR
10179 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10180 return fold_mult_zconjz (type, arg1);
10181 if (TREE_CODE (arg1) == CONJ_EXPR
10182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10183 return fold_mult_zconjz (type, arg0);
10185 else
10187 /* Maybe fold x * 0 to 0. The expressions aren't the same
10188 when x is NaN, since x * 0 is also NaN. Nor are they the
10189 same in modes with signed zeros, since multiplying a
10190 negative value by 0 gives -0, not +0. */
10191 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10192 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10193 && real_zerop (arg1))
10194 return omit_one_operand (type, arg1, arg0);
10195 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs. */
10196 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10197 && real_onep (arg1))
10198 return non_lvalue (fold_convert (type, arg0));
10200 /* Transform x * -1.0 into -x. */
10201 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10202 && real_minus_onep (arg1))
10203 return fold_convert (type, negate_expr (arg0));
10205 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10206 the result for floating point types due to rounding, so it is applied
10207 only if -fassociative-math was specified. */
10208 if (flag_associative_math
10209 && TREE_CODE (arg0) == RDIV_EXPR
10210 && TREE_CODE (arg1) == REAL_CST
10211 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10213 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10214 arg1, 0);
10215 if (tem)
10216 return fold_build2 (RDIV_EXPR, type, tem,
10217 TREE_OPERAND (arg0, 1));
10220 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10221 if (operand_equal_p (arg0, arg1, 0))
10223 tree tem = fold_strip_sign_ops (arg0);
10224 if (tem != NULL_TREE)
10226 tem = fold_convert (type, tem);
10227 return fold_build2 (MULT_EXPR, type, tem, tem);
10231 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10232 This is not the same for NaNs or if signed zeros are
10233 involved. */
10234 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10235 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10236 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10237 && TREE_CODE (arg1) == COMPLEX_CST
10238 && real_zerop (TREE_REALPART (arg1)))
10240 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10241 if (real_onep (TREE_IMAGPART (arg1)))
10242 return fold_build2 (COMPLEX_EXPR, type,
10243 negate_expr (fold_build1 (IMAGPART_EXPR,
10244 rtype, arg0)),
10245 fold_build1 (REALPART_EXPR, rtype, arg0));
10246 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10247 return fold_build2 (COMPLEX_EXPR, type,
10248 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10249 negate_expr (fold_build1 (REALPART_EXPR,
10250 rtype, arg0)));
10253 /* Optimize z * conj(z) for floating point complex numbers.
10254 Guarded by flag_unsafe_math_optimizations as non-finite
10255 imaginary components don't produce scalar results. */
10256 if (flag_unsafe_math_optimizations
10257 && TREE_CODE (arg0) == CONJ_EXPR
10258 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10259 return fold_mult_zconjz (type, arg1);
10260 if (flag_unsafe_math_optimizations
10261 && TREE_CODE (arg1) == CONJ_EXPR
10262 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10263 return fold_mult_zconjz (type, arg0);
10265 if (flag_unsafe_math_optimizations)
10267 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10268 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10270 /* Optimizations of root(...)*root(...). */
10271 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10273 tree rootfn, arg;
10274 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10275 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10277 /* Optimize sqrt(x)*sqrt(x) as x. */
10278 if (BUILTIN_SQRT_P (fcode0)
10279 && operand_equal_p (arg00, arg10, 0)
10280 && ! HONOR_SNANS (TYPE_MODE (type)))
10281 return arg00;
10283 /* Optimize root(x)*root(y) as root(x*y). */
10284 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10285 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10286 return build_call_expr (rootfn, 1, arg);
10289 /* Optimize expN(x)*expN(y) as expN(x+y). */
10290 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10292 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10293 tree arg = fold_build2 (PLUS_EXPR, type,
10294 CALL_EXPR_ARG (arg0, 0),
10295 CALL_EXPR_ARG (arg1, 0));
10296 return build_call_expr (expfn, 1, arg);
10299 /* Optimizations of pow(...)*pow(...). */
10300 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10301 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10302 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10304 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10305 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10306 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10307 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10309 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10310 if (operand_equal_p (arg01, arg11, 0))
10312 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10313 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10314 return build_call_expr (powfn, 2, arg, arg01);
10317 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10318 if (operand_equal_p (arg00, arg10, 0))
10320 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10321 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10322 return build_call_expr (powfn, 2, arg00, arg);
10326 /* Optimize tan(x)*cos(x) as sin(x). */
10327 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10328 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10329 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10330 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10331 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10332 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10333 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10334 CALL_EXPR_ARG (arg1, 0), 0))
10336 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10338 if (sinfn != NULL_TREE)
10339 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10342 /* Optimize x*pow(x,c) as pow(x,c+1). */
10343 if (fcode1 == BUILT_IN_POW
10344 || fcode1 == BUILT_IN_POWF
10345 || fcode1 == BUILT_IN_POWL)
10347 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10348 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10349 if (TREE_CODE (arg11) == REAL_CST
10350 && !TREE_OVERFLOW (arg11)
10351 && operand_equal_p (arg0, arg10, 0))
10353 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10354 REAL_VALUE_TYPE c;
10355 tree arg;
10357 c = TREE_REAL_CST (arg11);
10358 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10359 arg = build_real (type, c);
10360 return build_call_expr (powfn, 2, arg0, arg);
10364 /* Optimize pow(x,c)*x as pow(x,c+1). */
10365 if (fcode0 == BUILT_IN_POW
10366 || fcode0 == BUILT_IN_POWF
10367 || fcode0 == BUILT_IN_POWL)
10369 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10370 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10371 if (TREE_CODE (arg01) == REAL_CST
10372 && !TREE_OVERFLOW (arg01)
10373 && operand_equal_p (arg1, arg00, 0))
10375 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10376 REAL_VALUE_TYPE c;
10377 tree arg;
10379 c = TREE_REAL_CST (arg01);
10380 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10381 arg = build_real (type, c);
10382 return build_call_expr (powfn, 2, arg1, arg);
10386 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10387 if (! optimize_size
10388 && operand_equal_p (arg0, arg1, 0))
10390 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10392 if (powfn)
10394 tree arg = build_real (type, dconst2);
10395 return build_call_expr (powfn, 2, arg0, arg);
10400 goto associate;
10402 case BIT_IOR_EXPR:
10403 bit_ior:
10404 if (integer_all_onesp (arg1))
10405 return omit_one_operand (type, arg1, arg0);
10406 if (integer_zerop (arg1))
10407 return non_lvalue (fold_convert (type, arg0));
10408 if (operand_equal_p (arg0, arg1, 0))
10409 return non_lvalue (fold_convert (type, arg0));
10411 /* ~X | X is -1. */
10412 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10413 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10415 t1 = fold_convert (type, integer_zero_node);
10416 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10417 return omit_one_operand (type, t1, arg1);
10420 /* X | ~X is -1. */
10421 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10424 t1 = fold_convert (type, integer_zero_node);
10425 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10426 return omit_one_operand (type, t1, arg0);
10429 /* Canonicalize (X & C1) | C2. */
10430 if (TREE_CODE (arg0) == BIT_AND_EXPR
10431 && TREE_CODE (arg1) == INTEGER_CST
10432 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10434 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10435 int width = TYPE_PRECISION (type), w;
10436 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10437 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10438 hi2 = TREE_INT_CST_HIGH (arg1);
10439 lo2 = TREE_INT_CST_LOW (arg1);
10441 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10442 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10443 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10445 if (width > HOST_BITS_PER_WIDE_INT)
10447 mhi = (unsigned HOST_WIDE_INT) -1
10448 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10449 mlo = -1;
10451 else
10453 mhi = 0;
10454 mlo = (unsigned HOST_WIDE_INT) -1
10455 >> (HOST_BITS_PER_WIDE_INT - width);
10458 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10459 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10460 return fold_build2 (BIT_IOR_EXPR, type,
10461 TREE_OPERAND (arg0, 0), arg1);
10463 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10464 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10465 mode which allows further optimizations. */
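/* E.g. for 32-bit X, (X & 0xF7) | 0x04 becomes (X & 0xF3) | 0x04,
while (X & 0xFD) | 0x0F becomes (X & 0xFF) | 0x0F, since 0xFF is
the mask of a mode (QImode) and may enable further folding. */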
10466 hi1 &= mhi;
10467 lo1 &= mlo;
10468 hi2 &= mhi;
10469 lo2 &= mlo;
10470 hi3 = hi1 & ~hi2;
10471 lo3 = lo1 & ~lo2;
10472 for (w = BITS_PER_UNIT;
10473 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10474 w <<= 1)
10476 unsigned HOST_WIDE_INT mask
10477 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10478 if (((lo1 | lo2) & mask) == mask
10479 && (lo1 & ~mask) == 0 && hi1 == 0)
10481 hi3 = 0;
10482 lo3 = mask;
10483 break;
10486 if (hi3 != hi1 || lo3 != lo1)
10487 return fold_build2 (BIT_IOR_EXPR, type,
10488 fold_build2 (BIT_AND_EXPR, type,
10489 TREE_OPERAND (arg0, 0),
10490 build_int_cst_wide (type,
10491 lo3, hi3)),
10492 arg1);
10495 /* (X & Y) | Y is (X, Y). */
10496 if (TREE_CODE (arg0) == BIT_AND_EXPR
10497 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10498 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10499 /* (X & Y) | X is (Y, X). */
10500 if (TREE_CODE (arg0) == BIT_AND_EXPR
10501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10502 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10503 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10504 /* X | (X & Y) is (Y, X). */
10505 if (TREE_CODE (arg1) == BIT_AND_EXPR
10506 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10507 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10508 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10509 /* X | (Y & X) is (Y, X). */
10510 if (TREE_CODE (arg1) == BIT_AND_EXPR
10511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10512 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10513 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10515 t1 = distribute_bit_expr (code, type, arg0, arg1);
10516 if (t1 != NULL_TREE)
10517 return t1;
10519 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10521 This results in more efficient code for machines without a NAND
10522 instruction. Combine will canonicalize to the first form
10523 which will allow use of NAND instructions provided by the
10524 backend if they exist. */
10525 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10526 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10528 return fold_build1 (BIT_NOT_EXPR, type,
10529 build2 (BIT_AND_EXPR, type,
10530 fold_convert (type,
10531 TREE_OPERAND (arg0, 0)),
10532 fold_convert (type,
10533 TREE_OPERAND (arg1, 0))));
10536 /* See if this can be simplified into a rotate first. If that
10537 is unsuccessful, continue in the association code. */
10538 goto bit_rotate;
10540 case BIT_XOR_EXPR:
10541 if (integer_zerop (arg1))
10542 return non_lvalue (fold_convert (type, arg0));
10543 if (integer_all_onesp (arg1))
10544 return fold_build1 (BIT_NOT_EXPR, type, op0);
10545 if (operand_equal_p (arg0, arg1, 0))
10546 return omit_one_operand (type, integer_zero_node, arg0);
10548 /* ~X ^ X is -1. */
10549 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10550 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10552 t1 = fold_convert (type, integer_zero_node);
10553 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10554 return omit_one_operand (type, t1, arg1);
10557 /* X ^ ~X is -1. */
10558 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10561 t1 = fold_convert (type, integer_zero_node);
10562 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10563 return omit_one_operand (type, t1, arg0);
10566 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10567 with a constant, and the two constants have no bits in common,
10568 we should treat this as a BIT_IOR_EXPR since this may produce more
10569 simplifications. */
10570 if (TREE_CODE (arg0) == BIT_AND_EXPR
10571 && TREE_CODE (arg1) == BIT_AND_EXPR
10572 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10573 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10574 && integer_zerop (const_binop (BIT_AND_EXPR,
10575 TREE_OPERAND (arg0, 1),
10576 TREE_OPERAND (arg1, 1), 0)))
10578 code = BIT_IOR_EXPR;
10579 goto bit_ior;
10582 /* (X | Y) ^ X -> Y & ~X. */
10583 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10584 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10586 tree t2 = TREE_OPERAND (arg0, 1);
10587 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10588 arg1);
10589 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10590 fold_convert (type, t1));
10591 return t1;
10594 /* (Y | X) ^ X -> Y & ~X. */
10595 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10596 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10598 tree t2 = TREE_OPERAND (arg0, 0);
10599 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10600 arg1);
10601 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10602 fold_convert (type, t1));
10603 return t1;
10606 /* X ^ (X | Y) -> Y & ~X. */
10607 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10608 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10610 tree t2 = TREE_OPERAND (arg1, 1);
10611 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10612 arg0);
10613 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10614 fold_convert (type, t1));
10615 return t1;
10618 /* X ^ (Y | X) -> Y & ~X. */
10619 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10620 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10622 tree t2 = TREE_OPERAND (arg1, 0);
10623 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10624 arg0);
10625 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10626 fold_convert (type, t1));
10627 return t1;
10630 /* Convert ~X ^ ~Y to X ^ Y. */
10631 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10632 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10633 return fold_build2 (code, type,
10634 fold_convert (type, TREE_OPERAND (arg0, 0)),
10635 fold_convert (type, TREE_OPERAND (arg1, 0)));
10637 /* Convert ~X ^ C to X ^ ~C. */
10638 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10639 && TREE_CODE (arg1) == INTEGER_CST)
10640 return fold_build2 (code, type,
10641 fold_convert (type, TREE_OPERAND (arg0, 0)),
10642 fold_build1 (BIT_NOT_EXPR, type, arg1));
10644 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10645 if (TREE_CODE (arg0) == BIT_AND_EXPR
10646 && integer_onep (TREE_OPERAND (arg0, 1))
10647 && integer_onep (arg1))
10648 return fold_build2 (EQ_EXPR, type, arg0,
10649 build_int_cst (TREE_TYPE (arg0), 0));
10651 /* Fold (X & Y) ^ Y as ~X & Y. */
10652 if (TREE_CODE (arg0) == BIT_AND_EXPR
10653 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10655 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10656 return fold_build2 (BIT_AND_EXPR, type,
10657 fold_build1 (BIT_NOT_EXPR, type, tem),
10658 fold_convert (type, arg1));
10660 /* Fold (X & Y) ^ X as ~Y & X. */
10661 if (TREE_CODE (arg0) == BIT_AND_EXPR
10662 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10663 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10665 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10666 return fold_build2 (BIT_AND_EXPR, type,
10667 fold_build1 (BIT_NOT_EXPR, type, tem),
10668 fold_convert (type, arg1));
10670 /* Fold X ^ (X & Y) as X & ~Y. */
10671 if (TREE_CODE (arg1) == BIT_AND_EXPR
10672 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10674 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10675 return fold_build2 (BIT_AND_EXPR, type,
10676 fold_convert (type, arg0),
10677 fold_build1 (BIT_NOT_EXPR, type, tem));
10679 /* Fold X ^ (Y & X) as ~Y & X. */
10680 if (TREE_CODE (arg1) == BIT_AND_EXPR
10681 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10682 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10684 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10685 return fold_build2 (BIT_AND_EXPR, type,
10686 fold_build1 (BIT_NOT_EXPR, type, tem),
10687 fold_convert (type, arg0));
10690 /* See if this can be simplified into a rotate first. If that
10691 is unsuccessful, continue in the association code. */
10692 goto bit_rotate;
10694 case BIT_AND_EXPR:
10695 if (integer_all_onesp (arg1))
10696 return non_lvalue (fold_convert (type, arg0));
10697 if (integer_zerop (arg1))
10698 return omit_one_operand (type, arg1, arg0);
10699 if (operand_equal_p (arg0, arg1, 0))
10700 return non_lvalue (fold_convert (type, arg0));
10702 /* ~X & X is always zero. */
10703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10704 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10705 return omit_one_operand (type, integer_zero_node, arg1);
10707 /* X & ~X is always zero. */
10708 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10709 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10710 return omit_one_operand (type, integer_zero_node, arg0);
10712 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10713 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10714 && TREE_CODE (arg1) == INTEGER_CST
10715 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10717 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10718 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10719 TREE_OPERAND (arg0, 0), tmp1);
10720 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10721 TREE_OPERAND (arg0, 1), tmp1);
10722 return fold_convert (type,
10723 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10724 tmp2, tmp3));
10727 /* (X | Y) & Y is (X, Y). */
10728 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10729 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10730 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10731 /* (X | Y) & X is (Y, X). */
10732 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10733 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10734 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10735 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10736 /* X & (X | Y) is (Y, X). */
10737 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10738 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10739 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10740 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10741 /* X & (Y | X) is (Y, X). */
10742 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10744 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10745 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10747 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10748 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10749 && integer_onep (TREE_OPERAND (arg0, 1))
10750 && integer_onep (arg1))
10752 tem = TREE_OPERAND (arg0, 0);
10753 return fold_build2 (EQ_EXPR, type,
10754 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10755 build_int_cst (TREE_TYPE (tem), 1)),
10756 build_int_cst (TREE_TYPE (tem), 0));
10758 /* Fold ~X & 1 as (X & 1) == 0. */
10759 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10760 && integer_onep (arg1))
10762 tem = TREE_OPERAND (arg0, 0);
10763 return fold_build2 (EQ_EXPR, type,
10764 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10765 build_int_cst (TREE_TYPE (tem), 1)),
10766 build_int_cst (TREE_TYPE (tem), 0));
10769 /* Fold (X ^ Y) & Y as ~X & Y. */
10770 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10771 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10773 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10774 return fold_build2 (BIT_AND_EXPR, type,
10775 fold_build1 (BIT_NOT_EXPR, type, tem),
10776 fold_convert (type, arg1));
10778 /* Fold (X ^ Y) & X as ~Y & X. */
10779 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10780 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10781 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10783 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10784 return fold_build2 (BIT_AND_EXPR, type,
10785 fold_build1 (BIT_NOT_EXPR, type, tem),
10786 fold_convert (type, arg1));
10788 /* Fold X & (X ^ Y) as X & ~Y. */
10789 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10790 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10792 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10793 return fold_build2 (BIT_AND_EXPR, type,
10794 fold_convert (type, arg0),
10795 fold_build1 (BIT_NOT_EXPR, type, tem));
10797 /* Fold X & (Y ^ X) as ~Y & X. */
10798 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10799 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10800 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10802 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10803 return fold_build2 (BIT_AND_EXPR, type,
10804 fold_build1 (BIT_NOT_EXPR, type, tem),
10805 fold_convert (type, arg0));
10808 t1 = distribute_bit_expr (code, type, arg0, arg1);
10809 if (t1 != NULL_TREE)
10810 return t1;
10811 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10812 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10813 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10815 unsigned int prec
10816 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10818 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10819 && (~TREE_INT_CST_LOW (arg1)
10820 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10821 return fold_convert (type, TREE_OPERAND (arg0, 0));
10824 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10826 This results in more efficient code for machines without a NOR
10827 instruction. Combine will canonicalize to the first form
10828 which will allow use of NOR instructions provided by the
10829 backend if they exist. */
10830 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10831 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10833 return fold_build1 (BIT_NOT_EXPR, type,
10834 build2 (BIT_IOR_EXPR, type,
10835 fold_convert (type,
10836 TREE_OPERAND (arg0, 0)),
10837 fold_convert (type,
10838 TREE_OPERAND (arg1, 0))));
10841 /* If arg0 is derived from the address of an object or function, we may
10842 be able to fold this expression using the object or function's
10843 alignment. */
10844 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10846 unsigned HOST_WIDE_INT modulus, residue;
10847 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10849 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10851 /* This works because modulus is a power of 2. If this weren't the
10852 case, we'd have to replace it by its greatest power-of-2
10853 divisor: modulus & -modulus. */
10854 if (low < modulus)
10855 return build_int_cst (type, residue & low);
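/* For example (illustrative), if ARG0 is the address of an object
   known to be 8-byte aligned, modulus == 8 and residue == 0, so
   PTR & 7 folds to the constant 0.  */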
10858 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10859 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10860 if the new mask might be further optimized. */
10861 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10862 || TREE_CODE (arg0) == RSHIFT_EXPR)
10863 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10864 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10865 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10866 < TYPE_PRECISION (TREE_TYPE (arg0))
10867 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10868 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10870 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10871 unsigned HOST_WIDE_INT mask
10872 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10873 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10874 tree shift_type = TREE_TYPE (arg0);
10876 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10877 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10878 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10879 && TYPE_PRECISION (TREE_TYPE (arg0))
10880 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10882 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10883 tree arg00 = TREE_OPERAND (arg0, 0);
10884 /* See if more bits can be proven as zero because of
10885 zero extension. */
10886 if (TREE_CODE (arg00) == NOP_EXPR
10887 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10889 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10890 if (TYPE_PRECISION (inner_type)
10891 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10892 && TYPE_PRECISION (inner_type) < prec)
10894 prec = TYPE_PRECISION (inner_type);
10895 /* See if we can shorten the right shift. */
10896 if (shiftc < prec)
10897 shift_type = inner_type;
10900 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10901 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10902 zerobits <<= prec - shiftc;
10903 /* For an arithmetic shift, if the sign bit could be set, zerobits
10904 can actually contain sign bits, so no transformation is
10905 possible, unless MASK masks them all away. In that
10906 case the shift needs to be converted into a logical shift. */
10907 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10908 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10910 if ((mask & zerobits) == 0)
10911 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10912 else
10913 zerobits = 0;
10917 /* ((X << 16) & 0xff00) is (X, 0). */
10918 if ((mask & zerobits) == mask)
10919 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10921 newmask = mask | zerobits;
10922 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10924 unsigned int prec;
10926 /* Only do the transformation if NEWMASK is some integer
10927 mode's mask. */
10928 for (prec = BITS_PER_UNIT;
10929 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10930 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10931 break;
10932 if (prec < HOST_BITS_PER_WIDE_INT
10933 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10935 if (shift_type != TREE_TYPE (arg0))
10937 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10938 fold_convert (shift_type,
10939 TREE_OPERAND (arg0, 0)),
10940 TREE_OPERAND (arg0, 1));
10941 tem = fold_convert (type, tem);
10943 else
10944 tem = op0;
10945 return fold_build2 (BIT_AND_EXPR, type, tem,
10946 build_int_cst_type (TREE_TYPE (op1),
10947 newmask));
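/* For example (illustrative), in (X << 3) & 0xF8 the low three bits
   of the shift are known zero, so the mask widens to 0xF8 | 7 == 0xFF,
   the QImode mask, which may enable further folding.  */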
10952 goto associate;
10954 case RDIV_EXPR:
10955 /* Don't touch a floating-point divide by zero unless the mode
10956 of the constant can represent infinity. */
10957 if (TREE_CODE (arg1) == REAL_CST
10958 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10959 && real_zerop (arg1))
10960 return NULL_TREE;
10962 /* Optimize A / A to 1.0 if we don't care about
10963 NaNs or Infinities. Skip the transformation
10964 for non-real operands. */
10965 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10966 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10967 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10968 && operand_equal_p (arg0, arg1, 0))
10970 tree r = build_real (TREE_TYPE (arg0), dconst1);
10972 return omit_two_operands (type, r, arg0, arg1);
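/* E.g. folding x / x to 1.0 would be wrong for x == NaN (NaN/NaN is
   NaN) and for x == Inf (Inf/Inf is NaN), hence the HONOR_* checks
   above.  */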
10975 /* The complex version of the above A / A optimization. */
10976 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10977 && operand_equal_p (arg0, arg1, 0))
10979 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10980 if (! HONOR_NANS (TYPE_MODE (elem_type))
10981 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10983 tree r = build_real (elem_type, dconst1);
10984 /* omit_two_operands will call fold_convert for us. */
10985 return omit_two_operands (type, r, arg0, arg1);
10989 /* (-A) / (-B) -> A / B */
10990 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10991 return fold_build2 (RDIV_EXPR, type,
10992 TREE_OPERAND (arg0, 0),
10993 negate_expr (arg1));
10994 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10995 return fold_build2 (RDIV_EXPR, type,
10996 negate_expr (arg0),
10997 TREE_OPERAND (arg1, 0));
10999 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11000 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11001 && real_onep (arg1))
11002 return non_lvalue (fold_convert (type, arg0));
11004 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11005 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11006 && real_minus_onep (arg1))
11007 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11009 /* If ARG1 is a constant, we can convert this to a multiply by the
11010 reciprocal. This does not have the same rounding properties,
11011 so only do this if -freciprocal-math. We can actually
11012 always safely do it if ARG1 is a power of two, but it's hard to
11013 tell if it is or not in a portable manner. */
11014 if (TREE_CODE (arg1) == REAL_CST)
11016 if (flag_reciprocal_math
11017 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11018 arg1, 0)))
11019 return fold_build2 (MULT_EXPR, type, arg0, tem);
11020 /* Find the reciprocal if optimizing and the result is exact. */
11021 if (optimize)
11023 REAL_VALUE_TYPE r;
11024 r = TREE_REAL_CST (arg1);
11025 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11027 tem = build_real (type, r);
11028 return fold_build2 (MULT_EXPR, type,
11029 fold_convert (type, arg0), tem);
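/* For example, x / 4.0 becomes x * 0.25 whenever we optimize, since
   0.25 is an exact inverse, while x / 3.0 becomes x * (1.0/3.0) only
   under -freciprocal-math because that reciprocal rounds.  */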
11033 /* Convert A/B/C to A/(B*C). */
11034 if (flag_reciprocal_math
11035 && TREE_CODE (arg0) == RDIV_EXPR)
11036 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11037 fold_build2 (MULT_EXPR, type,
11038 TREE_OPERAND (arg0, 1), arg1));
11040 /* Convert A/(B/C) to (A/B)*C. */
11041 if (flag_reciprocal_math
11042 && TREE_CODE (arg1) == RDIV_EXPR)
11043 return fold_build2 (MULT_EXPR, type,
11044 fold_build2 (RDIV_EXPR, type, arg0,
11045 TREE_OPERAND (arg1, 0)),
11046 TREE_OPERAND (arg1, 1));
11048 /* Convert C1/(X*C2) into (C1/C2)/X. */
11049 if (flag_reciprocal_math
11050 && TREE_CODE (arg1) == MULT_EXPR
11051 && TREE_CODE (arg0) == REAL_CST
11052 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11054 tree tem = const_binop (RDIV_EXPR, arg0,
11055 TREE_OPERAND (arg1, 1), 0);
11056 if (tem)
11057 return fold_build2 (RDIV_EXPR, type, tem,
11058 TREE_OPERAND (arg1, 0));
11061 if (flag_unsafe_math_optimizations)
11063 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11064 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11066 /* Optimize sin(x)/cos(x) as tan(x). */
11067 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11068 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11069 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11070 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11071 CALL_EXPR_ARG (arg1, 0), 0))
11073 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11075 if (tanfn != NULL_TREE)
11076 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11079 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11080 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11081 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11082 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11083 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11084 CALL_EXPR_ARG (arg1, 0), 0))
11086 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11088 if (tanfn != NULL_TREE)
11090 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11091 return fold_build2 (RDIV_EXPR, type,
11092 build_real (type, dconst1), tmp);
11096 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11097 NaNs or Infinities. */
11098 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11099 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11100 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11102 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11103 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11105 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11106 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11107 && operand_equal_p (arg00, arg01, 0))
11109 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11111 if (cosfn != NULL_TREE)
11112 return build_call_expr (cosfn, 1, arg00);
11116 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11117 NaNs or Infinities. */
11118 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11119 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11120 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11122 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11123 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11125 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11126 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11127 && operand_equal_p (arg00, arg01, 0))
11129 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11131 if (cosfn != NULL_TREE)
11133 tree tmp = build_call_expr (cosfn, 1, arg00);
11134 return fold_build2 (RDIV_EXPR, type,
11135 build_real (type, dconst1),
11136 tmp);
11141 /* Optimize pow(x,c)/x as pow(x,c-1). */
11142 if (fcode0 == BUILT_IN_POW
11143 || fcode0 == BUILT_IN_POWF
11144 || fcode0 == BUILT_IN_POWL)
11146 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11147 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11148 if (TREE_CODE (arg01) == REAL_CST
11149 && !TREE_OVERFLOW (arg01)
11150 && operand_equal_p (arg1, arg00, 0))
11152 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11153 REAL_VALUE_TYPE c;
11154 tree arg;
11156 c = TREE_REAL_CST (arg01);
11157 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11158 arg = build_real (type, c);
11159 return build_call_expr (powfn, 2, arg1, arg);
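/* For example, pow (x, 3.0) / x becomes pow (x, 2.0).  */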
11163 /* Optimize a/root(b/c) into a*root(c/b). */
11164 if (BUILTIN_ROOT_P (fcode1))
11166 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11168 if (TREE_CODE (rootarg) == RDIV_EXPR)
11170 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11171 tree b = TREE_OPERAND (rootarg, 0);
11172 tree c = TREE_OPERAND (rootarg, 1);
11174 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11176 tmp = build_call_expr (rootfn, 1, tmp);
11177 return fold_build2 (MULT_EXPR, type, arg0, tmp);
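/* For example, x / sqrt (y/z) becomes x * sqrt (z/y), turning the
   outer division into a multiplication.  */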
11181 /* Optimize x/expN(y) into x*expN(-y). */
11182 if (BUILTIN_EXPONENT_P (fcode1))
11184 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11185 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11186 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11187 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11190 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11191 if (fcode1 == BUILT_IN_POW
11192 || fcode1 == BUILT_IN_POWF
11193 || fcode1 == BUILT_IN_POWL)
11195 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11196 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11197 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11198 tree neg11 = fold_convert (type, negate_expr (arg11));
11199 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11200 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11203 return NULL_TREE;
11205 case TRUNC_DIV_EXPR:
11206 case FLOOR_DIV_EXPR:
11207 /* Simplify A / (B << N) where A and B are positive and B is
11208 a power of 2, to A >> (N + log2(B)). */
11209 strict_overflow_p = false;
11210 if (TREE_CODE (arg1) == LSHIFT_EXPR
11211 && (TYPE_UNSIGNED (type)
11212 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11214 tree sval = TREE_OPERAND (arg1, 0);
11215 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11217 tree sh_cnt = TREE_OPERAND (arg1, 1);
11218 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11220 if (strict_overflow_p)
11221 fold_overflow_warning (("assuming signed overflow does not "
11222 "occur when simplifying A / (B << N)"),
11223 WARN_STRICT_OVERFLOW_MISC);
11225 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11226 sh_cnt, build_int_cst (NULL_TREE, pow2));
11227 return fold_build2 (RSHIFT_EXPR, type,
11228 fold_convert (type, arg0), sh_cnt);
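/* For example, with unsigned x, x / (8 << n) becomes x >> (n + 3),
   since log2 (8) == 3.  */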
11232 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11233 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11234 if (INTEGRAL_TYPE_P (type)
11235 && TYPE_UNSIGNED (type)
11236 && code == FLOOR_DIV_EXPR)
11237 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11239 /* Fall thru */
11241 case ROUND_DIV_EXPR:
11242 case CEIL_DIV_EXPR:
11243 case EXACT_DIV_EXPR:
11244 if (integer_onep (arg1))
11245 return non_lvalue (fold_convert (type, arg0));
11246 if (integer_zerop (arg1))
11247 return NULL_TREE;
11248 /* X / -1 is -X. */
11249 if (!TYPE_UNSIGNED (type)
11250 && TREE_CODE (arg1) == INTEGER_CST
11251 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11252 && TREE_INT_CST_HIGH (arg1) == -1)
11253 return fold_convert (type, negate_expr (arg0));
11255 /* Convert -A / -B to A / B when the type is signed and overflow is
11256 undefined. */
11257 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11258 && TREE_CODE (arg0) == NEGATE_EXPR
11259 && negate_expr_p (arg1))
11261 if (INTEGRAL_TYPE_P (type))
11262 fold_overflow_warning (("assuming signed overflow does not occur "
11263 "when distributing negation across "
11264 "division"),
11265 WARN_STRICT_OVERFLOW_MISC);
11266 return fold_build2 (code, type,
11267 fold_convert (type, TREE_OPERAND (arg0, 0)),
11268 negate_expr (arg1));
11270 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11271 && TREE_CODE (arg1) == NEGATE_EXPR
11272 && negate_expr_p (arg0))
11274 if (INTEGRAL_TYPE_P (type))
11275 fold_overflow_warning (("assuming signed overflow does not occur "
11276 "when distributing negation across "
11277 "division"),
11278 WARN_STRICT_OVERFLOW_MISC);
11279 return fold_build2 (code, type, negate_expr (arg0),
11280 TREE_OPERAND (arg1, 0));
11283 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11284 operation, EXACT_DIV_EXPR.
11286 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11287 At one time others generated faster code, but it's not clear if they do
11288 after the last round of changes to the DIV code in expmed.c. */
11289 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11290 && multiple_of_p (type, arg0, arg1))
11291 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11293 strict_overflow_p = false;
11294 if (TREE_CODE (arg1) == INTEGER_CST
11295 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11296 &strict_overflow_p)))
11298 if (strict_overflow_p)
11299 fold_overflow_warning (("assuming signed overflow does not occur "
11300 "when simplifying division"),
11301 WARN_STRICT_OVERFLOW_MISC);
11302 return fold_convert (type, tem);
11305 return NULL_TREE;
11307 case CEIL_MOD_EXPR:
11308 case FLOOR_MOD_EXPR:
11309 case ROUND_MOD_EXPR:
11310 case TRUNC_MOD_EXPR:
11311 /* X % 1 is always zero, but be sure to preserve any side
11312 effects in X. */
11313 if (integer_onep (arg1))
11314 return omit_one_operand (type, integer_zero_node, arg0);
11316 /* For X % 0, return X % 0 unchanged so that we get the
11317 proper warnings and errors. */
11318 if (integer_zerop (arg1))
11319 return NULL_TREE;
11321 /* 0 % X is always zero, but be sure to preserve any side
11322 effects in X. Place this after checking for X == 0. */
11323 if (integer_zerop (arg0))
11324 return omit_one_operand (type, integer_zero_node, arg1);
11326 /* X % -1 is zero. */
11327 if (!TYPE_UNSIGNED (type)
11328 && TREE_CODE (arg1) == INTEGER_CST
11329 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11330 && TREE_INT_CST_HIGH (arg1) == -1)
11331 return omit_one_operand (type, integer_zero_node, arg0);
11333 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11334 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11335 strict_overflow_p = false;
11336 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11337 && (TYPE_UNSIGNED (type)
11338 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11340 tree c = arg1;
11341 /* Also optimize A % (C << N) where C is a power of 2,
11342 to A & ((C << N) - 1). */
11343 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11344 c = TREE_OPERAND (arg1, 0);
11346 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11348 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11349 build_int_cst (TREE_TYPE (arg1), 1));
11350 if (strict_overflow_p)
11351 fold_overflow_warning (("assuming signed overflow does not "
11352 "occur when simplifying "
11353 "X % (power of two)"),
11354 WARN_STRICT_OVERFLOW_MISC);
11355 return fold_build2 (BIT_AND_EXPR, type,
11356 fold_convert (type, arg0),
11357 fold_convert (type, mask));
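/* For example, with unsigned x, x % 8 becomes x & 7, and
   x % (2 << n) becomes x & ((2 << n) - 1).  */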
11361 /* X % -C is the same as X % C. */
11362 if (code == TRUNC_MOD_EXPR
11363 && !TYPE_UNSIGNED (type)
11364 && TREE_CODE (arg1) == INTEGER_CST
11365 && !TREE_OVERFLOW (arg1)
11366 && TREE_INT_CST_HIGH (arg1) < 0
11367 && !TYPE_OVERFLOW_TRAPS (type)
11368 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11369 && !sign_bit_p (arg1, arg1))
11370 return fold_build2 (code, type, fold_convert (type, arg0),
11371 fold_convert (type, negate_expr (arg1)));
11373 /* X % -Y is the same as X % Y. */
11374 if (code == TRUNC_MOD_EXPR
11375 && !TYPE_UNSIGNED (type)
11376 && TREE_CODE (arg1) == NEGATE_EXPR
11377 && !TYPE_OVERFLOW_TRAPS (type))
11378 return fold_build2 (code, type, fold_convert (type, arg0),
11379 fold_convert (type, TREE_OPERAND (arg1, 0)));
11381 if (TREE_CODE (arg1) == INTEGER_CST
11382 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11383 &strict_overflow_p)))
11385 if (strict_overflow_p)
11386 fold_overflow_warning (("assuming signed overflow does not occur "
11387 "when simplifying modulos"),
11388 WARN_STRICT_OVERFLOW_MISC);
11389 return fold_convert (type, tem);
11392 return NULL_TREE;
11394 case LROTATE_EXPR:
11395 case RROTATE_EXPR:
11396 if (integer_all_onesp (arg0))
11397 return omit_one_operand (type, arg0, arg1);
11398 goto shift;
11400 case RSHIFT_EXPR:
11401 /* Optimize -1 >> x for arithmetic right shifts. */
11402 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11403 return omit_one_operand (type, arg0, arg1);
11404 /* ... fall through ... */
11406 case LSHIFT_EXPR:
11407 shift:
11408 if (integer_zerop (arg1))
11409 return non_lvalue (fold_convert (type, arg0));
11410 if (integer_zerop (arg0))
11411 return omit_one_operand (type, arg0, arg1);
11413 /* Since a negative shift count is not well-defined,
11414 don't try to compute it in the compiler. */
11415 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11416 return NULL_TREE;
11418 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11419 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11420 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11421 && host_integerp (TREE_OPERAND (arg0, 1), false)
11422 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11424 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11425 + TREE_INT_CST_LOW (arg1));
11427 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11428 being well defined. */
11429 if (low >= TYPE_PRECISION (type))
11431 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11432 low = low % TYPE_PRECISION (type);
11433 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11434 return build_int_cst (type, 0);
11435 else
11436 low = TYPE_PRECISION (type) - 1;
11439 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11440 build_int_cst (type, low));
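/* For example, (x >> 3) >> 5 becomes x >> 8.  If the combined count
   reaches the type's precision, an unsigned shift folds to 0 and a
   rotate count is reduced modulo the precision.  */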
11443 /* Transform (x >> c) << c into x & (-1 << c), or transform (x << c) >> c
11444 into x & ((unsigned)-1 >> c) for unsigned types. */
11445 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11446 || (TYPE_UNSIGNED (type)
11447 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11448 && host_integerp (arg1, false)
11449 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11450 && host_integerp (TREE_OPERAND (arg0, 1), false)
11451 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11453 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11454 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11455 tree lshift;
11456 tree arg00;
11458 if (low0 == low1)
11460 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11462 lshift = build_int_cst (type, -1);
11463 lshift = int_const_binop (code, lshift, arg1, 0);
11465 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
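/* For example (illustrative, 32-bit unsigned x), (x >> 4) << 4
   becomes x & 0xFFFFFFF0, i.e. x & (-1 << 4).  */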
11469 /* Rewrite an LROTATE_EXPR by a constant into an
11470 RROTATE_EXPR by a new constant. */
11471 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11473 tree tem = build_int_cst (TREE_TYPE (arg1),
11474 TYPE_PRECISION (type));
11475 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11476 return fold_build2 (RROTATE_EXPR, type, op0, tem);
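/* For example, rotating a 32-bit value left by 8 is the same as
   rotating it right by 32 - 8 == 24 (illustrative width).  */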
11479 /* If we have a rotate of a bit operation with the rotate count and
11480 the second operand of the bit operation both constant,
11481 permute the two operations. */
11482 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11483 && (TREE_CODE (arg0) == BIT_AND_EXPR
11484 || TREE_CODE (arg0) == BIT_IOR_EXPR
11485 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11486 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11487 return fold_build2 (TREE_CODE (arg0), type,
11488 fold_build2 (code, type,
11489 TREE_OPERAND (arg0, 0), arg1),
11490 fold_build2 (code, type,
11491 TREE_OPERAND (arg0, 1), arg1));
11493 /* Two consecutive rotates adding up to the precision of the
11494 type can be ignored. */
11495 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11496 && TREE_CODE (arg0) == RROTATE_EXPR
11497 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11498 && TREE_INT_CST_HIGH (arg1) == 0
11499 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11500 && ((TREE_INT_CST_LOW (arg1)
11501 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11502 == (unsigned int) TYPE_PRECISION (type)))
11503 return TREE_OPERAND (arg0, 0);
11505 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11506 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11507 if the latter can be further optimized. */
11508 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11509 && TREE_CODE (arg0) == BIT_AND_EXPR
11510 && TREE_CODE (arg1) == INTEGER_CST
11511 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11513 tree mask = fold_build2 (code, type,
11514 fold_convert (type, TREE_OPERAND (arg0, 1)),
11515 arg1);
11516 tree shift = fold_build2 (code, type,
11517 fold_convert (type, TREE_OPERAND (arg0, 0)),
11518 arg1);
11519 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11520 if (tem)
11521 return tem;
11524 return NULL_TREE;
11526 case MIN_EXPR:
11527 if (operand_equal_p (arg0, arg1, 0))
11528 return omit_one_operand (type, arg0, arg1);
11529 if (INTEGRAL_TYPE_P (type)
11530 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11531 return omit_one_operand (type, arg1, arg0);
11532 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11533 if (tem)
11534 return tem;
11535 goto associate;
11537 case MAX_EXPR:
11538 if (operand_equal_p (arg0, arg1, 0))
11539 return omit_one_operand (type, arg0, arg1);
11540 if (INTEGRAL_TYPE_P (type)
11541 && TYPE_MAX_VALUE (type)
11542 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11543 return omit_one_operand (type, arg1, arg0);
11544 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11545 if (tem)
11546 return tem;
11547 goto associate;
11549 case TRUTH_ANDIF_EXPR:
11550 /* Note that the operands of this must be ints
11551 and their values must be 0 or 1.
11552 ("true" is a fixed value perhaps depending on the language.) */
11553 /* If first arg is constant zero, return it. */
11554 if (integer_zerop (arg0))
11555 return fold_convert (type, arg0);
11556 case TRUTH_AND_EXPR:
11557 /* If either arg is constant true, drop it. */
11558 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11559 return non_lvalue (fold_convert (type, arg1));
11560 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11561 /* Preserve sequence points. */
11562 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11563 return non_lvalue (fold_convert (type, arg0));
11564 /* If second arg is constant zero, result is zero, but first arg
11565 must be evaluated. */
11566 if (integer_zerop (arg1))
11567 return omit_one_operand (type, arg1, arg0);
11568 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11569 case will be handled here. */
11570 if (integer_zerop (arg0))
11571 return omit_one_operand (type, arg0, arg1);
11573 /* !X && X is always false. */
11574 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11575 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11576 return omit_one_operand (type, integer_zero_node, arg1);
11577 /* X && !X is always false. */
11578 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11579 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11580 return omit_one_operand (type, integer_zero_node, arg0);
11582 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11583 means A >= Y && A != MAX, but in this case we know that
11584 A < X <= MAX. */
11586 if (!TREE_SIDE_EFFECTS (arg0)
11587 && !TREE_SIDE_EFFECTS (arg1))
11589 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11590 if (tem && !operand_equal_p (tem, arg0, 0))
11591 return fold_build2 (code, type, tem, arg1);
11593 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11594 if (tem && !operand_equal_p (tem, arg1, 0))
11595 return fold_build2 (code, type, arg0, tem);
11598 truth_andor:
11599 /* We only do these simplifications if we are optimizing. */
11600 if (!optimize)
11601 return NULL_TREE;
11603 /* Check for things like (A || B) && (A || C). We can convert this
11604 to A || (B && C). Note that either operator can be any of the four
11605 truth and/or operations and the transformation will still be
11606 valid. Also note that we only care about order for the
11607 ANDIF and ORIF operators. If B contains side effects, this
11608 might change the truth-value of A. */
11609 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11610 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11611 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11612 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11613 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11614 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11616 tree a00 = TREE_OPERAND (arg0, 0);
11617 tree a01 = TREE_OPERAND (arg0, 1);
11618 tree a10 = TREE_OPERAND (arg1, 0);
11619 tree a11 = TREE_OPERAND (arg1, 1);
11620 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11621 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11622 && (code == TRUTH_AND_EXPR
11623 || code == TRUTH_OR_EXPR));
11625 if (operand_equal_p (a00, a10, 0))
11626 return fold_build2 (TREE_CODE (arg0), type, a00,
11627 fold_build2 (code, type, a01, a11));
11628 else if (commutative && operand_equal_p (a00, a11, 0))
11629 return fold_build2 (TREE_CODE (arg0), type, a00,
11630 fold_build2 (code, type, a01, a10));
11631 else if (commutative && operand_equal_p (a01, a10, 0))
11632 return fold_build2 (TREE_CODE (arg0), type, a01,
11633 fold_build2 (code, type, a00, a11));
11635 /* This case is tricky because we must either have commutative
11636 operators or else A10 must not have side-effects. */
11638 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11639 && operand_equal_p (a01, a11, 0))
11640 return fold_build2 (TREE_CODE (arg0), type,
11641 fold_build2 (code, type, a00, a10),
11642 a01);
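/* For example, (a || b) && (a || c) becomes a || (b && c), which
   tests A only once; the side-effect check above keeps this safe for
   the short-circuiting ANDIF/ORIF forms.  */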
11645 /* See if we can build a range comparison. */
11646 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11647 return tem;
11649 /* Check for the possibility of merging component references. If our
11650 lhs is another similar operation, try to merge its rhs with our
11651 rhs. Then try to merge our lhs and rhs. */
11652 if (TREE_CODE (arg0) == code
11653 && 0 != (tem = fold_truthop (code, type,
11654 TREE_OPERAND (arg0, 1), arg1)))
11655 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11657 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11658 return tem;
11660 return NULL_TREE;
11662 case TRUTH_ORIF_EXPR:
11663 /* Note that the operands of this must be ints
11664 and their values must be 0 or true.
11665 ("true" is a fixed value perhaps depending on the language.) */
11666 /* If first arg is constant true, return it. */
11667 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11668 return fold_convert (type, arg0);
11669 case TRUTH_OR_EXPR:
11670 /* If either arg is constant zero, drop it. */
11671 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11672 return non_lvalue (fold_convert (type, arg1));
11673 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11674 /* Preserve sequence points. */
11675 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11676 return non_lvalue (fold_convert (type, arg0));
11677 /* If second arg is constant true, result is true, but we must
11678 evaluate first arg. */
11679 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11680 return omit_one_operand (type, arg1, arg0);
11681 /* Likewise for first arg, but note this only occurs here for
11682 TRUTH_OR_EXPR. */
11683 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11684 return omit_one_operand (type, arg0, arg1);
11686 /* !X || X is always true. */
11687 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11688 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11689 return omit_one_operand (type, integer_one_node, arg1);
11690 /* X || !X is always true. */
11691 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11693 return omit_one_operand (type, integer_one_node, arg0);
11695 goto truth_andor;
11697 case TRUTH_XOR_EXPR:
11698 /* If the second arg is constant zero, drop it. */
11699 if (integer_zerop (arg1))
11700 return non_lvalue (fold_convert (type, arg0));
11701 /* If the second arg is constant true, this is a logical inversion. */
11702 if (integer_onep (arg1))
11704 /* Only call invert_truthvalue if operand is a truth value. */
11705 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11706 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11707 else
11708 tem = invert_truthvalue (arg0);
11709 return non_lvalue (fold_convert (type, tem));
11711 /* Identical arguments cancel to zero. */
11712 if (operand_equal_p (arg0, arg1, 0))
11713 return omit_one_operand (type, integer_zero_node, arg0);
11715 /* !X ^ X is always true. */
11716 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11717 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11718 return omit_one_operand (type, integer_one_node, arg1);
11720 /* X ^ !X is always true. */
11721 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11722 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11723 return omit_one_operand (type, integer_one_node, arg0);
11725 return NULL_TREE;
11727 case EQ_EXPR:
11728 case NE_EXPR:
11729 tem = fold_comparison (code, type, op0, op1);
11730 if (tem != NULL_TREE)
11731 return tem;
11733 /* bool_var != 0 becomes bool_var. */
11734 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11735 && code == NE_EXPR)
11736 return non_lvalue (fold_convert (type, arg0));
11738 /* bool_var == 1 becomes bool_var. */
11739 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11740 && code == EQ_EXPR)
11741 return non_lvalue (fold_convert (type, arg0));
11743 /* bool_var != 1 becomes !bool_var. */
11744 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11745 && code == NE_EXPR)
11746 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11748 /* bool_var == 0 becomes !bool_var. */
11749 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11750 && code == EQ_EXPR)
11751 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11753 /* If this is an equality comparison of the address of two non-weak,
11754 unaliased symbols neither of which are extern (since we do not
11755 have access to attributes for externs), then we know the result. */
11756 if (TREE_CODE (arg0) == ADDR_EXPR
11757 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11758 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11759 && ! lookup_attribute ("alias",
11760 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11761 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11762 && TREE_CODE (arg1) == ADDR_EXPR
11763 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11764 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11765 && ! lookup_attribute ("alias",
11766 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11767 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11769 /* We know that we're looking at the address of two
11770 non-weak, unaliased, static _DECL nodes.
11772 It is both wasteful and incorrect to call operand_equal_p
11773 to compare the two ADDR_EXPR nodes. It is wasteful in that
11774 all we need to do is test pointer equality for the arguments
11775 to the two ADDR_EXPR nodes. It is incorrect to use
11776 operand_equal_p as that function is NOT equivalent to a
11777 C equality test. It can in fact return false for two
11778 objects which would test as equal using the C equality
11779 operator. */
11780 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11781 return constant_boolean_node (equal
11782 ? code == EQ_EXPR : code != EQ_EXPR,
11783 type);
11786 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11787 a MINUS_EXPR of a constant, we can convert it into a comparison with
11788 a revised constant as long as no overflow occurs. */
11789 if (TREE_CODE (arg1) == INTEGER_CST
11790 && (TREE_CODE (arg0) == PLUS_EXPR
11791 || TREE_CODE (arg0) == MINUS_EXPR)
11792 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11793 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11794 ? MINUS_EXPR : PLUS_EXPR,
11795 fold_convert (TREE_TYPE (arg0), arg1),
11796 TREE_OPERAND (arg0, 1), 0))
11797 && !TREE_OVERFLOW (tem))
11798 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
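/* For example, x + 5 == 7 becomes x == 2, as long as computing the
   adjusted constant does not overflow.  */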
11800 /* Similarly for a NEGATE_EXPR. */
11801 if (TREE_CODE (arg0) == NEGATE_EXPR
11802 && TREE_CODE (arg1) == INTEGER_CST
11803 && 0 != (tem = negate_expr (arg1))
11804 && TREE_CODE (tem) == INTEGER_CST
11805 && !TREE_OVERFLOW (tem))
11806 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11808 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11809 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11810 && TREE_CODE (arg1) == INTEGER_CST
11811 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11812 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11813 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11814 fold_convert (TREE_TYPE (arg0), arg1),
11815 TREE_OPERAND (arg0, 1)));
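/* For example, (x ^ 3) == 5 becomes x == (3 ^ 5), i.e. x == 6.  */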
11817 /* Transform comparisons of the form X +- C CMP X. */
11818 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11819 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11821 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11822 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11824 tree cst = TREE_OPERAND (arg0, 1);
11826 if (code == EQ_EXPR
11827 && !integer_zerop (cst))
11828 return omit_two_operands (type, boolean_false_node,
11829 TREE_OPERAND (arg0, 0), arg1);
11830 else
11831 return omit_two_operands (type, boolean_true_node,
11832 TREE_OPERAND (arg0, 0), arg1);
11835 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11836 for !=. Don't do this for ordered comparisons due to overflow. */
11837 if (TREE_CODE (arg0) == MINUS_EXPR
11838 && integer_zerop (arg1))
11839 return fold_build2 (code, type,
11840 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11842 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11843 if (TREE_CODE (arg0) == ABS_EXPR
11844 && (integer_zerop (arg1) || real_zerop (arg1)))
11845 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11847 /* If this is an EQ or NE comparison with zero and ARG0 is
11848 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11849 two operations, but the latter can be done in one less insn
11850 on machines that have only two-operand insns or on which a
11851 constant cannot be the first operand. */
11852 if (TREE_CODE (arg0) == BIT_AND_EXPR
11853 && integer_zerop (arg1))
11855 tree arg00 = TREE_OPERAND (arg0, 0);
11856 tree arg01 = TREE_OPERAND (arg0, 1);
11857 if (TREE_CODE (arg00) == LSHIFT_EXPR
11858 && integer_onep (TREE_OPERAND (arg00, 0)))
11860 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11861 arg01, TREE_OPERAND (arg00, 1));
11862 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11863 build_int_cst (TREE_TYPE (arg0), 1));
11864 return fold_build2 (code, type,
11865 fold_convert (TREE_TYPE (arg1), tem), arg1);
11867 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11868 && integer_onep (TREE_OPERAND (arg01, 0)))
11870 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11871 arg00, TREE_OPERAND (arg01, 1));
11872 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11873 build_int_cst (TREE_TYPE (arg0), 1));
11874 return fold_build2 (code, type,
11875 fold_convert (TREE_TYPE (arg1), tem), arg1);
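/* For example, ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0,
   which needs one less insn on targets where a constant cannot be
   the first operand.  */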
11879 /* If this is an NE or EQ comparison of zero against the result of a
11880 signed MOD operation whose second operand is a power of 2, make
11881 the MOD operation unsigned since it is simpler and equivalent. */
11882 if (integer_zerop (arg1)
11883 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11884 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11885 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11886 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11887 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11888 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11890 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11891 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11892 fold_convert (newtype,
11893 TREE_OPERAND (arg0, 0)),
11894 fold_convert (newtype,
11895 TREE_OPERAND (arg0, 1)));
11897 return fold_build2 (code, type, newmod,
11898 fold_convert (newtype, arg1));
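/* For example, testing x % 4 == 0 for signed x is equivalent to
   (unsigned) x % 4 == 0: x and (unsigned) x differ by a multiple of
   2**prec, which a power-of-two modulus divides evenly.  */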
11901 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11902 C1 is a valid shift constant, and C2 is a power of two, i.e.
11903 a single bit. */
11904 if (TREE_CODE (arg0) == BIT_AND_EXPR
11905 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11906 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11907 == INTEGER_CST
11908 && integer_pow2p (TREE_OPERAND (arg0, 1))
11909 && integer_zerop (arg1))
11911 tree itype = TREE_TYPE (arg0);
11912 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11913 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11915 /* Check for a valid shift count. */
11916 if (TREE_INT_CST_HIGH (arg001) == 0
11917 && TREE_INT_CST_LOW (arg001) < prec)
11919 tree arg01 = TREE_OPERAND (arg0, 1);
11920 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11921 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11922 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11923 can be rewritten as (X & (C2 << C1)) != 0. */
11924 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11926 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11927 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11928 return fold_build2 (code, type, tem, arg1);
11930 /* Otherwise, for signed (arithmetic) shifts,
11931 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11932 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11933 else if (!TYPE_UNSIGNED (itype))
11934 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11935 arg000, build_int_cst (itype, 0));
11936 /* Otherwise, for unsigned (logical) shifts,
11937 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11938 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11939 else
11940 return omit_one_operand (type,
11941 code == EQ_EXPR ? integer_one_node
11942 : integer_zero_node,
11943 arg000);
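/* For example, ((x >> 3) & 4) != 0 becomes (x & 32) != 0 since
   4 << 3 does not overflow, while for signed 32-bit x the test
   ((x >> 31) & 2) != 0 becomes x < 0 (illustrative widths).  */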
11947 /* If this is an NE comparison of zero with an AND of one, remove the
11948 comparison since the AND will give the correct value. */
11949 if (code == NE_EXPR
11950 && integer_zerop (arg1)
11951 && TREE_CODE (arg0) == BIT_AND_EXPR
11952 && integer_onep (TREE_OPERAND (arg0, 1)))
11953 return fold_convert (type, arg0);
11955 /* If we have (A & C) == C where C is a power of 2, convert this into
11956 (A & C) != 0. Similarly for NE_EXPR. */
11957 if (TREE_CODE (arg0) == BIT_AND_EXPR
11958 && integer_pow2p (TREE_OPERAND (arg0, 1))
11959 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11960 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11961 arg0, fold_convert (TREE_TYPE (arg0),
11962 integer_zero_node));
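/* For example, (x & 8) == 8 becomes (x & 8) != 0, the canonical
   single-bit test.  */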
11964 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11965 bit, then fold the expression into A < 0 or A >= 0. */
11966 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11967 if (tem)
11968 return tem;
11970 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11971 Similarly for NE_EXPR. */
11972 if (TREE_CODE (arg0) == BIT_AND_EXPR
11973 && TREE_CODE (arg1) == INTEGER_CST
11974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11976 tree notc = fold_build1 (BIT_NOT_EXPR,
11977 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11978 TREE_OPERAND (arg0, 1));
11979 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11980 arg1, notc);
11981 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11982 if (integer_nonzerop (dandnotc))
11983 return omit_one_operand (type, rslt, arg0);
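/* For example, (x & 3) == 4 can never hold, because 4 has a bit set
   outside the mask 3.  */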
11986 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11987 Similarly for NE_EXPR. */
11988 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11989 && TREE_CODE (arg1) == INTEGER_CST
11990 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11992 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11993 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11994 TREE_OPERAND (arg0, 1), notd);
11995 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11996 if (integer_nonzerop (candnotd))
11997 return omit_one_operand (type, rslt, arg0);
12000 /* Optimize comparisons of strlen vs zero to a compare of the
12001 first character of the string vs zero. To wit,
12002 strlen(ptr) == 0 => *ptr == 0
12003 strlen(ptr) != 0 => *ptr != 0
12004 Other cases should reduce to one of these two (or a constant)
12005 due to the return value of strlen being unsigned. */
12006 if (TREE_CODE (arg0) == CALL_EXPR
12007 && integer_zerop (arg1))
12009 tree fndecl = get_callee_fndecl (arg0);
12011 if (fndecl
12012 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12013 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12014 && call_expr_nargs (arg0) == 1
12015 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12017 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12018 return fold_build2 (code, type, iref,
12019 build_int_cst (TREE_TYPE (iref), 0));
12023 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12024 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12025 if (TREE_CODE (arg0) == RSHIFT_EXPR
12026 && integer_zerop (arg1)
12027 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12029 tree arg00 = TREE_OPERAND (arg0, 0);
12030 tree arg01 = TREE_OPERAND (arg0, 1);
12031 tree itype = TREE_TYPE (arg00);
12032 if (TREE_INT_CST_HIGH (arg01) == 0
12033 && TREE_INT_CST_LOW (arg01)
12034 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12036 if (TYPE_UNSIGNED (itype))
12038 itype = signed_type_for (itype);
12039 arg00 = fold_convert (itype, arg00);
12041 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12042 type, arg00, build_int_cst (itype, 0));
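/* For example, with 32-bit x, (x >> 31) != 0 becomes x < 0 and
   (x >> 31) == 0 becomes x >= 0, converting x to the signed type
   first if necessary (illustrative width).  */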
12046 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12047 if (integer_zerop (arg1)
12048 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12049 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12050 TREE_OPERAND (arg0, 1));
12052 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12053 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12054 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12055 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12056 build_int_cst (TREE_TYPE (arg1), 0));
12057 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12058 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12059 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12060 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12061 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12062 build_int_cst (TREE_TYPE (arg1), 0));
12064 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12065 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12066 && TREE_CODE (arg1) == INTEGER_CST
12067 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12068 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12069 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12070 TREE_OPERAND (arg0, 1), arg1));
12072 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12073 (X & C) == 0 when C is a single bit. */
12074 if (TREE_CODE (arg0) == BIT_AND_EXPR
12075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12076 && integer_zerop (arg1)
12077 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12079 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12080 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12081 TREE_OPERAND (arg0, 1));
12082 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12083 type, tem, arg1);
12086 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12087 constant C is a power of two, i.e. a single bit. */
12088 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12089 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12090 && integer_zerop (arg1)
12091 && integer_pow2p (TREE_OPERAND (arg0, 1))
12092 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12093 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12095 tree arg00 = TREE_OPERAND (arg0, 0);
12096 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12097 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12100 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12101 when C is a power of two, i.e. a single bit. */
12102 if (TREE_CODE (arg0) == BIT_AND_EXPR
12103 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12104 && integer_zerop (arg1)
12105 && integer_pow2p (TREE_OPERAND (arg0, 1))
12106 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12107 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12109 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12110 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12111 arg000, TREE_OPERAND (arg0, 1));
12112 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12113 tem, build_int_cst (TREE_TYPE (tem), 0));
12116 if (integer_zerop (arg1)
12117 && tree_expr_nonzero_p (arg0))
12119 tree res = constant_boolean_node (code == NE_EXPR, type);
12120 return omit_one_operand (type, res, arg0);
12123 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12124 if (TREE_CODE (arg0) == NEGATE_EXPR
12125 && TREE_CODE (arg1) == NEGATE_EXPR)
12126 return fold_build2 (code, type,
12127 TREE_OPERAND (arg0, 0),
12128 TREE_OPERAND (arg1, 0));
12130 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12131 if (TREE_CODE (arg0) == BIT_AND_EXPR
12132 && TREE_CODE (arg1) == BIT_AND_EXPR)
12134 tree arg00 = TREE_OPERAND (arg0, 0);
12135 tree arg01 = TREE_OPERAND (arg0, 1);
12136 tree arg10 = TREE_OPERAND (arg1, 0);
12137 tree arg11 = TREE_OPERAND (arg1, 1);
12138 tree itype = TREE_TYPE (arg0);
12140 if (operand_equal_p (arg01, arg11, 0))
12141 return fold_build2 (code, type,
12142 fold_build2 (BIT_AND_EXPR, itype,
12143 fold_build2 (BIT_XOR_EXPR, itype,
12144 arg00, arg10),
12145 arg01),
12146 build_int_cst (itype, 0));
12148 if (operand_equal_p (arg01, arg10, 0))
12149 return fold_build2 (code, type,
12150 fold_build2 (BIT_AND_EXPR, itype,
12151 fold_build2 (BIT_XOR_EXPR, itype,
12152 arg00, arg11),
12153 arg01),
12154 build_int_cst (itype, 0));
12156 if (operand_equal_p (arg00, arg11, 0))
12157 return fold_build2 (code, type,
12158 fold_build2 (BIT_AND_EXPR, itype,
12159 fold_build2 (BIT_XOR_EXPR, itype,
12160 arg01, arg10),
12161 arg00),
12162 build_int_cst (itype, 0));
12164 if (operand_equal_p (arg00, arg10, 0))
12165 return fold_build2 (code, type,
12166 fold_build2 (BIT_AND_EXPR, itype,
12167 fold_build2 (BIT_XOR_EXPR, itype,
12168 arg01, arg11),
12169 arg00),
12170 build_int_cst (itype, 0));
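/* For example, (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0.  */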
12173 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12174 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12176 tree arg00 = TREE_OPERAND (arg0, 0);
12177 tree arg01 = TREE_OPERAND (arg0, 1);
12178 tree arg10 = TREE_OPERAND (arg1, 0);
12179 tree arg11 = TREE_OPERAND (arg1, 1);
12180 tree itype = TREE_TYPE (arg0);
12182 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12183 operand_equal_p guarantees no side-effects so we don't need
12184 to use omit_one_operand on Z. */
12185 if (operand_equal_p (arg01, arg11, 0))
12186 return fold_build2 (code, type, arg00, arg10);
12187 if (operand_equal_p (arg01, arg10, 0))
12188 return fold_build2 (code, type, arg00, arg11);
12189 if (operand_equal_p (arg00, arg11, 0))
12190 return fold_build2 (code, type, arg01, arg10);
12191 if (operand_equal_p (arg00, arg10, 0))
12192 return fold_build2 (code, type, arg01, arg11);
12194 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12195 if (TREE_CODE (arg01) == INTEGER_CST
12196 && TREE_CODE (arg11) == INTEGER_CST)
12197 return fold_build2 (code, type,
12198 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12199 fold_build2 (BIT_XOR_EXPR, itype,
12200 arg01, arg11)),
12201 arg10);
12204 /* Attempt to simplify equality/inequality comparisons of complex
12205 values. Only lower the comparison if the result is known or
12206 can be simplified to a single scalar comparison. */
12207 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12208 || TREE_CODE (arg0) == COMPLEX_CST)
12209 && (TREE_CODE (arg1) == COMPLEX_EXPR
12210 || TREE_CODE (arg1) == COMPLEX_CST))
12212 tree real0, imag0, real1, imag1;
12213 tree rcond, icond;
12215 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12217 real0 = TREE_OPERAND (arg0, 0);
12218 imag0 = TREE_OPERAND (arg0, 1);
12220 else
12222 real0 = TREE_REALPART (arg0);
12223 imag0 = TREE_IMAGPART (arg0);
12226 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12228 real1 = TREE_OPERAND (arg1, 0);
12229 imag1 = TREE_OPERAND (arg1, 1);
12231 else
12233 real1 = TREE_REALPART (arg1);
12234 imag1 = TREE_IMAGPART (arg1);
12237 rcond = fold_binary (code, type, real0, real1);
12238 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12240 if (integer_zerop (rcond))
12242 if (code == EQ_EXPR)
12243 return omit_two_operands (type, boolean_false_node,
12244 imag0, imag1);
12245 return fold_build2 (NE_EXPR, type, imag0, imag1);
12247 else
12249 if (code == NE_EXPR)
12250 return omit_two_operands (type, boolean_true_node,
12251 imag0, imag1);
12252 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12256 icond = fold_binary (code, type, imag0, imag1);
12257 if (icond && TREE_CODE (icond) == INTEGER_CST)
12259 if (integer_zerop (icond))
12261 if (code == EQ_EXPR)
12262 return omit_two_operands (type, boolean_false_node,
12263 real0, real1);
12264 return fold_build2 (NE_EXPR, type, real0, real1);
12266 else
12268 if (code == NE_EXPR)
12269 return omit_two_operands (type, boolean_true_node,
12270 real0, real1);
12271 return fold_build2 (EQ_EXPR, type, real0, real1);
12276 return NULL_TREE;
12278 case LT_EXPR:
12279 case GT_EXPR:
12280 case LE_EXPR:
12281 case GE_EXPR:
12282 tem = fold_comparison (code, type, op0, op1);
12283 if (tem != NULL_TREE)
12284 return tem;
12286 /* Transform comparisons of the form X +- C CMP X. */
12287 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12288 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12289 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12290 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12291 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12292 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12294 tree arg01 = TREE_OPERAND (arg0, 1);
12295 enum tree_code code0 = TREE_CODE (arg0);
12296 int is_positive;
12298 if (TREE_CODE (arg01) == REAL_CST)
12299 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12300 else
12301 is_positive = tree_int_cst_sgn (arg01);
12303 /* (X - c) > X becomes false. */
12304 if (code == GT_EXPR
12305 && ((code0 == MINUS_EXPR && is_positive >= 0)
12306 || (code0 == PLUS_EXPR && is_positive <= 0)))
12308 if (TREE_CODE (arg01) == INTEGER_CST
12309 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12310 fold_overflow_warning (("assuming signed overflow does not "
12311 "occur when assuming that (X - c) > X "
12312 "is always false"),
12313 WARN_STRICT_OVERFLOW_ALL);
12314 return constant_boolean_node (0, type);
12317 /* Likewise (X + c) < X becomes false. */
12318 if (code == LT_EXPR
12319 && ((code0 == PLUS_EXPR && is_positive >= 0)
12320 || (code0 == MINUS_EXPR && is_positive <= 0)))
12322 if (TREE_CODE (arg01) == INTEGER_CST
12323 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12324 fold_overflow_warning (("assuming signed overflow does not "
12325 "occur when assuming that "
12326 "(X + c) < X is always false"),
12327 WARN_STRICT_OVERFLOW_ALL);
12328 return constant_boolean_node (0, type);
12331 /* Convert (X - c) <= X to true. */
12332 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12333 && code == LE_EXPR
12334 && ((code0 == MINUS_EXPR && is_positive >= 0)
12335 || (code0 == PLUS_EXPR && is_positive <= 0)))
12337 if (TREE_CODE (arg01) == INTEGER_CST
12338 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12339 fold_overflow_warning (("assuming signed overflow does not "
12340 "occur when assuming that "
12341 "(X - c) <= X is always true"),
12342 WARN_STRICT_OVERFLOW_ALL);
12343 return constant_boolean_node (1, type);
12346 /* Convert (X + c) >= X to true. */
12347 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12348 && code == GE_EXPR
12349 && ((code0 == PLUS_EXPR && is_positive >= 0)
12350 || (code0 == MINUS_EXPR && is_positive <= 0)))
12352 if (TREE_CODE (arg01) == INTEGER_CST
12353 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12354 fold_overflow_warning (("assuming signed overflow does not "
12355 "occur when assuming that "
12356 "(X + c) >= X is always true"),
12357 WARN_STRICT_OVERFLOW_ALL);
12358 return constant_boolean_node (1, type);
12361 if (TREE_CODE (arg01) == INTEGER_CST)
12363 /* Convert X + c > X and X - c < X to true for integers. */
12364 if (code == GT_EXPR
12365 && ((code0 == PLUS_EXPR && is_positive > 0)
12366 || (code0 == MINUS_EXPR && is_positive < 0)))
12368 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12369 fold_overflow_warning (("assuming signed overflow does "
12370 "not occur when assuming that "
12371 "(X + c) > X is always true"),
12372 WARN_STRICT_OVERFLOW_ALL);
12373 return constant_boolean_node (1, type);
12376 if (code == LT_EXPR
12377 && ((code0 == MINUS_EXPR && is_positive > 0)
12378 || (code0 == PLUS_EXPR && is_positive < 0)))
12380 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12381 fold_overflow_warning (("assuming signed overflow does "
12382 "not occur when assuming that "
12383 "(X - c) < X is always true"),
12384 WARN_STRICT_OVERFLOW_ALL);
12385 return constant_boolean_node (1, type);
12388 /* Convert X + c <= X and X - c >= X to false for integers. */
12389 if (code == LE_EXPR
12390 && ((code0 == PLUS_EXPR && is_positive > 0)
12391 || (code0 == MINUS_EXPR && is_positive < 0)))
12393 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12394 fold_overflow_warning (("assuming signed overflow does "
12395 "not occur when assuming that "
12396 "(X + c) <= X is always false"),
12397 WARN_STRICT_OVERFLOW_ALL);
12398 return constant_boolean_node (0, type);
12401 if (code == GE_EXPR
12402 && ((code0 == MINUS_EXPR && is_positive > 0)
12403 || (code0 == PLUS_EXPR && is_positive < 0)))
12405 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12406 fold_overflow_warning (("assuming signed overflow does "
12407 "not occur when assuming that "
12408 "(X - c) >= X is always false"),
12409 WARN_STRICT_OVERFLOW_ALL);
12410 return constant_boolean_node (0, type);
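/* For illustration: with "int x" and undefined signed overflow,
   "x - 1 > x" folds to 0 and "x + 1 > x" folds to 1.  Under -fwrapv
   neither fold is valid, since x == INT_MIN (resp. INT_MAX) makes the
   wrapped comparison come out the other way; hence the INTEGER_CST
   cases above require TYPE_OVERFLOW_UNDEFINED and emit a
   -Wstrict-overflow note when they fire.  */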
12415 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12416 This transformation affects the cases which are handled in later
12417 optimizations involving comparisons with non-negative constants. */
12418 if (TREE_CODE (arg1) == INTEGER_CST
12419 && TREE_CODE (arg0) != INTEGER_CST
12420 && tree_int_cst_sgn (arg1) > 0)
12422 if (code == GE_EXPR)
12424 arg1 = const_binop (MINUS_EXPR, arg1,
12425 build_int_cst (TREE_TYPE (arg1), 1), 0);
12426 return fold_build2 (GT_EXPR, type, arg0,
12427 fold_convert (TREE_TYPE (arg0), arg1));
12429 if (code == LT_EXPR)
12431 arg1 = const_binop (MINUS_EXPR, arg1,
12432 build_int_cst (TREE_TYPE (arg1), 1), 0);
12433 return fold_build2 (LE_EXPR, type, arg0,
12434 fold_convert (TREE_TYPE (arg0), arg1));
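/* For illustration: "x >= 5" becomes "x > 4" and "x < 5" becomes
   "x <= 4".  Canonicalizing on GT/LE lets the extreme-value checks
   below recognize more comparisons in a single constant form.  */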
12438 /* Comparisons with the highest or lowest possible integer of
12439 the specified precision will have known values. */
12441 tree arg1_type = TREE_TYPE (arg1);
12442 unsigned int width = TYPE_PRECISION (arg1_type);
12444 if (TREE_CODE (arg1) == INTEGER_CST
12445 && !TREE_OVERFLOW (arg1)
12446 && width <= 2 * HOST_BITS_PER_WIDE_INT
12447 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12449 HOST_WIDE_INT signed_max_hi;
12450 unsigned HOST_WIDE_INT signed_max_lo;
12451 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12453 if (width <= HOST_BITS_PER_WIDE_INT)
12455 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12456 - 1;
12457 signed_max_hi = 0;
12458 max_hi = 0;
12460 if (TYPE_UNSIGNED (arg1_type))
12462 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12463 min_lo = 0;
12464 min_hi = 0;
12466 else
12468 max_lo = signed_max_lo;
12469 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12470 min_hi = -1;
12473 else
12475 width -= HOST_BITS_PER_WIDE_INT;
12476 signed_max_lo = -1;
12477 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12478 - 1;
12479 max_lo = -1;
12480 min_lo = 0;
12482 if (TYPE_UNSIGNED (arg1_type))
12484 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12485 min_hi = 0;
12487 else
12489 max_hi = signed_max_hi;
12490 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12494 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12495 && TREE_INT_CST_LOW (arg1) == max_lo)
12496 switch (code)
12498 case GT_EXPR:
12499 return omit_one_operand (type, integer_zero_node, arg0);
12501 case GE_EXPR:
12502 return fold_build2 (EQ_EXPR, type, op0, op1);
12504 case LE_EXPR:
12505 return omit_one_operand (type, integer_one_node, arg0);
12507 case LT_EXPR:
12508 return fold_build2 (NE_EXPR, type, op0, op1);
12510 /* The GE_EXPR and LT_EXPR cases above are not normally
12511 reached because of previous transformations. */
12513 default:
12514 break;
12516 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12517 == max_hi
12518 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12519 switch (code)
12521 case GT_EXPR:
12522 arg1 = const_binop (PLUS_EXPR, arg1,
12523 build_int_cst (TREE_TYPE (arg1), 1), 0);
12524 return fold_build2 (EQ_EXPR, type,
12525 fold_convert (TREE_TYPE (arg1), arg0),
12526 arg1);
12527 case LE_EXPR:
12528 arg1 = const_binop (PLUS_EXPR, arg1,
12529 build_int_cst (TREE_TYPE (arg1), 1), 0);
12530 return fold_build2 (NE_EXPR, type,
12531 fold_convert (TREE_TYPE (arg1), arg0),
12532 arg1);
12533 default:
12534 break;
12536 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12537 == min_hi
12538 && TREE_INT_CST_LOW (arg1) == min_lo)
12539 switch (code)
12541 case LT_EXPR:
12542 return omit_one_operand (type, integer_zero_node, arg0);
12544 case LE_EXPR:
12545 return fold_build2 (EQ_EXPR, type, op0, op1);
12547 case GE_EXPR:
12548 return omit_one_operand (type, integer_one_node, arg0);
12550 case GT_EXPR:
12551 return fold_build2 (NE_EXPR, type, op0, op1);
12553 default:
12554 break;
12556 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12557 == min_hi
12558 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12559 switch (code)
12561 case GE_EXPR:
12562 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12563 return fold_build2 (NE_EXPR, type,
12564 fold_convert (TREE_TYPE (arg1), arg0),
12565 arg1);
12566 case LT_EXPR:
12567 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12568 return fold_build2 (EQ_EXPR, type,
12569 fold_convert (TREE_TYPE (arg1), arg0),
12570 arg1);
12571 default:
12572 break;
12575 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12576 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12577 && TYPE_UNSIGNED (arg1_type)
12578 /* We will flip the signedness of the comparison operator
12579 associated with the mode of arg1, so the sign bit is
12580 specified by this mode. Check that arg1 is the signed
12581 max associated with this sign bit. */
12582 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12583 /* signed_type does not work on pointer types. */
12584 && INTEGRAL_TYPE_P (arg1_type))
12586 /* The following case also applies to X < signed_max+1
12587 and X >= signed_max+1 because of previous transformations. */
12588 if (code == LE_EXPR || code == GT_EXPR)
12590 tree st;
12591 st = signed_type_for (TREE_TYPE (arg1));
12592 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12593 type, fold_convert (st, arg0),
12594 build_int_cst (st, 0));
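/* For illustration, with "unsigned char x" (width 8, maximum 255):
     x >  255  ==>  0            x >= 255  ==>  x == 255
     x <= 255  ==>  1            x >  254  ==>  x == 255
   and for "unsigned int x" compared against the signed maximum,
   "x <= INT_MAX" becomes "(int) x >= 0", turning a range test into
   a sign-bit test.  */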
12600 /* If we are comparing an ABS_EXPR with a constant, we can
12601 convert all the cases into explicit comparisons, but they may
12602 well not be faster than doing the ABS and one comparison.
12603 But ABS (X) <= C is a range comparison, which becomes a subtraction
12604 and a comparison, and is probably faster. */
12605 if (code == LE_EXPR
12606 && TREE_CODE (arg1) == INTEGER_CST
12607 && TREE_CODE (arg0) == ABS_EXPR
12608 && ! TREE_SIDE_EFFECTS (arg0)
12609 && (0 != (tem = negate_expr (arg1)))
12610 && TREE_CODE (tem) == INTEGER_CST
12611 && !TREE_OVERFLOW (tem))
12612 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12613 build2 (GE_EXPR, type,
12614 TREE_OPERAND (arg0, 0), tem),
12615 build2 (LE_EXPR, type,
12616 TREE_OPERAND (arg0, 0), arg1));
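/* For illustration: "abs (x) <= 5" is rewritten as
   "x >= -5 && x <= 5", a negation plus two comparisons instead of
   an abs followed by a comparison.  */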
12618 /* Convert ABS_EXPR<x> >= 0 to true. */
12619 strict_overflow_p = false;
12620 if (code == GE_EXPR
12621 && (integer_zerop (arg1)
12622 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12623 && real_zerop (arg1)))
12624 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12626 if (strict_overflow_p)
12627 fold_overflow_warning (("assuming signed overflow does not occur "
12628 "when simplifying comparison of "
12629 "absolute value and zero"),
12630 WARN_STRICT_OVERFLOW_CONDITIONAL);
12631 return omit_one_operand (type, integer_one_node, arg0);
12634 /* Convert ABS_EXPR<x> < 0 to false. */
12635 strict_overflow_p = false;
12636 if (code == LT_EXPR
12637 && (integer_zerop (arg1) || real_zerop (arg1))
12638 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12640 if (strict_overflow_p)
12641 fold_overflow_warning (("assuming signed overflow does not occur "
12642 "when simplifying comparison of "
12643 "absolute value and zero"),
12644 WARN_STRICT_OVERFLOW_CONDITIONAL);
12645 return omit_one_operand (type, integer_zero_node, arg0);
12648 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12649 and similarly for >= into !=. */
12650 if ((code == LT_EXPR || code == GE_EXPR)
12651 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12652 && TREE_CODE (arg1) == LSHIFT_EXPR
12653 && integer_onep (TREE_OPERAND (arg1, 0)))
12654 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12655 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12656 TREE_OPERAND (arg1, 1)),
12657 build_int_cst (TREE_TYPE (arg0), 0));
12659 if ((code == LT_EXPR || code == GE_EXPR)
12660 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12661 && CONVERT_EXPR_P (arg1)
12662 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12663 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12664 return
12665 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12666 fold_convert (TREE_TYPE (arg0),
12667 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12668 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12669 1))),
12670 build_int_cst (TREE_TYPE (arg0), 0));
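/* For illustration, with "unsigned int x":
     x <  (1u << n)  ==>  (x >> n) == 0
     x >= (1u << n)  ==>  (x >> n) != 0
   since x < (1 << n) holds exactly when all bits of x at position
   n and above are zero, i.e. when x >> n is zero.  */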
12672 return NULL_TREE;
12674 case UNORDERED_EXPR:
12675 case ORDERED_EXPR:
12676 case UNLT_EXPR:
12677 case UNLE_EXPR:
12678 case UNGT_EXPR:
12679 case UNGE_EXPR:
12680 case UNEQ_EXPR:
12681 case LTGT_EXPR:
12682 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12684 t1 = fold_relational_const (code, type, arg0, arg1);
12685 if (t1 != NULL_TREE)
12686 return t1;
12689 /* If the first operand is NaN, the result is constant. */
12690 if (TREE_CODE (arg0) == REAL_CST
12691 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12692 && (code != LTGT_EXPR || ! flag_trapping_math))
12694 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12695 ? integer_zero_node
12696 : integer_one_node;
12697 return omit_one_operand (type, t1, arg1);
12700 /* If the second operand is NaN, the result is constant. */
12701 if (TREE_CODE (arg1) == REAL_CST
12702 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12703 && (code != LTGT_EXPR || ! flag_trapping_math))
12705 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12706 ? integer_zero_node
12707 : integer_one_node;
12708 return omit_one_operand (type, t1, arg0);
12711 /* Simplify unordered comparison of something with itself. */
12712 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12713 && operand_equal_p (arg0, arg1, 0))
12714 return constant_boolean_node (1, type);
12716 if (code == LTGT_EXPR
12717 && !flag_trapping_math
12718 && operand_equal_p (arg0, arg1, 0))
12719 return constant_boolean_node (0, type);
12721 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12723 tree targ0 = strip_float_extensions (arg0);
12724 tree targ1 = strip_float_extensions (arg1);
12725 tree newtype = TREE_TYPE (targ0);
12727 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12728 newtype = TREE_TYPE (targ1);
12730 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12731 return fold_build2 (code, type, fold_convert (newtype, targ0),
12732 fold_convert (newtype, targ1));
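/* For illustration: with "float f1, f2", the comparison
   "(double) f1 < (double) f2" folds to "f1 < f2"; the conversion is
   dropped only when the narrower values are exactly representable
   in the wider format, so the result cannot change.  */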
12735 return NULL_TREE;
12737 case COMPOUND_EXPR:
12738 /* When pedantic, a compound expression can be neither an lvalue
12739 nor an integer constant expression. */
12740 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12741 return NULL_TREE;
12742 /* Don't let (0, 0) be a null pointer constant. */
12743 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12744 : fold_convert (type, arg1);
12745 return pedantic_non_lvalue (tem);
12747 case COMPLEX_EXPR:
12748 if ((TREE_CODE (arg0) == REAL_CST
12749 && TREE_CODE (arg1) == REAL_CST)
12750 || (TREE_CODE (arg0) == INTEGER_CST
12751 && TREE_CODE (arg1) == INTEGER_CST))
12752 return build_complex (type, arg0, arg1);
12753 return NULL_TREE;
12755 case ASSERT_EXPR:
12756 /* An ASSERT_EXPR should never be passed to fold_binary. */
12757 gcc_unreachable ();
12759 default:
12760 return NULL_TREE;
12761 } /* switch (code) */
12764 /* Callback for walk_tree, looking for LABEL_EXPR.
12765 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
12766 Do not check the sub-tree of GOTO_EXPR. */
12768 static tree
12769 contains_label_1 (tree *tp,
12770 int *walk_subtrees,
12771 void *data ATTRIBUTE_UNUSED)
12773 switch (TREE_CODE (*tp))
12775 case LABEL_EXPR:
12776 return *tp;
12777 case GOTO_EXPR:
12778 *walk_subtrees = 0;
12779 /* no break */
12780 default:
12781 return NULL_TREE;
12785 /* Checks whether the sub-tree ST contains a label (a LABEL_EXPR) which is
12786 accessible from outside the sub-tree. Returns true if such a label
12787 is found, false otherwise. */
12789 static bool
12790 contains_label_p (tree st)
12792 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12795 /* Fold a ternary expression of code CODE and type TYPE with operands
12796 OP0, OP1, and OP2. Return the folded expression if folding is
12797 successful. Otherwise, return NULL_TREE. */
12799 tree
12800 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12802 tree tem;
12803 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12804 enum tree_code_class kind = TREE_CODE_CLASS (code);
12806 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12807 && TREE_CODE_LENGTH (code) == 3);
12809 /* Strip any conversions that don't change the mode. This is safe
12810 for every expression, except for a comparison expression because
12811 its signedness is derived from its operands. So, in the latter
12812 case, only strip conversions that don't change the signedness.
12814 Note that this is done as an internal manipulation within the
12815 constant folder, in order to find the simplest representation of
12816 the arguments so that their form can be studied. In any case,
12817 the appropriate type conversions should be put back in the tree
12818 that will get out of the constant folder. */
12819 if (op0)
12821 arg0 = op0;
12822 STRIP_NOPS (arg0);
12825 if (op1)
12827 arg1 = op1;
12828 STRIP_NOPS (arg1);
12831 switch (code)
12833 case COMPONENT_REF:
12834 if (TREE_CODE (arg0) == CONSTRUCTOR
12835 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12837 unsigned HOST_WIDE_INT idx;
12838 tree field, value;
12839 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12840 if (field == arg1)
12841 return value;
12843 return NULL_TREE;
12845 case COND_EXPR:
12846 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12847 so all simple results must be passed through pedantic_non_lvalue. */
12848 if (TREE_CODE (arg0) == INTEGER_CST)
12850 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12851 tem = integer_zerop (arg0) ? op2 : op1;
12852 /* Only optimize constant conditions when the selected branch
12853 has the same type as the COND_EXPR. This avoids optimizing
12854 away "c ? x : throw", where the throw has a void type.
12855 Avoid throwing away an operand that contains a label. */
12856 if ((!TREE_SIDE_EFFECTS (unused_op)
12857 || !contains_label_p (unused_op))
12858 && (! VOID_TYPE_P (TREE_TYPE (tem))
12859 || VOID_TYPE_P (type)))
12860 return pedantic_non_lvalue (tem);
12861 return NULL_TREE;
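/* For illustration: "1 ? f () : g ()" folds to "f ()", while
   "0 ? x : throw e" (C++) is left alone because the selected throw
   branch has void type while the whole expression does not.  */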
12863 if (operand_equal_p (arg1, op2, 0))
12864 return pedantic_omit_one_operand (type, arg1, arg0);
12866 /* If we have A op B ? A : C, we may be able to convert this to a
12867 simpler expression, depending on the operation and the values
12868 of B and C. Signed zeros prevent all of these transformations,
12869 for reasons given above each one.
12871 Also try swapping the arguments and inverting the conditional. */
12872 if (COMPARISON_CLASS_P (arg0)
12873 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12874 arg1, TREE_OPERAND (arg0, 1))
12875 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12877 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12878 if (tem)
12879 return tem;
12882 if (COMPARISON_CLASS_P (arg0)
12883 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12884 op2,
12885 TREE_OPERAND (arg0, 1))
12886 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12888 tem = fold_truth_not_expr (arg0);
12889 if (tem && COMPARISON_CLASS_P (tem))
12891 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12892 if (tem)
12893 return tem;
12897 /* If the second operand is simpler than the third, swap them
12898 since that produces better jump optimization results. */
12899 if (truth_value_p (TREE_CODE (arg0))
12900 && tree_swap_operands_p (op1, op2, false))
12902 /* See if this can be inverted. If it can't, possibly because
12903 it was a floating-point inequality comparison, don't do
12904 anything. */
12905 tem = fold_truth_not_expr (arg0);
12906 if (tem)
12907 return fold_build3 (code, type, tem, op2, op1);
12910 /* Convert A ? 1 : 0 to simply A. */
12911 if (integer_onep (op1)
12912 && integer_zerop (op2)
12913 /* If we try to convert OP0 to our type, the
12914 call to fold will try to move the conversion inside
12915 a COND, which will recurse. In that case, the COND_EXPR
12916 is probably the best choice, so leave it alone. */
12917 && type == TREE_TYPE (arg0))
12918 return pedantic_non_lvalue (arg0);
12920 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12921 over COND_EXPR in cases such as floating point comparisons. */
12922 if (integer_zerop (op1)
12923 && integer_onep (op2)
12924 && truth_value_p (TREE_CODE (arg0)))
12925 return pedantic_non_lvalue (fold_convert (type,
12926 invert_truthvalue (arg0)));
12928 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12929 if (TREE_CODE (arg0) == LT_EXPR
12930 && integer_zerop (TREE_OPERAND (arg0, 1))
12931 && integer_zerop (op2)
12932 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12934 /* sign_bit_p only checks ARG1 bits within A's precision.
12935 If <sign bit of A> has wider type than A, bits outside
12936 of A's precision in <sign bit of A> need to be checked.
12937 If they are all 0, this optimization needs to be done
12938 in unsigned A's type; if they are all 1, in signed A's type;
12939 otherwise this can't be done. */
12940 if (TYPE_PRECISION (TREE_TYPE (tem))
12941 < TYPE_PRECISION (TREE_TYPE (arg1))
12942 && TYPE_PRECISION (TREE_TYPE (tem))
12943 < TYPE_PRECISION (type))
12945 unsigned HOST_WIDE_INT mask_lo;
12946 HOST_WIDE_INT mask_hi;
12947 int inner_width, outer_width;
12948 tree tem_type;
12950 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12951 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12952 if (outer_width > TYPE_PRECISION (type))
12953 outer_width = TYPE_PRECISION (type);
12955 if (outer_width > HOST_BITS_PER_WIDE_INT)
12957 mask_hi = ((unsigned HOST_WIDE_INT) -1
12958 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12959 mask_lo = -1;
12961 else
12963 mask_hi = 0;
12964 mask_lo = ((unsigned HOST_WIDE_INT) -1
12965 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12967 if (inner_width > HOST_BITS_PER_WIDE_INT)
12969 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12970 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12971 mask_lo = 0;
12973 else
12974 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12975 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12977 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12978 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12980 tem_type = signed_type_for (TREE_TYPE (tem));
12981 tem = fold_convert (tem_type, tem);
12983 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12984 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12986 tem_type = unsigned_type_for (TREE_TYPE (tem));
12987 tem = fold_convert (tem_type, tem);
12989 else
12990 tem = NULL;
12993 if (tem)
12994 return fold_convert (type,
12995 fold_build2 (BIT_AND_EXPR,
12996 TREE_TYPE (tem), tem,
12997 fold_convert (TREE_TYPE (tem),
12998 arg1)));
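/* For illustration, with "int a" and M the sign-bit mask of its
   type:
     a < 0 ? M : 0   ==>   a & M
   selecting M exactly when a is negative is the same as masking
   out everything but a's sign bit.  */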
13001 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13002 already handled above. */
13003 if (TREE_CODE (arg0) == BIT_AND_EXPR
13004 && integer_onep (TREE_OPERAND (arg0, 1))
13005 && integer_zerop (op2)
13006 && integer_pow2p (arg1))
13008 tree tem = TREE_OPERAND (arg0, 0);
13009 STRIP_NOPS (tem);
13010 if (TREE_CODE (tem) == RSHIFT_EXPR
13011 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13012 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13013 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13014 return fold_build2 (BIT_AND_EXPR, type,
13015 TREE_OPERAND (tem, 0), arg1);
13018 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13019 is probably obsolete because the first operand should be a
13020 truth value (that's why we have the two cases above), but let's
13021 leave it in until we can confirm this for all front-ends. */
13022 if (integer_zerop (op2)
13023 && TREE_CODE (arg0) == NE_EXPR
13024 && integer_zerop (TREE_OPERAND (arg0, 1))
13025 && integer_pow2p (arg1)
13026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13027 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13028 arg1, OEP_ONLY_CONST))
13029 return pedantic_non_lvalue (fold_convert (type,
13030 TREE_OPERAND (arg0, 0)));
13032 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13033 if (integer_zerop (op2)
13034 && truth_value_p (TREE_CODE (arg0))
13035 && truth_value_p (TREE_CODE (arg1)))
13036 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13037 fold_convert (type, arg0),
13038 arg1);
13040 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13041 if (integer_onep (op2)
13042 && truth_value_p (TREE_CODE (arg0))
13043 && truth_value_p (TREE_CODE (arg1)))
13045 /* Only perform transformation if ARG0 is easily inverted. */
13046 tem = fold_truth_not_expr (arg0);
13047 if (tem)
13048 return fold_build2 (TRUTH_ORIF_EXPR, type,
13049 fold_convert (type, tem),
13050 arg1);
13053 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13054 if (integer_zerop (arg1)
13055 && truth_value_p (TREE_CODE (arg0))
13056 && truth_value_p (TREE_CODE (op2)))
13058 /* Only perform transformation if ARG0 is easily inverted. */
13059 tem = fold_truth_not_expr (arg0);
13060 if (tem)
13061 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13062 fold_convert (type, tem),
13063 op2);
13066 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13067 if (integer_onep (arg1)
13068 && truth_value_p (TREE_CODE (arg0))
13069 && truth_value_p (TREE_CODE (op2)))
13070 return fold_build2 (TRUTH_ORIF_EXPR, type,
13071 fold_convert (type, arg0),
13072 op2);
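/* For illustration, with truth values a and b the four rewrites
   above are:
     a ? b : 0  ==>  a && b          a ? b : 1  ==>  !a || b
     a ? 0 : b  ==>  !a && b         a ? 1 : b  ==>  a || b
   where the inverted forms are produced only when
   fold_truth_not_expr can cheaply invert a.  */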
13074 return NULL_TREE;
13076 case CALL_EXPR:
13077 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13078 of fold_ternary on them. */
13079 gcc_unreachable ();
13081 case BIT_FIELD_REF:
13082 if ((TREE_CODE (arg0) == VECTOR_CST
13083 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13084 && type == TREE_TYPE (TREE_TYPE (arg0)))
13086 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13087 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13089 if (width != 0
13090 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13091 && (idx % width) == 0
13092 && (idx = idx / width)
13093 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13095 tree elements = NULL_TREE;
13097 if (TREE_CODE (arg0) == VECTOR_CST)
13098 elements = TREE_VECTOR_CST_ELTS (arg0);
13099 else
13101 unsigned HOST_WIDE_INT idx;
13102 tree value;
13104 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13105 elements = tree_cons (NULL_TREE, value, elements);
13107 while (idx-- > 0 && elements)
13108 elements = TREE_CHAIN (elements);
13109 if (elements)
13110 return TREE_VALUE (elements);
13111 else
13112 return fold_convert (type, integer_zero_node);
13115 return NULL_TREE;
13117 default:
13118 return NULL_TREE;
13119 } /* switch (code) */
13122 /* Perform constant folding and related simplification of EXPR.
13123 The related simplifications include x*1 => x, x*0 => 0, etc.,
13124 and application of the associative law.
13125 NOP_EXPR conversions may be removed freely (as long as we
13126 are careful not to change the type of the overall expression).
13127 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13128 but we can constant-fold them if they have constant operands. */
13130 #ifdef ENABLE_FOLD_CHECKING
13131 # define fold(x) fold_1 (x)
13132 static tree fold_1 (tree);
13133 static
13134 #endif
13135 tree
13136 fold (tree expr)
13138 const tree t = expr;
13139 enum tree_code code = TREE_CODE (t);
13140 enum tree_code_class kind = TREE_CODE_CLASS (code);
13141 tree tem;
13143 /* Return right away if a constant. */
13144 if (kind == tcc_constant)
13145 return t;
13147 /* CALL_EXPR-like objects with variable numbers of operands are
13148 treated specially. */
13149 if (kind == tcc_vl_exp)
13151 if (code == CALL_EXPR)
13153 tem = fold_call_expr (expr, false);
13154 return tem ? tem : expr;
13156 return expr;
13159 if (IS_EXPR_CODE_CLASS (kind)
13160 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13162 tree type = TREE_TYPE (t);
13163 tree op0, op1, op2;
13165 switch (TREE_CODE_LENGTH (code))
13167 case 1:
13168 op0 = TREE_OPERAND (t, 0);
13169 tem = fold_unary (code, type, op0);
13170 return tem ? tem : expr;
13171 case 2:
13172 op0 = TREE_OPERAND (t, 0);
13173 op1 = TREE_OPERAND (t, 1);
13174 tem = fold_binary (code, type, op0, op1);
13175 return tem ? tem : expr;
13176 case 3:
13177 op0 = TREE_OPERAND (t, 0);
13178 op1 = TREE_OPERAND (t, 1);
13179 op2 = TREE_OPERAND (t, 2);
13180 tem = fold_ternary (code, type, op0, op1, op2);
13181 return tem ? tem : expr;
13182 default:
13183 break;
13187 switch (code)
13189 case ARRAY_REF:
13191 tree op0 = TREE_OPERAND (t, 0);
13192 tree op1 = TREE_OPERAND (t, 1);
13194 if (TREE_CODE (op1) == INTEGER_CST
13195 && TREE_CODE (op0) == CONSTRUCTOR
13196 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13198 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13199 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13200 unsigned HOST_WIDE_INT begin = 0;
13202 /* Find a matching index by means of a binary search. */
13203 while (begin != end)
13205 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13206 tree index = VEC_index (constructor_elt, elts, middle)->index;
13208 if (TREE_CODE (index) == INTEGER_CST
13209 && tree_int_cst_lt (index, op1))
13210 begin = middle + 1;
13211 else if (TREE_CODE (index) == INTEGER_CST
13212 && tree_int_cst_lt (op1, index))
13213 end = middle;
13214 else if (TREE_CODE (index) == RANGE_EXPR
13215 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13216 begin = middle + 1;
13217 else if (TREE_CODE (index) == RANGE_EXPR
13218 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13219 end = middle;
13220 else
13221 return VEC_index (constructor_elt, elts, middle)->value;
13225 return t;
13228 case CONST_DECL:
13229 return fold (DECL_INITIAL (t));
13231 default:
13232 return t;
13233 } /* switch (code) */
13236 #ifdef ENABLE_FOLD_CHECKING
13237 #undef fold
13239 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13240 static void fold_check_failed (const_tree, const_tree);
13241 void print_fold_checksum (const_tree);
13243 /* When --enable-checking=fold, compute a digest of EXPR before
13244 and after the actual fold call to verify that fold did not
13245 accidentally change the original EXPR. */
13247 tree
13248 fold (tree expr)
13250 tree ret;
13251 struct md5_ctx ctx;
13252 unsigned char checksum_before[16], checksum_after[16];
13253 htab_t ht;
13255 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13256 md5_init_ctx (&ctx);
13257 fold_checksum_tree (expr, &ctx, ht);
13258 md5_finish_ctx (&ctx, checksum_before);
13259 htab_empty (ht);
13261 ret = fold_1 (expr);
13263 md5_init_ctx (&ctx);
13264 fold_checksum_tree (expr, &ctx, ht);
13265 md5_finish_ctx (&ctx, checksum_after);
13266 htab_delete (ht);
13268 if (memcmp (checksum_before, checksum_after, 16))
13269 fold_check_failed (expr, ret);
13271 return ret;
13274 void
13275 print_fold_checksum (const_tree expr)
13277 struct md5_ctx ctx;
13278 unsigned char checksum[16], cnt;
13279 htab_t ht;
13281 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13282 md5_init_ctx (&ctx);
13283 fold_checksum_tree (expr, &ctx, ht);
13284 md5_finish_ctx (&ctx, checksum);
13285 htab_delete (ht);
13286 for (cnt = 0; cnt < 16; ++cnt)
13287 fprintf (stderr, "%02x", checksum[cnt]);
13288 putc ('\n', stderr);
13291 static void
13292 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13294 internal_error ("fold check: original tree changed by fold");
13297 static void
13298 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13300 const void **slot;
13301 enum tree_code code;
13302 struct tree_function_decl buf;
13303 int i, len;
13305 recursive_label:
13307 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13308 <= sizeof (struct tree_function_decl))
13309 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13310 if (expr == NULL)
13311 return;
13312 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13313 if (*slot != NULL)
13314 return;
13315 *slot = expr;
13316 code = TREE_CODE (expr);
13317 if (TREE_CODE_CLASS (code) == tcc_declaration
13318 && DECL_ASSEMBLER_NAME_SET_P (expr))
13320 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13321 memcpy ((char *) &buf, expr, tree_size (expr));
13322 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13323 expr = (tree) &buf;
13325 else if (TREE_CODE_CLASS (code) == tcc_type
13326 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13327 || TYPE_CACHED_VALUES_P (expr)
13328 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13330 /* Allow these fields to be modified. */
13331 tree tmp;
13332 memcpy ((char *) &buf, expr, tree_size (expr));
13333 expr = tmp = (tree) &buf;
13334 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13335 TYPE_POINTER_TO (tmp) = NULL;
13336 TYPE_REFERENCE_TO (tmp) = NULL;
13337 if (TYPE_CACHED_VALUES_P (tmp))
13339 TYPE_CACHED_VALUES_P (tmp) = 0;
13340 TYPE_CACHED_VALUES (tmp) = NULL;
13343 md5_process_bytes (expr, tree_size (expr), ctx);
13344 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13345 if (TREE_CODE_CLASS (code) != tcc_type
13346 && TREE_CODE_CLASS (code) != tcc_declaration
13347 && code != TREE_LIST
13348 && code != SSA_NAME)
13349 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13350 switch (TREE_CODE_CLASS (code))
13352 case tcc_constant:
13353 switch (code)
13355 case STRING_CST:
13356 md5_process_bytes (TREE_STRING_POINTER (expr),
13357 TREE_STRING_LENGTH (expr), ctx);
13358 break;
13359 case COMPLEX_CST:
13360 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13361 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13362 break;
13363 case VECTOR_CST:
13364 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13365 break;
13366 default:
13367 break;
13369 break;
13370 case tcc_exceptional:
13371 switch (code)
13373 case TREE_LIST:
13374 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13375 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13376 expr = TREE_CHAIN (expr);
13377 goto recursive_label;
13378 break;
13379 case TREE_VEC:
13380 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13381 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13382 break;
13383 default:
13384 break;
13386 break;
13387 case tcc_expression:
13388 case tcc_reference:
13389 case tcc_comparison:
13390 case tcc_unary:
13391 case tcc_binary:
13392 case tcc_statement:
13393 case tcc_vl_exp:
13394 len = TREE_OPERAND_LENGTH (expr);
13395 for (i = 0; i < len; ++i)
13396 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13397 break;
13398 case tcc_declaration:
13399 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13400 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13401 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13403 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13404 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13405 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13406 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13407 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13409 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13410 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13412 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13414 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13415 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13416 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13418 break;
13419 case tcc_type:
13420 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13421 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13422 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13423 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13424 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13425 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13426 if (INTEGRAL_TYPE_P (expr)
13427 || SCALAR_FLOAT_TYPE_P (expr))
13429 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13430 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13432 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13433 if (TREE_CODE (expr) == RECORD_TYPE
13434 || TREE_CODE (expr) == UNION_TYPE
13435 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13436 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13437 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13438 break;
13439 default:
13440 break;
13444 /* Helper function for outputting the checksum of a tree T. When
13445 debugging with gdb, you can "define mynext" to be "next" followed
13446 by "call debug_fold_checksum (op0)", then just trace down till the
13447 outputs differ. */
13449 void
13450 debug_fold_checksum (const_tree t)
13452 int i;
13453 unsigned char checksum[16];
13454 struct md5_ctx ctx;
13455 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13457 md5_init_ctx (&ctx);
13458 fold_checksum_tree (t, &ctx, ht);
13459 md5_finish_ctx (&ctx, checksum);
13460 htab_empty (ht);
13462 for (i = 0; i < 16; i++)
13463 fprintf (stderr, "%d ", checksum[i]);
13465 fprintf (stderr, "\n");
13468 #endif
13470 /* Fold a unary tree expression with code CODE of type TYPE with an
13471 operand OP0. Return a folded expression if successful. Otherwise,
13472 return a tree expression with code CODE of type TYPE with an
13473 operand OP0. */
13475 tree
13476 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13478 tree tem;
13479 #ifdef ENABLE_FOLD_CHECKING
13480 unsigned char checksum_before[16], checksum_after[16];
13481 struct md5_ctx ctx;
13482 htab_t ht;
13484 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13485 md5_init_ctx (&ctx);
13486 fold_checksum_tree (op0, &ctx, ht);
13487 md5_finish_ctx (&ctx, checksum_before);
13488 htab_empty (ht);
13489 #endif
13491 tem = fold_unary (code, type, op0);
13492 if (!tem)
13493 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13495 #ifdef ENABLE_FOLD_CHECKING
13496 md5_init_ctx (&ctx);
13497 fold_checksum_tree (op0, &ctx, ht);
13498 md5_finish_ctx (&ctx, checksum_after);
13499 htab_delete (ht);
13501 if (memcmp (checksum_before, checksum_after, 16))
13502 fold_check_failed (op0, tem);
13503 #endif
13504 return tem;
13507 /* Fold a binary tree expression with code CODE of type TYPE with
13508 operands OP0 and OP1. Return a folded expression if successful.
13509 Otherwise, return a tree expression with code CODE of type TYPE
13510 with operands OP0 and OP1. */
13512 tree
13513 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13514 MEM_STAT_DECL)
13516 tree tem;
13517 #ifdef ENABLE_FOLD_CHECKING
13518 unsigned char checksum_before_op0[16],
13519 checksum_before_op1[16],
13520 checksum_after_op0[16],
13521 checksum_after_op1[16];
13522 struct md5_ctx ctx;
13523 htab_t ht;
13525 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13526 md5_init_ctx (&ctx);
13527 fold_checksum_tree (op0, &ctx, ht);
13528 md5_finish_ctx (&ctx, checksum_before_op0);
13529 htab_empty (ht);
13531 md5_init_ctx (&ctx);
13532 fold_checksum_tree (op1, &ctx, ht);
13533 md5_finish_ctx (&ctx, checksum_before_op1);
13534 htab_empty (ht);
13535 #endif
13537 tem = fold_binary (code, type, op0, op1);
13538 if (!tem)
13539 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13541 #ifdef ENABLE_FOLD_CHECKING
13542 md5_init_ctx (&ctx);
13543 fold_checksum_tree (op0, &ctx, ht);
13544 md5_finish_ctx (&ctx, checksum_after_op0);
13545 htab_empty (ht);
13547 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13548 fold_check_failed (op0, tem);
13550 md5_init_ctx (&ctx);
13551 fold_checksum_tree (op1, &ctx, ht);
13552 md5_finish_ctx (&ctx, checksum_after_op1);
13553 htab_delete (ht);
13555 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13556 fold_check_failed (op1, tem);
13557 #endif
13558 return tem;
13561 /* Fold a ternary tree expression with code CODE of type TYPE with
13562 operands OP0, OP1, and OP2. Return a folded expression if
13563 successful. Otherwise, return a tree expression with code CODE of
13564 type TYPE with operands OP0, OP1, and OP2. */
13566 tree
13567 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13568 MEM_STAT_DECL)
13570 tree tem;
13571 #ifdef ENABLE_FOLD_CHECKING
13572 unsigned char checksum_before_op0[16],
13573 checksum_before_op1[16],
13574 checksum_before_op2[16],
13575 checksum_after_op0[16],
13576 checksum_after_op1[16],
13577 checksum_after_op2[16];
13578 struct md5_ctx ctx;
13579 htab_t ht;
13581 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13582 md5_init_ctx (&ctx);
13583 fold_checksum_tree (op0, &ctx, ht);
13584 md5_finish_ctx (&ctx, checksum_before_op0);
13585 htab_empty (ht);
13587 md5_init_ctx (&ctx);
13588 fold_checksum_tree (op1, &ctx, ht);
13589 md5_finish_ctx (&ctx, checksum_before_op1);
13590 htab_empty (ht);
13592 md5_init_ctx (&ctx);
13593 fold_checksum_tree (op2, &ctx, ht);
13594 md5_finish_ctx (&ctx, checksum_before_op2);
13595 htab_empty (ht);
13596 #endif
13598 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13599 tem = fold_ternary (code, type, op0, op1, op2);
13600 if (!tem)
13601 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13603 #ifdef ENABLE_FOLD_CHECKING
13604 md5_init_ctx (&ctx);
13605 fold_checksum_tree (op0, &ctx, ht);
13606 md5_finish_ctx (&ctx, checksum_after_op0);
13607 htab_empty (ht);
13609 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13610 fold_check_failed (op0, tem);
13612 md5_init_ctx (&ctx);
13613 fold_checksum_tree (op1, &ctx, ht);
13614 md5_finish_ctx (&ctx, checksum_after_op1);
13615 htab_empty (ht);
13617 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13618 fold_check_failed (op1, tem);
13620 md5_init_ctx (&ctx);
13621 fold_checksum_tree (op2, &ctx, ht);
13622 md5_finish_ctx (&ctx, checksum_after_op2);
13623 htab_delete (ht);
13625 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13626 fold_check_failed (op2, tem);
13627 #endif
13628 return tem;
13631 /* Fold a CALL_EXPR expression of type TYPE calling function FN with
13632 the NARGS arguments in ARGARRAY, and a null static chain.
13633 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13634 of type TYPE from the given operands as constructed by build_call_array. */
13636 tree
13637 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13639 tree tem;
13640 #ifdef ENABLE_FOLD_CHECKING
13641 unsigned char checksum_before_fn[16],
13642 checksum_before_arglist[16],
13643 checksum_after_fn[16],
13644 checksum_after_arglist[16];
13645 struct md5_ctx ctx;
13646 htab_t ht;
13647 int i;
13649 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13650 md5_init_ctx (&ctx);
13651 fold_checksum_tree (fn, &ctx, ht);
13652 md5_finish_ctx (&ctx, checksum_before_fn);
13653 htab_empty (ht);
13655 md5_init_ctx (&ctx);
13656 for (i = 0; i < nargs; i++)
13657 fold_checksum_tree (argarray[i], &ctx, ht);
13658 md5_finish_ctx (&ctx, checksum_before_arglist);
13659 htab_empty (ht);
13660 #endif
13662 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13664 #ifdef ENABLE_FOLD_CHECKING
13665 md5_init_ctx (&ctx);
13666 fold_checksum_tree (fn, &ctx, ht);
13667 md5_finish_ctx (&ctx, checksum_after_fn);
13668 htab_empty (ht);
13670 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13671 fold_check_failed (fn, tem);
13673 md5_init_ctx (&ctx);
13674 for (i = 0; i < nargs; i++)
13675 fold_checksum_tree (argarray[i], &ctx, ht);
13676 md5_finish_ctx (&ctx, checksum_after_arglist);
13677 htab_delete (ht);
13679 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13680 fold_check_failed (NULL_TREE, tem);
13681 #endif
13682 return tem;
13685 /* Perform constant folding and related simplification of initializer
13686 expressions. These functions behave identically to "fold_buildN" but
13687 ignore potential run-time traps and exceptions that fold must preserve. */
13689 #define START_FOLD_INIT \
13690 int saved_signaling_nans = flag_signaling_nans;\
13691 int saved_trapping_math = flag_trapping_math;\
13692 int saved_rounding_math = flag_rounding_math;\
13693 int saved_trapv = flag_trapv;\
13694 int saved_folding_initializer = folding_initializer;\
13695 flag_signaling_nans = 0;\
13696 flag_trapping_math = 0;\
13697 flag_rounding_math = 0;\
13698 flag_trapv = 0;\
13699 folding_initializer = 1;
13701 #define END_FOLD_INIT \
13702 flag_signaling_nans = saved_signaling_nans;\
13703 flag_trapping_math = saved_trapping_math;\
13704 flag_rounding_math = saved_rounding_math;\
13705 flag_trapv = saved_trapv;\
13706 folding_initializer = saved_folding_initializer;
13708 tree
13709 fold_build1_initializer (enum tree_code code, tree type, tree op)
13711 tree result;
13712 START_FOLD_INIT;
13714 result = fold_build1 (code, type, op);
13716 END_FOLD_INIT;
13717 return result;
13720 tree
13721 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13723 tree result;
13724 START_FOLD_INIT;
13726 result = fold_build2 (code, type, op0, op1);
13728 END_FOLD_INIT;
13729 return result;
13732 tree
13733 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13734 tree op2)
13736 tree result;
13737 START_FOLD_INIT;
13739 result = fold_build3 (code, type, op0, op1, op2);
13741 END_FOLD_INIT;
13742 return result;
13745 tree
13746 fold_build_call_array_initializer (tree type, tree fn,
13747 int nargs, tree *argarray)
13749 tree result;
13750 START_FOLD_INIT;
13752 result = fold_build_call_array (type, fn, nargs, argarray);
13754 END_FOLD_INIT;
13755 return result;
13758 #undef START_FOLD_INIT
13759 #undef END_FOLD_INIT
13761 /* Determine whether the first argument is a multiple of the second.
13762 Return 0 if it is not, or if we cannot easily determine it to be.
13764 An example of the sort of thing we care about (at this point; this routine
13765 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13766 fold cases do now) is discovering that
13768 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13770 is a multiple of
13772 SAVE_EXPR (J * 8)
13774 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13776 This code also handles discovering that
13778 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13780 is a multiple of 8 so we don't have to worry about dealing with a
13781 possible remainder.
13783 Note that we *look* inside a SAVE_EXPR only to determine how it was
13784 calculated; it is not safe for fold to do much of anything else with the
13785 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13786 at run time. For example, the latter example above *cannot* be implemented
13787 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13788 evaluation time of the original SAVE_EXPR is not necessarily the same at
13789 the time the new expression is evaluated. The only optimization of this
13790 sort that would be valid is changing
13792 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13794 divided by 8 to
13796 SAVE_EXPR (I) * SAVE_EXPR (J)
13798 (where the same SAVE_EXPR (J) is used in the original and the
13799 transformed version). */
13801 int
13802 multiple_of_p (tree type, const_tree top, const_tree bottom)
13804 if (operand_equal_p (top, bottom, 0))
13805 return 1;
13807 if (TREE_CODE (type) != INTEGER_TYPE)
13808 return 0;
13810 switch (TREE_CODE (top))
13812 case BIT_AND_EXPR:
13813 /* Bitwise and provides a power of two multiple. If the mask is
13814 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13815 if (!integer_pow2p (bottom))
13816 return 0;
13817 /* FALLTHRU */
13819 case MULT_EXPR:
13820 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13821 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13823 case PLUS_EXPR:
13824 case MINUS_EXPR:
13825 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13826 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13828 case LSHIFT_EXPR:
13829 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13831 tree op1, t1;
13833 op1 = TREE_OPERAND (top, 1);
13834 /* const_binop may not detect overflow correctly,
13835 so check for it explicitly here. */
13836 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13837 > TREE_INT_CST_LOW (op1)
13838 && TREE_INT_CST_HIGH (op1) == 0
13839 && 0 != (t1 = fold_convert (type,
13840 const_binop (LSHIFT_EXPR,
13841 size_one_node,
13842 op1, 0)))
13843 && !TREE_OVERFLOW (t1))
13844 return multiple_of_p (type, t1, bottom);
13846 return 0;
13848 case NOP_EXPR:
13849 /* Can't handle conversions from non-integral or wider integral type. */
13850 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13851 || (TYPE_PRECISION (type)
13852 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13853 return 0;
13855 /* .. fall through ... */
13857 case SAVE_EXPR:
13858 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13860 case INTEGER_CST:
13861 if (TREE_CODE (bottom) != INTEGER_CST
13862 || integer_zerop (bottom)
13863 || (TYPE_UNSIGNED (type)
13864 && (tree_int_cst_sgn (top) < 0
13865 || tree_int_cst_sgn (bottom) < 0)))
13866 return 0;
13867 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13868 top, bottom, 0));
13870 default:
13871 return 0;
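/* For illustration: multiple_of_p returns 1 for
     top = SAVE_EXPR (i) * SAVE_EXPR (j * 8), bottom = 8
   via the MULT_EXPR and SAVE_EXPR cases above, but 0 for
     top = i + 4, bottom = 8
   since nothing shows "i" itself to be a multiple of 8.  */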
13875 /* Return true if CODE or TYPE is known to be non-negative. */
13877 static bool
13878 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13880 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13881 && truth_value_p (code))
13882 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13883 have a signed:1 type (where the values are -1 and 0). */
13884 return true;
13885 return false;
13888 /* Return true if (CODE OP0) is known to be non-negative. If the return
13889 value is based on the assumption that signed overflow is undefined,
13890 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13891 *STRICT_OVERFLOW_P. */
13893 bool
13894 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13895 bool *strict_overflow_p)
13897 if (TYPE_UNSIGNED (type))
13898 return true;
13900 switch (code)
13902 case ABS_EXPR:
13903 /* We can't return 1 if flag_wrapv is set because
13904 ABS_EXPR<INT_MIN> = INT_MIN. */
13905 if (!INTEGRAL_TYPE_P (type))
13906 return true;
13907 if (TYPE_OVERFLOW_UNDEFINED (type))
13909 *strict_overflow_p = true;
13910 return true;
13912 break;
13914 case NON_LVALUE_EXPR:
13915 case FLOAT_EXPR:
13916 case FIX_TRUNC_EXPR:
13917 return tree_expr_nonnegative_warnv_p (op0,
13918 strict_overflow_p);
13920 case NOP_EXPR:
13922 tree inner_type = TREE_TYPE (op0);
13923 tree outer_type = type;
13925 if (TREE_CODE (outer_type) == REAL_TYPE)
13927 if (TREE_CODE (inner_type) == REAL_TYPE)
13928 return tree_expr_nonnegative_warnv_p (op0,
13929 strict_overflow_p);
13930 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13932 if (TYPE_UNSIGNED (inner_type))
13933 return true;
13934 return tree_expr_nonnegative_warnv_p (op0,
13935 strict_overflow_p);
13938 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13940 if (TREE_CODE (inner_type) == REAL_TYPE)
13941 return tree_expr_nonnegative_warnv_p (op0,
13942 strict_overflow_p);
13943 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13944 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13945 && TYPE_UNSIGNED (inner_type);
13948 break;
13950 default:
13951 return tree_simple_nonnegative_warnv_p (code, type);
13954 /* We don't know sign of `t', so be conservative and return false. */
13955 return false;
13958 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13959 value is based on the assumption that signed overflow is undefined,
13960 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13961 *STRICT_OVERFLOW_P. */
13963 bool
13964 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13965 tree op1, bool *strict_overflow_p)
13967 if (TYPE_UNSIGNED (type))
13968 return true;
13970 switch (code)
13972 case POINTER_PLUS_EXPR:
13973 case PLUS_EXPR:
13974 if (FLOAT_TYPE_P (type))
13975 return (tree_expr_nonnegative_warnv_p (op0,
13976 strict_overflow_p)
13977 && tree_expr_nonnegative_warnv_p (op1,
13978 strict_overflow_p));
13980 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13981 both unsigned and at least 2 bits shorter than the result. */
13982 if (TREE_CODE (type) == INTEGER_TYPE
13983 && TREE_CODE (op0) == NOP_EXPR
13984 && TREE_CODE (op1) == NOP_EXPR)
13986 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13987 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13988 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13989 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13991 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13992 TYPE_PRECISION (inner2)) + 1;
13993 return prec < TYPE_PRECISION (type);
13996 break;
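/* For illustration: with "unsigned char a, b" and 32-bit int,
   "(int) a + (int) b" is at most 255 + 255 = 510, which needs only
   9 bits; 9 < 32, so the sum is known non-negative.  */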
13998 case MULT_EXPR:
13999 if (FLOAT_TYPE_P (type))
14001 /* x * x for floating point x is always non-negative. */
14002 if (operand_equal_p (op0, op1, 0))
14003 return true;
14004 return (tree_expr_nonnegative_warnv_p (op0,
14005 strict_overflow_p)
14006 && tree_expr_nonnegative_warnv_p (op1,
14007 strict_overflow_p));
14010 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14011 both unsigned and the sum of their precisions is less than that of the result. */
14012 if (TREE_CODE (type) == INTEGER_TYPE
14013 && TREE_CODE (op0) == NOP_EXPR
14014 && TREE_CODE (op1) == NOP_EXPR)
14016 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14017 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14018 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14019 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14020 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14021 < TYPE_PRECISION (type);
14023 return false;
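/* For illustration: with "unsigned short a, b" and 32-bit int,
   "(int) a * (int) b" needs up to 16 + 16 = 32 bits, which is not
   less than 32, so the product is not known non-negative
   (0xffff * 0xffff is negative as a 32-bit int).  */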
14025 case BIT_AND_EXPR:
14026 case MAX_EXPR:
14027 return (tree_expr_nonnegative_warnv_p (op0,
14028 strict_overflow_p)
14029 || tree_expr_nonnegative_warnv_p (op1,
14030 strict_overflow_p));
14032 case BIT_IOR_EXPR:
14033 case BIT_XOR_EXPR:
14034 case MIN_EXPR:
14035 case RDIV_EXPR:
14036 case TRUNC_DIV_EXPR:
14037 case CEIL_DIV_EXPR:
14038 case FLOOR_DIV_EXPR:
14039 case ROUND_DIV_EXPR:
14040 return (tree_expr_nonnegative_warnv_p (op0,
14041 strict_overflow_p)
14042 && tree_expr_nonnegative_warnv_p (op1,
14043 strict_overflow_p));
14045 case TRUNC_MOD_EXPR:
14046 case CEIL_MOD_EXPR:
14047 case FLOOR_MOD_EXPR:
14048 case ROUND_MOD_EXPR:
14049 return tree_expr_nonnegative_warnv_p (op0,
14050 strict_overflow_p);
14051 default:
14052 return tree_simple_nonnegative_warnv_p (code, type);
14055 /* We don't know sign of `t', so be conservative and return false. */
14056 return false;
14059 /* Return true if T is known to be non-negative. If the return
14060 value is based on the assumption that signed overflow is undefined,
14061 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14062 *STRICT_OVERFLOW_P. */
14064 bool
14065 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14067 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14068 return true;
14070 switch (TREE_CODE (t))
14072 case SSA_NAME:
14073 /* Query VRP to see if it has recorded any information about
14074 the range of this object. */
14075 return ssa_name_nonnegative_p (t);
14077 case INTEGER_CST:
14078 return tree_int_cst_sgn (t) >= 0;
14080 case REAL_CST:
14081 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14083 case FIXED_CST:
14084 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14086 case COND_EXPR:
14087 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14088 strict_overflow_p)
14089 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14090 strict_overflow_p));
14091 default:
14092 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14093 TREE_TYPE (t));
14095 /* We don't know sign of `t', so be conservative and return false. */
14096 return false;
14099 /* Return true if T is known to be non-negative. If the return
14100 value is based on the assumption that signed overflow is undefined,
14101 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14102 *STRICT_OVERFLOW_P. */
14104 bool
14105 tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
14106 tree arg0, tree arg1, bool *strict_overflow_p)
14108 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14109 switch (DECL_FUNCTION_CODE (fndecl))
14111 CASE_FLT_FN (BUILT_IN_ACOS):
14112 CASE_FLT_FN (BUILT_IN_ACOSH):
14113 CASE_FLT_FN (BUILT_IN_CABS):
14114 CASE_FLT_FN (BUILT_IN_COSH):
14115 CASE_FLT_FN (BUILT_IN_ERFC):
14116 CASE_FLT_FN (BUILT_IN_EXP):
14117 CASE_FLT_FN (BUILT_IN_EXP10):
14118 CASE_FLT_FN (BUILT_IN_EXP2):
14119 CASE_FLT_FN (BUILT_IN_FABS):
14120 CASE_FLT_FN (BUILT_IN_FDIM):
14121 CASE_FLT_FN (BUILT_IN_HYPOT):
14122 CASE_FLT_FN (BUILT_IN_POW10):
14123 CASE_INT_FN (BUILT_IN_FFS):
14124 CASE_INT_FN (BUILT_IN_PARITY):
14125 CASE_INT_FN (BUILT_IN_POPCOUNT):
14126 case BUILT_IN_BSWAP32:
14127 case BUILT_IN_BSWAP64:
14128 /* Always true. */
14129 return true;
14131 CASE_FLT_FN (BUILT_IN_SQRT):
14132 /* sqrt(-0.0) is -0.0. */
14133 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14134 return true;
14135 return tree_expr_nonnegative_warnv_p (arg0,
14136 strict_overflow_p);
14138 CASE_FLT_FN (BUILT_IN_ASINH):
14139 CASE_FLT_FN (BUILT_IN_ATAN):
14140 CASE_FLT_FN (BUILT_IN_ATANH):
14141 CASE_FLT_FN (BUILT_IN_CBRT):
14142 CASE_FLT_FN (BUILT_IN_CEIL):
14143 CASE_FLT_FN (BUILT_IN_ERF):
14144 CASE_FLT_FN (BUILT_IN_EXPM1):
14145 CASE_FLT_FN (BUILT_IN_FLOOR):
14146 CASE_FLT_FN (BUILT_IN_FMOD):
14147 CASE_FLT_FN (BUILT_IN_FREXP):
14148 CASE_FLT_FN (BUILT_IN_LCEIL):
14149 CASE_FLT_FN (BUILT_IN_LDEXP):
14150 CASE_FLT_FN (BUILT_IN_LFLOOR):
14151 CASE_FLT_FN (BUILT_IN_LLCEIL):
14152 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14153 CASE_FLT_FN (BUILT_IN_LLRINT):
14154 CASE_FLT_FN (BUILT_IN_LLROUND):
14155 CASE_FLT_FN (BUILT_IN_LRINT):
14156 CASE_FLT_FN (BUILT_IN_LROUND):
14157 CASE_FLT_FN (BUILT_IN_MODF):
14158 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14159 CASE_FLT_FN (BUILT_IN_RINT):
14160 CASE_FLT_FN (BUILT_IN_ROUND):
14161 CASE_FLT_FN (BUILT_IN_SCALB):
14162 CASE_FLT_FN (BUILT_IN_SCALBLN):
14163 CASE_FLT_FN (BUILT_IN_SCALBN):
14164 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14165 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14166 CASE_FLT_FN (BUILT_IN_SINH):
14167 CASE_FLT_FN (BUILT_IN_TANH):
14168 CASE_FLT_FN (BUILT_IN_TRUNC):
14169 /* True if the 1st argument is nonnegative. */
14170 return tree_expr_nonnegative_warnv_p (arg0,
14171 strict_overflow_p);
14173 CASE_FLT_FN (BUILT_IN_FMAX):
14174 /* True if the 1st OR 2nd arguments are nonnegative. */
14175 return (tree_expr_nonnegative_warnv_p (arg0,
14176 strict_overflow_p)
14177 || (tree_expr_nonnegative_warnv_p (arg1,
14178 strict_overflow_p)));
14180 CASE_FLT_FN (BUILT_IN_FMIN):
14181 /* True if the 1st AND 2nd arguments are nonnegative. */
14182 return (tree_expr_nonnegative_warnv_p (arg0,
14183 strict_overflow_p)
14184 && (tree_expr_nonnegative_warnv_p (arg1,
14185 strict_overflow_p)));
14187 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14188 /* True if the 2nd argument is nonnegative. */
14189 return tree_expr_nonnegative_warnv_p (arg1,
14190 strict_overflow_p);
14192 CASE_FLT_FN (BUILT_IN_POWI):
14193 /* True if the 1st argument is nonnegative or the second
14194 argument is an even integer. */
14195 if (TREE_CODE (arg1) == INTEGER_CST
14196 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14197 return true;
14198 return tree_expr_nonnegative_warnv_p (arg0,
14199 strict_overflow_p);
14201 CASE_FLT_FN (BUILT_IN_POW):
14202 /* True if the 1st argument is nonnegative or the second
14203 argument is an even integer-valued real. */
14204 if (TREE_CODE (arg1) == REAL_CST)
14206 REAL_VALUE_TYPE c;
14207 HOST_WIDE_INT n;
14209 c = TREE_REAL_CST (arg1);
14210 n = real_to_integer (&c);
14211 if ((n & 1) == 0)
14213 REAL_VALUE_TYPE cint;
14214 real_from_integer (&cint, VOIDmode, n,
14215 n < 0 ? -1 : 0, 0);
14216 if (real_identical (&c, &cint))
14217 return true;
14220 return tree_expr_nonnegative_warnv_p (arg0,
14221 strict_overflow_p);
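/* For illustration: pow (x, 2.0) is known non-negative for any x,
   since 2.0 is an even integer-valued real, whereas pow (x, 0.5)
   falls back to requiring x itself to be non-negative.  */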
14223 default:
14224 break;
14226 return tree_simple_nonnegative_warnv_p (code,
14227 type);
14230 /* Return true if T is known to be non-negative. If the return
14231 value is based on the assumption that signed overflow is undefined,
14232 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14233 *STRICT_OVERFLOW_P. */
14235 bool
14236 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14238 enum tree_code code = TREE_CODE (t);
14239 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14240 return true;
14242 switch (code)
14244 case TARGET_EXPR:
14246 tree temp = TARGET_EXPR_SLOT (t);
14247 t = TARGET_EXPR_INITIAL (t);
14249 /* If the initializer is non-void, then it's a normal expression
14250 that will be assigned to the slot. */
14251 if (!VOID_TYPE_P (t))
14252 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14254 /* Otherwise, the initializer sets the slot in some way. One common
14255 way is an assignment statement at the end of the initializer. */
14256 while (1)
14258 if (TREE_CODE (t) == BIND_EXPR)
14259 t = expr_last (BIND_EXPR_BODY (t));
14260 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14261 || TREE_CODE (t) == TRY_CATCH_EXPR)
14262 t = expr_last (TREE_OPERAND (t, 0));
14263 else if (TREE_CODE (t) == STATEMENT_LIST)
14264 t = expr_last (t);
14265 else
14266 break;
14268 if ((TREE_CODE (t) == MODIFY_EXPR
14269 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14270 && GENERIC_TREE_OPERAND (t, 0) == temp)
14271 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14272 strict_overflow_p);
14274 return false;
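/* Example of the TARGET_EXPR shape recognized above (hypothetical dump
   syntax): when the initializer's last statement assigns the slot,

     TARGET_EXPR <D.1234, { ...; D.1234 = x * x; }>

   the query reduces to tree_expr_nonnegative_warnv_p on the right-hand
   side, here x * x.  */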
14277 case CALL_EXPR:
14279 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14280 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14282 return tree_call_nonnegative_warnv_p (TREE_CODE (t),
14283 TREE_TYPE (t),
14284 get_callee_fndecl (t),
14285 arg0,
14286 arg1,
14287 strict_overflow_p);
14289 case COMPOUND_EXPR:
14290 case MODIFY_EXPR:
14291 case GIMPLE_MODIFY_STMT:
14292 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14293 strict_overflow_p);
14294 case BIND_EXPR:
14295 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14296 strict_overflow_p);
14297 case SAVE_EXPR:
14298 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14299 strict_overflow_p);
14301 default:
14302 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14303 TREE_TYPE (t));
14306 /* We don't know the sign of `t', so be conservative and return false. */
14307 return false;
14310 /* Return true if T is known to be non-negative. If the return
14311 value is based on the assumption that signed overflow is undefined,
14312 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14313 *STRICT_OVERFLOW_P. */
14315 bool
14316 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14318 enum tree_code code;
14319 if (t == error_mark_node)
14320 return false;
14322 code = TREE_CODE (t);
14323 switch (TREE_CODE_CLASS (code))
14325 case tcc_binary:
14326 case tcc_comparison:
14327 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14328 TREE_TYPE (t),
14329 TREE_OPERAND (t, 0),
14330 TREE_OPERAND (t, 1),
14331 strict_overflow_p);
14333 case tcc_unary:
14334 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14335 TREE_TYPE (t),
14336 TREE_OPERAND (t, 0),
14337 strict_overflow_p);
14339 case tcc_constant:
14340 case tcc_declaration:
14341 case tcc_reference:
14342 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14344 default:
14345 break;
14348 switch (code)
14350 case TRUTH_AND_EXPR:
14351 case TRUTH_OR_EXPR:
14352 case TRUTH_XOR_EXPR:
14353 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14354 TREE_TYPE (t),
14355 TREE_OPERAND (t, 0),
14356 TREE_OPERAND (t, 1),
14357 strict_overflow_p);
14358 case TRUTH_NOT_EXPR:
14359 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14360 TREE_TYPE (t),
14361 TREE_OPERAND (t, 0),
14362 strict_overflow_p);
14364 case COND_EXPR:
14365 case CONSTRUCTOR:
14366 case OBJ_TYPE_REF:
14367 case ASSERT_EXPR:
14368 case ADDR_EXPR:
14369 case WITH_SIZE_EXPR:
14370 case EXC_PTR_EXPR:
14371 case SSA_NAME:
14372 case FILTER_EXPR:
14373 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14375 default:
14376 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14380 /* Return true if `t' is known to be non-negative. Handle warnings
14381 about undefined signed overflow. */
14383 bool
14384 tree_expr_nonnegative_p (tree t)
14386 bool ret, strict_overflow_p;
14388 strict_overflow_p = false;
14389 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14390 if (strict_overflow_p)
14391 fold_overflow_warning (("assuming signed overflow does not occur when "
14392 "determining that expression is always "
14393 "non-negative"),
14394 WARN_STRICT_OVERFLOW_MISC);
14395 return ret;
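/* The "_warnv" idiom sketched for a hypothetical client: the helpers
   above never warn themselves; they only record that undefined signed
   overflow was assumed, and the outermost caller decides whether a
   warning is appropriate.

     bool sub_p = false;
     if (tree_expr_nonnegative_warnv_p (expr, &sub_p))
       {
         if (sub_p)
           fold_overflow_warning (..., WARN_STRICT_OVERFLOW_MISC);
         ... rely on expr >= 0 ...
       }

   tree_expr_nonnegative_p above is exactly this pattern.  */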
14399 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14400 For floating point we further ensure that T is not denormal.
14401 Similar logic is present in nonzero_address in rtlanal.c.
14403 If the return value is based on the assumption that signed overflow
14404 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14405 change *STRICT_OVERFLOW_P. */
14407 bool
14408 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14409 bool *strict_overflow_p)
14411 switch (code)
14413 case ABS_EXPR:
14414 return tree_expr_nonzero_warnv_p (op0,
14415 strict_overflow_p);
14417 case NOP_EXPR:
14419 tree inner_type = TREE_TYPE (op0);
14420 tree outer_type = type;
14422 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14423 && tree_expr_nonzero_warnv_p (op0,
14424 strict_overflow_p));
14426 break;
14428 case NON_LVALUE_EXPR:
14429 return tree_expr_nonzero_warnv_p (op0,
14430 strict_overflow_p);
14432 default:
14433 break;
14436 return false;
14439 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14440 For floating point we further ensure that T is not denormal.
14441 Similar logic is present in nonzero_address in rtlanal.c.
14443 If the return value is based on the assumption that signed overflow
14444 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14445 change *STRICT_OVERFLOW_P. */
14447 bool
14448 tree_binary_nonzero_warnv_p (enum tree_code code,
14449 tree type,
14450 tree op0,
14451 tree op1, bool *strict_overflow_p)
14453 bool sub_strict_overflow_p;
14454 switch (code)
14456 case POINTER_PLUS_EXPR:
14457 case PLUS_EXPR:
14458 if (TYPE_OVERFLOW_UNDEFINED (type))
14460 /* In the presence of negative values it is hard
14461 to say anything. */
14462 sub_strict_overflow_p = false;
14463 if (!tree_expr_nonnegative_warnv_p (op0,
14464 &sub_strict_overflow_p)
14465 || !tree_expr_nonnegative_warnv_p (op1,
14466 &sub_strict_overflow_p))
14467 return false;
14468 /* One of the operands must be positive and the other non-negative. */
14469 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14470 overflows, on a twos-complement machine the sum of two
14471 nonnegative numbers can never be zero. */
14472 return (tree_expr_nonzero_warnv_p (op0,
14473 strict_overflow_p)
14474 || tree_expr_nonzero_warnv_p (op1,
14475 strict_overflow_p));
14477 break;
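/* Worked example for the PLUS_EXPR case: once both operands are known
   nonnegative, a two's-complement sum of 32-bit signed values cannot
   wrap to zero, since the largest possible sum is

     0x7fffffff + 0x7fffffff == 0xfffffffe.

   For unsigned operands the conclusion would fail, e.g.
   0x80000000u + 0x80000000u wraps to 0, which is why this case is
   guarded by TYPE_OVERFLOW_UNDEFINED.  */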
14479 case MULT_EXPR:
14480 if (TYPE_OVERFLOW_UNDEFINED (type))
14482 if (tree_expr_nonzero_warnv_p (op0,
14483 strict_overflow_p)
14484 && tree_expr_nonzero_warnv_p (op1,
14485 strict_overflow_p))
14487 *strict_overflow_p = true;
14488 return true;
14491 break;
14493 case MIN_EXPR:
14494 sub_strict_overflow_p = false;
14495 if (tree_expr_nonzero_warnv_p (op0,
14496 &sub_strict_overflow_p)
14497 && tree_expr_nonzero_warnv_p (op1,
14498 &sub_strict_overflow_p))
14500 if (sub_strict_overflow_p)
14501 *strict_overflow_p = true;
14503 break;
14505 case MAX_EXPR:
14506 sub_strict_overflow_p = false;
14507 if (tree_expr_nonzero_warnv_p (op0,
14508 &sub_strict_overflow_p))
14510 if (sub_strict_overflow_p)
14511 *strict_overflow_p = true;
14513 /* If both operands are nonzero, then MAX must be too. */
14514 if (tree_expr_nonzero_warnv_p (op1,
14515 strict_overflow_p))
14516 return true;
14518 /* MAX where operand 0 is positive is positive. */
14519 return tree_expr_nonnegative_warnv_p (op0,
14520 strict_overflow_p);
14522 /* MAX where operand 1 is positive is positive. */
14523 else if (tree_expr_nonzero_warnv_p (op1,
14524 &sub_strict_overflow_p)
14525 && tree_expr_nonnegative_warnv_p (op1,
14526 &sub_strict_overflow_p))
14528 if (sub_strict_overflow_p)
14529 *strict_overflow_p = true;
14530 return true;
14532 break;
14534 case BIT_IOR_EXPR:
14535 return (tree_expr_nonzero_warnv_p (op1,
14536 strict_overflow_p)
14537 || tree_expr_nonzero_warnv_p (op0,
14538 strict_overflow_p));
14540 default:
14541 break;
14544 return false;
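/* Standalone sketch of why the MULT_EXPR case must set
   *strict_overflow_p: with wrapping arithmetic two nonzero factors can
   multiply to zero,

     unsigned int a = 0x10000u;        2^16, nonzero
     unsigned int p = a * a;           2^32 wraps to 0 in 32 bits

   so "nonzero * nonzero == nonzero" is only valid under the assumption
   that signed overflow is undefined.  */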
14547 /* Return true when T is an address and is known to be nonzero.
14548 For floating point we further ensure that T is not denormal.
14549 Similar logic is present in nonzero_address in rtlanal.c.
14551 If the return value is based on the assumption that signed overflow
14552 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14553 change *STRICT_OVERFLOW_P. */
14555 bool
14556 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14558 bool sub_strict_overflow_p;
14559 switch (TREE_CODE (t))
14561 case SSA_NAME:
14562 /* Query VRP to see if it has recorded any information about
14563 the range of this object. */
14564 return ssa_name_nonzero_p (t);
14566 case INTEGER_CST:
14567 return !integer_zerop (t);
14569 case ADDR_EXPR:
14571 tree base = get_base_address (TREE_OPERAND (t, 0));
14573 if (!base)
14574 return false;
14576 /* Weak declarations may link to NULL. */
14577 if (VAR_OR_FUNCTION_DECL_P (base))
14578 return !DECL_WEAK (base);
14580 /* Constants are never weak. */
14581 if (CONSTANT_CLASS_P (base))
14582 return true;
14584 return false;
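/* Example of the weak-declaration hazard handled above (GNU attribute
   syntax, shown for illustration only):

     extern int maybe_absent __attribute__ ((weak));
     int *p = &maybe_absent;    p may be NULL at run time

   hence &DECL cannot be assumed nonzero when DECL_WEAK is set.  */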
14587 case COND_EXPR:
14588 sub_strict_overflow_p = false;
14589 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14590 &sub_strict_overflow_p)
14591 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14592 &sub_strict_overflow_p))
14594 if (sub_strict_overflow_p)
14595 *strict_overflow_p = true;
14596 return true;
14598 break;
14600 default:
14601 break;
14603 return false;
14606 /* Return true when T is an address and is known to be nonzero.
14607 For floating point we further ensure that T is not denormal.
14608 Similar logic is present in nonzero_address in rtlanal.c.
14610 If the return value is based on the assumption that signed overflow
14611 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14612 change *STRICT_OVERFLOW_P. */
14614 bool
14615 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14617 tree type = TREE_TYPE (t);
14618 enum tree_code code;
14620 /* Doing something useful for floating point would need more work. */
14621 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14622 return false;
14624 code = TREE_CODE (t);
14625 switch (TREE_CODE_CLASS (code))
14627 case tcc_unary:
14628 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14629 strict_overflow_p);
14630 case tcc_binary:
14631 case tcc_comparison:
14632 return tree_binary_nonzero_warnv_p (code, type,
14633 TREE_OPERAND (t, 0),
14634 TREE_OPERAND (t, 1),
14635 strict_overflow_p);
14636 case tcc_constant:
14637 case tcc_declaration:
14638 case tcc_reference:
14639 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14641 default:
14642 break;
14645 switch (code)
14647 case TRUTH_NOT_EXPR:
14648 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14649 strict_overflow_p);
14651 case TRUTH_AND_EXPR:
14652 case TRUTH_OR_EXPR:
14653 case TRUTH_XOR_EXPR:
14654 return tree_binary_nonzero_warnv_p (code, type,
14655 TREE_OPERAND (t, 0),
14656 TREE_OPERAND (t, 1),
14657 strict_overflow_p);
14659 case COND_EXPR:
14660 case CONSTRUCTOR:
14661 case OBJ_TYPE_REF:
14662 case ASSERT_EXPR:
14663 case ADDR_EXPR:
14664 case WITH_SIZE_EXPR:
14665 case EXC_PTR_EXPR:
14666 case SSA_NAME:
14667 case FILTER_EXPR:
14668 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14670 case COMPOUND_EXPR:
14671 case MODIFY_EXPR:
14672 case GIMPLE_MODIFY_STMT:
14673 case BIND_EXPR:
14674 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14675 strict_overflow_p);
14677 case SAVE_EXPR:
14678 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14679 strict_overflow_p);
14681 case CALL_EXPR:
14682 return alloca_call_p (t);
14684 default:
14685 break;
14687 return false;
14690 /* Return true when T is an address and is known to be nonzero.
14691 Handle warnings about undefined signed overflow. */
14693 bool
14694 tree_expr_nonzero_p (tree t)
14696 bool ret, strict_overflow_p;
14698 strict_overflow_p = false;
14699 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14700 if (strict_overflow_p)
14701 fold_overflow_warning (("assuming signed overflow does not occur when "
14702 "determining that expression is always "
14703 "non-zero"),
14704 WARN_STRICT_OVERFLOW_MISC);
14705 return ret;
14708 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14709 attempt to fold the expression to a constant without modifying TYPE,
14710 OP0 or OP1.
14712 If the expression could be simplified to a constant, then return
14713 the constant. If the expression cannot be simplified to a
14714 constant, then return NULL_TREE. */
14716 tree
14717 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14719 tree tem = fold_binary (code, type, op0, op1);
14720 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14723 /* Given the components of a unary expression CODE, TYPE and OP0,
14724 attempt to fold the expression to a constant without modifying
14725 TYPE or OP0.
14727 If the expression could be simplified to a constant, then return
14728 the constant. If the expression cannot be simplified to a
14729 constant, then return NULL_TREE. */
14731 tree
14732 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14734 tree tem = fold_unary (code, type, op0);
14735 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
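/* A hypothetical caller's view of the two helpers above: they either
   fold all the way to a constant or report failure with NULL_TREE,
   never returning a partially simplified tree.

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         two, three);
     gcc_assert (sum && TREE_INT_CST_LOW (sum) == 5);

     tree var_sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                             some_var, two);
     gcc_assert (var_sum == NULL_TREE);

   Here 'some_var' stands for a placeholder VAR_DECL, not anything
   defined in this file.  */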
14738 /* If EXP represents referencing an element in a constant string
14739 (either via pointer arithmetic or array indexing), return the
14740 tree representing the value accessed, otherwise return NULL. */
14742 tree
14743 fold_read_from_constant_string (tree exp)
14745 if ((TREE_CODE (exp) == INDIRECT_REF
14746 || TREE_CODE (exp) == ARRAY_REF)
14747 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14749 tree exp1 = TREE_OPERAND (exp, 0);
14750 tree index;
14751 tree string;
14753 if (TREE_CODE (exp) == INDIRECT_REF)
14754 string = string_constant (exp1, &index);
14755 else
14757 tree low_bound = array_ref_low_bound (exp);
14758 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14760 /* Optimize the special case of a zero lower bound.
14762 We convert the low_bound to sizetype to avoid some problems
14763 with constant folding. (E.g. suppose the lower bound is 1,
14764 and its mode is QI. Without the conversion, (ARRAY
14765 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14766 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14767 if (! integer_zerop (low_bound))
14768 index = size_diffop (index, fold_convert (sizetype, low_bound));
14770 string = exp1;
14773 if (string
14774 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14775 && TREE_CODE (string) == STRING_CST
14776 && TREE_CODE (index) == INTEGER_CST
14777 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14778 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14779 == MODE_INT)
14780 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14781 return build_int_cst_type (TREE_TYPE (exp),
14782 (TREE_STRING_POINTER (string)
14783 [TREE_INT_CST_LOW (index)]));
14785 return NULL;
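/* Source-level picture of what fold_read_from_constant_string
   recognizes (illustrative):

     static const char msg[] = "abc";
     ... msg[1] ...          folds to the character constant 'b'
     ... *(msg + 2) ...      folds to 'c' via the INDIRECT_REF path

   provided the index is an INTEGER_CST below TREE_STRING_LENGTH and
   the element type has a single-byte integer mode.  */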
14788 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14789 an integer constant, real, or fixed-point constant.
14791 TYPE is the type of the result. */
14793 static tree
14794 fold_negate_const (tree arg0, tree type)
14796 tree t = NULL_TREE;
14798 switch (TREE_CODE (arg0))
14800 case INTEGER_CST:
14802 unsigned HOST_WIDE_INT low;
14803 HOST_WIDE_INT high;
14804 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14805 TREE_INT_CST_HIGH (arg0),
14806 &low, &high);
14807 t = force_fit_type_double (type, low, high, 1,
14808 (overflow | TREE_OVERFLOW (arg0))
14809 && !TYPE_UNSIGNED (type));
14810 break;
14813 case REAL_CST:
14814 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14815 break;
14817 case FIXED_CST:
14819 FIXED_VALUE_TYPE f;
14820 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14821 &(TREE_FIXED_CST (arg0)), NULL,
14822 TYPE_SATURATING (type));
14823 t = build_fixed (type, f);
14824 /* Propagate overflow flags. */
14825 if (overflow_p | TREE_OVERFLOW (arg0))
14827 TREE_OVERFLOW (t) = 1;
14828 TREE_CONSTANT_OVERFLOW (t) = 1;
14830 else if (TREE_CONSTANT_OVERFLOW (arg0))
14831 TREE_CONSTANT_OVERFLOW (t) = 1;
14832 break;
14835 default:
14836 gcc_unreachable ();
14839 return t;
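/* Why fold_negate_const must track overflow: in two's complement the
   most negative value has no representable negation.  For 32-bit int,

     -(-2147483648) would be 2147483648 > INT_MAX,

   and the wrapped result is INT_MIN again.  neg_double reports this,
   and force_fit_type_double then sets TREE_OVERFLOW accordingly for
   signed types.  */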
14842 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14843 an integer constant or real constant.
14845 TYPE is the type of the result. */
14847 tree
14848 fold_abs_const (tree arg0, tree type)
14850 tree t = NULL_TREE;
14852 switch (TREE_CODE (arg0))
14854 case INTEGER_CST:
14855 /* If the value is unsigned, then the absolute value is
14856 the same as the ordinary value. */
14857 if (TYPE_UNSIGNED (type))
14858 t = arg0;
14859 /* Similarly, if the value is non-negative. */
14860 else if (INT_CST_LT (integer_minus_one_node, arg0))
14861 t = arg0;
14862 /* If the value is negative, then the absolute value is
14863 its negation. */
14864 else
14866 unsigned HOST_WIDE_INT low;
14867 HOST_WIDE_INT high;
14868 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14869 TREE_INT_CST_HIGH (arg0),
14870 &low, &high);
14871 t = force_fit_type_double (type, low, high, -1,
14872 overflow | TREE_OVERFLOW (arg0));
14874 break;
14876 case REAL_CST:
14877 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14878 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14879 else
14880 t = arg0;
14881 break;
14883 default:
14884 gcc_unreachable ();
14887 return t;
14890 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14891 constant. TYPE is the type of the result. */
14893 static tree
14894 fold_not_const (tree arg0, tree type)
14896 tree t = NULL_TREE;
14898 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14900 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14901 ~TREE_INT_CST_HIGH (arg0), 0,
14902 TREE_OVERFLOW (arg0));
14904 return t;
14907 /* Given CODE, a relational operator, the target type, TYPE and two
14908 constant operands OP0 and OP1, return the result of the
14909 relational operation. If the result is not a compile time
14910 constant, then return NULL_TREE. */
14912 static tree
14913 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14915 int result, invert;
14917 /* From here on, the only cases we handle are when the result is
14918 known to be a constant. */
14920 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14922 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14923 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14925 /* Handle the cases where either operand is a NaN. */
14926 if (real_isnan (c0) || real_isnan (c1))
14928 switch (code)
14930 case EQ_EXPR:
14931 case ORDERED_EXPR:
14932 result = 0;
14933 break;
14935 case NE_EXPR:
14936 case UNORDERED_EXPR:
14937 case UNLT_EXPR:
14938 case UNLE_EXPR:
14939 case UNGT_EXPR:
14940 case UNGE_EXPR:
14941 case UNEQ_EXPR:
14942 result = 1;
14943 break;
14945 case LT_EXPR:
14946 case LE_EXPR:
14947 case GT_EXPR:
14948 case GE_EXPR:
14949 case LTGT_EXPR:
14950 if (flag_trapping_math)
14951 return NULL_TREE;
14952 result = 0;
14953 break;
14955 default:
14956 gcc_unreachable ();
14959 return constant_boolean_node (result, type);
14962 return constant_boolean_node (real_compare (code, c0, c1), type);
14965 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14967 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14968 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14969 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14972 /* Handle equality/inequality of complex constants. */
14973 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14975 tree rcond = fold_relational_const (code, type,
14976 TREE_REALPART (op0),
14977 TREE_REALPART (op1));
14978 tree icond = fold_relational_const (code, type,
14979 TREE_IMAGPART (op0),
14980 TREE_IMAGPART (op1));
14981 if (code == EQ_EXPR)
14982 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14983 else if (code == NE_EXPR)
14984 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14985 else
14986 return NULL_TREE;
14989 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14991 To compute GT, swap the arguments and do LT.
14992 To compute GE, do LT and invert the result.
14993 To compute LE, swap the arguments, do LT and invert the result.
14994 To compute NE, do EQ and invert the result.
14996 Therefore, the code below must handle only EQ and LT. */
14998 if (code == LE_EXPR || code == GT_EXPR)
15000 tree tem = op0;
15001 op0 = op1;
15002 op1 = tem;
15003 code = swap_tree_comparison (code);
15006 /* Note that it is safe to invert for real values here because we
15007 have already handled the one case where it matters. */
15009 invert = 0;
15010 if (code == NE_EXPR || code == GE_EXPR)
15012 invert = 1;
15013 code = invert_tree_comparison (code, false);
15016 /* Compute a result for LT or EQ if the arguments permit;
15017 otherwise return NULL_TREE. */
15018 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15020 if (code == EQ_EXPR)
15021 result = tree_int_cst_equal (op0, op1);
15022 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15023 result = INT_CST_LT_UNSIGNED (op0, op1);
15024 else
15025 result = INT_CST_LT (op0, op1);
15027 else
15028 return NULL_TREE;
15030 if (invert)
15031 result ^= 1;
15032 return constant_boolean_node (result, type);
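/* The canonicalization above, tabulated (safe for the constants
   handled here, since the NaN cases were dealt with earlier):

     GT (a, b) == LT (b, a)              swap operands
     LE (a, b) == GE (b, a)              swap operands, then
               == !LT (b, a)             invert
     GE (a, b) == !LT (a, b)             invert
     NE (a, b) == !EQ (a, b)             invert

   leaving only LT and EQ to be computed directly.  */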
15035 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15036 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15037 itself. */
15039 tree
15040 fold_build_cleanup_point_expr (tree type, tree expr)
15042 /* If the expression does not have side effects then we don't have to wrap
15043 it with a cleanup point expression. */
15044 if (!TREE_SIDE_EFFECTS (expr))
15045 return expr;
15047 /* If the expression is a RETURN_EXPR, check whether the expression inside
15048 the return, or the right-hand side of the MODIFY_EXPR inside the return,
15049 has side effects. If either of them has none, we don't need to wrap the
15050 expression in a cleanup point expression. Note that we don't check the
15051 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
15052 if (TREE_CODE (expr) == RETURN_EXPR)
15054 tree op = TREE_OPERAND (expr, 0);
15055 if (!op || !TREE_SIDE_EFFECTS (op))
15056 return expr;
15057 op = TREE_OPERAND (op, 1);
15058 if (!TREE_SIDE_EFFECTS (op))
15059 return expr;
15062 return build1 (CLEANUP_POINT_EXPR, type, expr);
15065 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15066 of an indirection through OP0, or NULL_TREE if no simplification is
15067 possible. */
15069 tree
15070 fold_indirect_ref_1 (tree type, tree op0)
15072 tree sub = op0;
15073 tree subtype;
15075 STRIP_NOPS (sub);
15076 subtype = TREE_TYPE (sub);
15077 if (!POINTER_TYPE_P (subtype))
15078 return NULL_TREE;
15080 if (TREE_CODE (sub) == ADDR_EXPR)
15082 tree op = TREE_OPERAND (sub, 0);
15083 tree optype = TREE_TYPE (op);
15084 /* *&CONST_DECL folds to the value of the const decl. */
15085 if (TREE_CODE (op) == CONST_DECL)
15086 return DECL_INITIAL (op);
15087 /* *&p => p; make sure to handle *&"str"[cst] here. */
15088 if (type == optype)
15090 tree fop = fold_read_from_constant_string (op);
15091 if (fop)
15092 return fop;
15093 else
15094 return op;
15096 /* *(foo *)&fooarray => fooarray[0] */
15097 else if (TREE_CODE (optype) == ARRAY_TYPE
15098 && type == TREE_TYPE (optype))
15100 tree type_domain = TYPE_DOMAIN (optype);
15101 tree min_val = size_zero_node;
15102 if (type_domain && TYPE_MIN_VALUE (type_domain))
15103 min_val = TYPE_MIN_VALUE (type_domain);
15104 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15106 /* *(foo *)&complexfoo => __real__ complexfoo */
15107 else if (TREE_CODE (optype) == COMPLEX_TYPE
15108 && type == TREE_TYPE (optype))
15109 return fold_build1 (REALPART_EXPR, type, op);
15110 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15111 else if (TREE_CODE (optype) == VECTOR_TYPE
15112 && type == TREE_TYPE (optype))
15114 tree part_width = TYPE_SIZE (type);
15115 tree index = bitsize_int (0);
15116 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15120 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15121 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15122 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15124 tree op00 = TREE_OPERAND (sub, 0);
15125 tree op01 = TREE_OPERAND (sub, 1);
15126 tree op00type;
15128 STRIP_NOPS (op00);
15129 op00type = TREE_TYPE (op00);
15130 if (TREE_CODE (op00) == ADDR_EXPR
15131 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15132 && type == TREE_TYPE (TREE_TYPE (op00type)))
15134 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15135 tree part_width = TYPE_SIZE (type);
15136 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15137 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15138 tree index = bitsize_int (indexi);
15140 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15141 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15142 part_width, index);
15148 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15149 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15150 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15152 tree op00 = TREE_OPERAND (sub, 0);
15153 tree op01 = TREE_OPERAND (sub, 1);
15154 tree op00type;
15156 STRIP_NOPS (op00);
15157 op00type = TREE_TYPE (op00);
15158 if (TREE_CODE (op00) == ADDR_EXPR
15159 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15160 && type == TREE_TYPE (TREE_TYPE (op00type)))
15162 tree size = TYPE_SIZE_UNIT (type);
15163 if (tree_int_cst_equal (size, op01))
15164 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15168 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15169 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15170 && type == TREE_TYPE (TREE_TYPE (subtype)))
15172 tree type_domain;
15173 tree min_val = size_zero_node;
15174 sub = build_fold_indirect_ref (sub);
15175 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15176 if (type_domain && TYPE_MIN_VALUE (type_domain))
15177 min_val = TYPE_MIN_VALUE (type_domain);
15178 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15181 return NULL_TREE;
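/* Source-level views of the simplifications above (illustrative):

     float farr[4];             *(float *) &farr      =>  farr[0]
     _Complex double z;         *(double *) &z        =>  __real__ z
                                ((double *) &z)[1]    =>  __imag__ z
     v4sf vec;                  *(float *) &vec       =>  BIT_FIELD_REF

   Each pattern is matched purely by type identity, so no new memory
   access is introduced.  (v4sf stands in for a hypothetical 4 x float
   vector type.)  */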
15184 /* Builds an expression for an indirection through T, simplifying some
15185 cases. */
15187 tree
15188 build_fold_indirect_ref (tree t)
15190 tree type = TREE_TYPE (TREE_TYPE (t));
15191 tree sub = fold_indirect_ref_1 (type, t);
15193 if (sub)
15194 return sub;
15195 else
15196 return build1 (INDIRECT_REF, type, t);
15199 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15201 tree
15202 fold_indirect_ref (tree t)
15204 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15206 if (sub)
15207 return sub;
15208 else
15209 return t;
15212 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15213 whose result is ignored. The type of the returned tree need not be
15214 the same as the original expression. */
15216 tree
15217 fold_ignored_result (tree t)
15219 if (!TREE_SIDE_EFFECTS (t))
15220 return integer_zero_node;
15222 for (;;)
15223 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15225 case tcc_unary:
15226 t = TREE_OPERAND (t, 0);
15227 break;
15229 case tcc_binary:
15230 case tcc_comparison:
15231 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15232 t = TREE_OPERAND (t, 0);
15233 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15234 t = TREE_OPERAND (t, 1);
15235 else
15236 return t;
15237 break;
15239 case tcc_expression:
15240 switch (TREE_CODE (t))
15242 case COMPOUND_EXPR:
15243 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15244 return t;
15245 t = TREE_OPERAND (t, 0);
15246 break;
15248 case COND_EXPR:
15249 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15250 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15251 return t;
15252 t = TREE_OPERAND (t, 0);
15253 break;
15255 default:
15256 return t;
15258 break;
15260 default:
15261 return t;
15265 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15266 This can only be applied to objects of a sizetype. */
15268 tree
15269 round_up (tree value, int divisor)
15271 tree div = NULL_TREE;
15273 gcc_assert (divisor > 0);
15274 if (divisor == 1)
15275 return value;
15277 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15278 have to do anything. Only do this when VALUE is not a constant;
15279 for a constant, this check is more expensive than simply doing
15280 the rounding. */
15281 if (TREE_CODE (value) != INTEGER_CST)
15283 div = build_int_cst (TREE_TYPE (value), divisor);
15285 if (multiple_of_p (TREE_TYPE (value), value, div))
15286 return value;
15289 /* If divisor is a power of two, simplify this to bit manipulation. */
15290 if (divisor == (divisor & -divisor))
15292 if (TREE_CODE (value) == INTEGER_CST)
15294 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15295 unsigned HOST_WIDE_INT high;
15296 bool overflow_p;
15298 if ((low & (divisor - 1)) == 0)
15299 return value;
15301 overflow_p = TREE_OVERFLOW (value);
15302 high = TREE_INT_CST_HIGH (value);
15303 low &= ~(divisor - 1);
15304 low += divisor;
15305 if (low == 0)
15307 high++;
15308 if (high == 0)
15309 overflow_p = true;
15312 return force_fit_type_double (TREE_TYPE (value), low, high,
15313 -1, overflow_p);
15315 else
15317 tree t;
15319 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15320 value = size_binop (PLUS_EXPR, value, t);
15321 t = build_int_cst (TREE_TYPE (value), -divisor);
15322 value = size_binop (BIT_AND_EXPR, value, t);
15325 else
15327 if (!div)
15328 div = build_int_cst (TREE_TYPE (value), divisor);
15329 value = size_binop (CEIL_DIV_EXPR, value, div);
15330 value = size_binop (MULT_EXPR, value, div);
15333 return value;
15336 /* Likewise, but round down. */
15338 tree
15339 round_down (tree value, int divisor)
15341 tree div = NULL_TREE;
15343 gcc_assert (divisor > 0);
15344 if (divisor == 1)
15345 return value;
15347 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15348 have to do anything. Only do this when VALUE is not a constant;
15349 for a constant, this check is more expensive than simply doing
15350 the rounding. */
15351 if (TREE_CODE (value) != INTEGER_CST)
15353 div = build_int_cst (TREE_TYPE (value), divisor);
15355 if (multiple_of_p (TREE_TYPE (value), value, div))
15356 return value;
15359 /* If divisor is a power of two, simplify this to bit manipulation. */
15360 if (divisor == (divisor & -divisor))
15362 tree t;
15364 t = build_int_cst (TREE_TYPE (value), -divisor);
15365 value = size_binop (BIT_AND_EXPR, value, t);
15367 else
15369 if (!div)
15370 div = build_int_cst (TREE_TYPE (value), divisor);
15371 value = size_binop (FLOOR_DIV_EXPR, value, div);
15372 value = size_binop (MULT_EXPR, value, div);
15375 return value;
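/* The power-of-two shortcuts in round_up and round_down rely on -d
   being the mask ~(d - 1) when d is a power of two.  A standalone
   sketch with 32-bit unsigned arithmetic:

     unsigned v = 37, d = 16;             d == (d & -d), a power of 2
     unsigned up   = (v + d - 1) & -d;    == 48, round_up's PLUS/BIT_AND
     unsigned down = v & -d;              == 32, round_down's BIT_AND

   Other divisors fall back to CEIL_DIV_EXPR/FLOOR_DIV_EXPR followed by
   MULT_EXPR, as in the generic branches above.  */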
15378 /* Returns the pointer to the base of the object addressed by EXP and
15379 extracts the information about the offset of the access, storing it
15380 in *PBITPOS and *POFFSET. */
15382 static tree
15383 split_address_to_core_and_offset (tree exp,
15384 HOST_WIDE_INT *pbitpos, tree *poffset)
15386 tree core;
15387 enum machine_mode mode;
15388 int unsignedp, volatilep;
15389 HOST_WIDE_INT bitsize;
15391 if (TREE_CODE (exp) == ADDR_EXPR)
15393 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15394 poffset, &mode, &unsignedp, &volatilep,
15395 false);
15396 core = fold_addr_expr (core);
15398 else
15400 core = exp;
15401 *pbitpos = 0;
15402 *poffset = NULL_TREE;
15405 return core;
15408 /* Returns true if addresses of E1 and E2 differ by a constant, false
15409 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15411 bool
15412 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15414 tree core1, core2;
15415 HOST_WIDE_INT bitpos1, bitpos2;
15416 tree toffset1, toffset2, tdiff, type;
15418 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15419 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15421 if (bitpos1 % BITS_PER_UNIT != 0
15422 || bitpos2 % BITS_PER_UNIT != 0
15423 || !operand_equal_p (core1, core2, 0))
15424 return false;
15426 if (toffset1 && toffset2)
15428 type = TREE_TYPE (toffset1);
15429 if (type != TREE_TYPE (toffset2))
15430 toffset2 = fold_convert (type, toffset2);
15432 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15433 if (!cst_and_fits_in_hwi (tdiff))
15434 return false;
15436 *diff = int_cst_value (tdiff);
15438 else if (toffset1 || toffset2)
15440 /* If only one of the offsets is non-constant, the difference cannot
15441 be a constant. */
15442 return false;
15444 else
15445 *diff = 0;
15447 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15448 return true;
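/* Worked example for ptr_difference_const, assuming 32-bit int and
   8-bit units:

     int a[10];
     e1 = &a[7], e2 = &a[2]
       core1 == core2 == a, bitpos1 == 224, bitpos2 == 64
       *diff == (224 - 64) / BITS_PER_UNIT == 20 == 5 * sizeof (int)

   Had either address carried a variable offset component, the function
   would return false instead.  */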
15451 /* Simplify the floating point expression EXP when the sign of the
15452 result is not significant. Return NULL_TREE if no simplification
15453 is possible. */
15455 tree
15456 fold_strip_sign_ops (tree exp)
15458 tree arg0, arg1;
15460 switch (TREE_CODE (exp))
15462 case ABS_EXPR:
15463 case NEGATE_EXPR:
15464 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15465 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15467 case MULT_EXPR:
15468 case RDIV_EXPR:
15469 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15470 return NULL_TREE;
15471 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15472 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15473 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15474 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15475 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15476 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15477 break;
15479 case COMPOUND_EXPR:
15480 arg0 = TREE_OPERAND (exp, 0);
15481 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15482 if (arg1)
15483 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15484 break;
15486 case COND_EXPR:
15487 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15488 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15489 if (arg0 || arg1)
15490 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15491 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15492 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15493 break;
15495 case CALL_EXPR:
15497 const enum built_in_function fcode = builtin_mathfn_code (exp);
15498 switch (fcode)
15500 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15501 /* Strip the copysign call and return its 1st argument. */
15502 arg0 = CALL_EXPR_ARG (exp, 0);
15503 arg1 = CALL_EXPR_ARG (exp, 1);
15504 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15506 default:
15507 /* Strip sign ops from the argument of "odd" math functions. */
15508 if (negate_mathfn_p (fcode))
15510 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15511 if (arg0)
15512 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15514 break;
15517 break;
15519 default:
15520 break;
15522 return NULL_TREE;
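/* Examples of what fold_strip_sign_ops licenses, valid only when the
   caller has established that the sign of the result is irrelevant
   (e.g. the value feeds fabs or a comparison against zero):

     x * -y           =>  x * y
     -x / -y          =>  x / y
     copysign (x, s)  =>  x          (s kept only for side effects)
     sin (-x)         =>  sin (x)    an "odd" function per negate_mathfn_p

   In each case only the sign of the computed value can change.  */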