1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
69 /* Nonzero if we are folding constants inside an initializer; zero
70 otherwise. */
71 int folding_initializer = 0;
73 /* The following constants represent a bit-based encoding of GCC's
74 comparison operators. This encoding simplifies transformations
75 on relational comparison operators, such as AND and OR. */
76 enum comparison_code {
77 COMPCODE_FALSE = 0,
78 COMPCODE_LT = 1,
79 COMPCODE_EQ = 2,
80 COMPCODE_LE = 3,
81 COMPCODE_GT = 4,
82 COMPCODE_LTGT = 5,
83 COMPCODE_GE = 6,
84 COMPCODE_ORD = 7,
85 COMPCODE_UNORD = 8,
86 COMPCODE_UNLT = 9,
87 COMPCODE_UNEQ = 10,
88 COMPCODE_UNLE = 11,
89 COMPCODE_UNGT = 12,
90 COMPCODE_NE = 13,
91 COMPCODE_UNGE = 14,
92 COMPCODE_TRUE = 15
95 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
96 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
97 static bool negate_mathfn_p (enum built_in_function);
98 static bool negate_expr_p (tree);
99 static tree negate_expr (tree);
100 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
101 static tree associate_trees (tree, tree, enum tree_code, tree);
102 static tree const_binop (enum tree_code, tree, tree, int);
103 static enum comparison_code comparison_to_compcode (enum tree_code);
104 static enum tree_code compcode_to_comparison (enum comparison_code);
105 static tree combine_comparisons (enum tree_code, enum tree_code,
106 enum tree_code, tree, tree, tree);
107 static int truth_value_p (enum tree_code);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
115 tree *, tree *);
116 static tree sign_bit_p (tree, const_tree);
117 static int simple_operand_p (const_tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *, bool *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
124 tree);
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
136 tree, tree, tree);
137 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (const_tree, const_tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
148 addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
152 sign. */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
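/* Illustrative sketch (example only, not part of this file's
   interface): two large positive addends whose two's complement sum
   wraps to a negative value.  The signs of A and B agree but the sign
   of SUM differs, so OVERFLOW_SUM_SIGN reports overflow.  */

static int
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;
  /* Compute the wrapping sum in unsigned arithmetic to sidestep
     undefined signed overflow.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
				       + (unsigned HOST_WIDE_INT) b);

  return OVERFLOW_SUM_SIGN (a, b, sum);	/* nonzero */
}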
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of each word is LOWPART + HIGHPART * BASE. */
160 #define LOWPART(x) \
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
170 static void
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
183 static void
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
185 HOST_WIDE_INT *hi)
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
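/* Illustrative sketch (example only): encode and decode are exact
   inverses, since each half-word digit is simply LOWPART or HIGHPART
   of one input word.  For a 64-bit HOST_WIDE_INT and
   low == 0x123456789abcdef0, words[0] is 0x9abcdef0 and words[1] is
   0x12345678.  */

static int
encode_decode_roundtrip_example (unsigned HOST_WIDE_INT low,
				 HOST_WIDE_INT hi)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low2;
  HOST_WIDE_INT hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  return low2 == low && hi2 == hi;	/* always nonzero */
}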
191 /* Force the double-word integer L1, H1 to be within the range of the
192 integer type TYPE. Stores the properly truncated and sign-extended
193 double-word integer in *LV, *HV. Returns true if the operation
194 overflows, that is, argument and result are different. */
196 int
197 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
198 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
200 unsigned HOST_WIDE_INT low0 = l1;
201 HOST_WIDE_INT high0 = h1;
202 unsigned int prec;
203 int sign_extended_type;
205 if (POINTER_TYPE_P (type)
206 || TREE_CODE (type) == OFFSET_TYPE)
207 prec = POINTER_SIZE;
208 else
209 prec = TYPE_PRECISION (type);
211 /* Size types *are* sign extended. */
212 sign_extended_type = (!TYPE_UNSIGNED (type)
213 || (TREE_CODE (type) == INTEGER_TYPE
214 && TYPE_IS_SIZETYPE (type)));
216 /* First clear all bits that are beyond the type's precision. */
217 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219 else if (prec > HOST_BITS_PER_WIDE_INT)
220 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
221 else
223 h1 = 0;
224 if (prec < HOST_BITS_PER_WIDE_INT)
225 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
228 /* Then do sign extension if necessary. */
229 if (!sign_extended_type)
230 /* No sign extension */;
231 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
232 /* Correct width already. */;
233 else if (prec > HOST_BITS_PER_WIDE_INT)
235 /* Sign extend top half? */
236 if (h1 & ((unsigned HOST_WIDE_INT)1
237 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
238 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240 else if (prec == HOST_BITS_PER_WIDE_INT)
242 if ((HOST_WIDE_INT)l1 < 0)
243 h1 = -1;
245 else
247 /* Sign extend bottom half? */
248 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
250 h1 = -1;
251 l1 |= (HOST_WIDE_INT)(-1) << prec;
255 *lv = l1;
256 *hv = h1;
258 /* If the value didn't fit, signal overflow. */
259 return l1 != low0 || h1 != high0;
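/* Illustrative sketch (example only) of the masking and sign
   extension above, for a value narrower than one host word.  Fitting
   the value 255 to a hypothetical 8-bit signed precision first masks
   it to the low 8 bits and then sign extends from bit 7, yielding all
   ones, i.e. the value -1; the mismatch with the original value is
   what fit_double_type reports as overflow.  */

static int
sign_extend_to_prec_example (void)
{
  unsigned HOST_WIDE_INT l1 = 0xff;	/* the value 255 */
  unsigned int prec = 8;		/* assumed 8-bit signed type */

  l1 &= ~((HOST_WIDE_INT) (-1) << prec);	/* still 0xff */
  if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
    l1 |= (HOST_WIDE_INT) (-1) << prec;		/* sign extend */

  return l1 == (unsigned HOST_WIDE_INT) -1;	/* nonzero: now -1 */
}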
262 /* We force the double-word integer HIGH:LOW to the range of the
263 type TYPE by sign- or zero-extending it.
264 OVERFLOWABLE indicates whether we are interested
265 in overflow of the value: when >0 we are only interested in signed
266 overflow, for <0 we are interested in any overflow. OVERFLOWED
267 indicates whether overflow has already occurred. We force the
268 value to be within range of the type (by setting to 0 or 1 all
269 the bits outside the type's range). We set TREE_OVERFLOW if
270 OVERFLOWED is nonzero,
271 or OVERFLOWABLE is >0 and signed overflow occurs,
272 or OVERFLOWABLE is <0 and any overflow occurs.
273 We return a new tree node for the extended double-word integer.
274 The node is shared
275 if no overflow flags are set. */
277 tree
278 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
279 HOST_WIDE_INT high, int overflowable,
280 bool overflowed)
282 int sign_extended_type;
283 bool overflow;
285 /* Size types *are* sign extended. */
286 sign_extended_type = (!TYPE_UNSIGNED (type)
287 || (TREE_CODE (type) == INTEGER_TYPE
288 && TYPE_IS_SIZETYPE (type)));
290 overflow = fit_double_type (low, high, &low, &high, type);
292 /* If we need to set overflow flags, return a new unshared node. */
293 if (overflowed || overflow)
295 if (overflowed
296 || overflowable < 0
297 || (overflowable > 0 && sign_extended_type))
299 tree t = make_node (INTEGER_CST);
300 TREE_INT_CST_LOW (t) = low;
301 TREE_INT_CST_HIGH (t) = high;
302 TREE_TYPE (t) = type;
303 TREE_OVERFLOW (t) = 1;
304 return t;
308 /* Else build a shared node. */
309 return build_int_cst_wide (type, low, high);
312 /* Add two doubleword integers with doubleword result.
313 Return nonzero if the operation overflows according to UNSIGNED_P.
314 Each argument is given as two `HOST_WIDE_INT' pieces.
315 One argument is L1 and H1; the other, L2 and H2.
316 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318 int
319 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
320 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
322 bool unsigned_p)
324 unsigned HOST_WIDE_INT l;
325 HOST_WIDE_INT h;
327 l = l1 + l2;
328 h = h1 + h2 + (l < l1);
330 *lv = l;
331 *hv = h;
333 if (unsigned_p)
334 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
335 else
336 return OVERFLOW_SUM_SIGN (h1, h2, h);
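/* Illustrative sketch (example only): the carry out of the low word
   is detected by the unsigned comparison L < L1 above.  Adding 1 to
   the double-word value (all-ones, 0) wraps the low word to zero and
   carries into the high word, giving (0, 1) with no overflow.  */

static int
add_double_carry_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow;

  overflow = add_double_with_sign ((unsigned HOST_WIDE_INT) -1,
				   (HOST_WIDE_INT) 0,
				   (unsigned HOST_WIDE_INT) 1,
				   (HOST_WIDE_INT) 0,
				   &lv, &hv, false);
  return !overflow && lv == 0 && hv == 1;	/* always nonzero */
}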
339 /* Negate a doubleword integer with doubleword result.
340 Return nonzero if the operation overflows, assuming it's signed.
341 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
342 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
344 int
345 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
348 if (l1 == 0)
350 *lv = 0;
351 *hv = - h1;
352 return (*hv & h1) < 0;
354 else
356 *lv = -l1;
357 *hv = ~h1;
358 return 0;
362 /* Multiply two doubleword integers with doubleword result.
363 Return nonzero if the operation overflows according to UNSIGNED_P.
364 Each argument is given as two `HOST_WIDE_INT' pieces.
365 One argument is L1 and H1; the other, L2 and H2.
366 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
368 int
369 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
370 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
371 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
372 bool unsigned_p)
374 HOST_WIDE_INT arg1[4];
375 HOST_WIDE_INT arg2[4];
376 HOST_WIDE_INT prod[4 * 2];
377 unsigned HOST_WIDE_INT carry;
378 int i, j, k;
379 unsigned HOST_WIDE_INT toplow, neglow;
380 HOST_WIDE_INT tophigh, neghigh;
382 encode (arg1, l1, h1);
383 encode (arg2, l2, h2);
385 memset (prod, 0, sizeof prod);
387 for (i = 0; i < 4; i++)
389 carry = 0;
390 for (j = 0; j < 4; j++)
392 k = i + j;
393 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
394 carry += arg1[i] * arg2[j];
395 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
396 carry += prod[k];
397 prod[k] = LOWPART (carry);
398 carry = HIGHPART (carry);
400 prod[i + 4] = carry;
403 decode (prod, lv, hv);
404 decode (prod + 4, &toplow, &tophigh);
406 /* Unsigned overflow is immediate. */
407 if (unsigned_p)
408 return (toplow | tophigh) != 0;
410 /* Check for signed overflow by calculating the signed representation of the
411 top half of the result; it should agree with the low half's sign bit. */
412 if (h1 < 0)
414 neg_double (l2, h2, &neglow, &neghigh);
415 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
417 if (h2 < 0)
419 neg_double (l1, h1, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
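/* Illustrative sketch (example only): the loops above implement
   schoolbook multiplication on half-word digits, so with 64-bit host
   words the digits are 32 bits and BASE is 2**32.  Multiplying
   BASE + 2 by BASE + 3 gives BASE*BASE + 5*BASE + 6: the BASE*BASE
   term carries into the high result word.  */

static int
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow;

  overflow = mul_double_with_sign (BASE + 2, 0, BASE + 3, 0,
				   &lv, &hv, true);
  return !overflow && hv == 1 && lv == 5 * BASE + 6;
}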
425 /* Shift the doubleword integer in L1, H1 left by COUNT places
426 keeping only PREC bits of result.
427 Shift right if COUNT is negative.
428 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
429 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
431 void
432 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
433 HOST_WIDE_INT count, unsigned int prec,
434 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
436 unsigned HOST_WIDE_INT signmask;
438 if (count < 0)
440 rshift_double (l1, h1, -count, prec, lv, hv, arith);
441 return;
444 if (SHIFT_COUNT_TRUNCATED)
445 count %= prec;
447 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
449 /* Shifting by the host word size is undefined according to the
450 ANSI standard, so we must handle this as a special case. */
451 *hv = 0;
452 *lv = 0;
454 else if (count >= HOST_BITS_PER_WIDE_INT)
456 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
457 *lv = 0;
459 else
461 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
462 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
463 *lv = l1 << count;
466 /* Sign extend all bits that are beyond the precision. */
468 signmask = -((prec > HOST_BITS_PER_WIDE_INT
469 ? ((unsigned HOST_WIDE_INT) *hv
470 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
471 : (*lv >> (prec - 1))) & 1);
473 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
475 else if (prec >= HOST_BITS_PER_WIDE_INT)
477 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
478 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
480 else
482 *hv = signmask;
483 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
484 *lv |= signmask << prec;
488 /* Shift the doubleword integer in L1, H1 right by COUNT places
489 keeping only PREC bits of result. COUNT must be positive.
490 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
491 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
493 void
494 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
495 HOST_WIDE_INT count, unsigned int prec,
496 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
497 int arith)
499 unsigned HOST_WIDE_INT signmask;
501 signmask = (arith
502 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
503 : 0);
505 if (SHIFT_COUNT_TRUNCATED)
506 count %= prec;
508 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
510 /* Shifting by the host word size is undefined according to the
511 ANSI standard, so we must handle this as a special case. */
512 *hv = 0;
513 *lv = 0;
515 else if (count >= HOST_BITS_PER_WIDE_INT)
517 *hv = 0;
518 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
520 else
522 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
523 *lv = ((l1 >> count)
524 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
527 /* Zero / sign extend all bits that are beyond the precision. */
529 if (count >= (HOST_WIDE_INT)prec)
531 *hv = signmask;
532 *lv = signmask;
534 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
536 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
538 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
539 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
541 else
543 *hv = signmask;
544 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
545 *lv |= signmask << (prec - count);
549 /* Rotate the doubleword integer in L1, H1 left by COUNT places
550 keeping only PREC bits of result.
551 Rotate right if COUNT is negative.
552 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
554 void
555 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
556 HOST_WIDE_INT count, unsigned int prec,
557 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
559 unsigned HOST_WIDE_INT s1l, s2l;
560 HOST_WIDE_INT s1h, s2h;
562 count %= prec;
563 if (count < 0)
564 count += prec;
566 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
567 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
568 *lv = s1l | s2l;
569 *hv = s1h | s2h;
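/* Illustrative sketch (example only): composing the two shifts above
   mirrors the single-word identity
   rotl (x, n) == (x << n) | (x >> (w - n)).  Rotating the topmost of
   2 * HOST_BITS_PER_WIDE_INT bits left by one place wraps it around
   to the least significant bit.  */

static int
lrotate_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lrotate_double (0,
		  (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
				   << (HOST_BITS_PER_WIDE_INT - 1)),
		  1, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv);
  return lv == 1 && hv == 0;	/* always nonzero */
}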
572 /* Rotate the doubleword integer in L1, H1 left by COUNT places
573 keeping only PREC bits of result. COUNT must be positive.
574 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
576 void
577 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
578 HOST_WIDE_INT count, unsigned int prec,
579 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
581 unsigned HOST_WIDE_INT s1l, s2l;
582 HOST_WIDE_INT s1h, s2h;
584 count %= prec;
585 if (count < 0)
586 count += prec;
588 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
589 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
590 *lv = s1l | s2l;
591 *hv = s1h | s2h;
594 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
595 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
596 CODE is a tree code for a kind of division, one of
597 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
598 or EXACT_DIV_EXPR
599 It controls how the quotient is rounded to an integer.
600 Return nonzero if the operation overflows.
601 UNS nonzero says do unsigned division. */
603 int
604 div_and_round_double (enum tree_code code, int uns,
605 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
606 HOST_WIDE_INT hnum_orig,
607 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
608 HOST_WIDE_INT hden_orig,
609 unsigned HOST_WIDE_INT *lquo,
610 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
611 HOST_WIDE_INT *hrem)
613 int quo_neg = 0;
614 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
615 HOST_WIDE_INT den[4], quo[4];
616 int i, j;
617 unsigned HOST_WIDE_INT work;
618 unsigned HOST_WIDE_INT carry = 0;
619 unsigned HOST_WIDE_INT lnum = lnum_orig;
620 HOST_WIDE_INT hnum = hnum_orig;
621 unsigned HOST_WIDE_INT lden = lden_orig;
622 HOST_WIDE_INT hden = hden_orig;
623 int overflow = 0;
625 if (hden == 0 && lden == 0)
626 overflow = 1, lden = 1;
628 /* Calculate quotient sign and convert operands to unsigned. */
629 if (!uns)
631 if (hnum < 0)
633 quo_neg = ~ quo_neg;
634 /* (minimum integer) / (-1) is the only overflow case. */
635 if (neg_double (lnum, hnum, &lnum, &hnum)
636 && ((HOST_WIDE_INT) lden & hden) == -1)
637 overflow = 1;
639 if (hden < 0)
641 quo_neg = ~ quo_neg;
642 neg_double (lden, hden, &lden, &hden);
646 if (hnum == 0 && hden == 0)
647 { /* single precision */
648 *hquo = *hrem = 0;
649 /* This unsigned division rounds toward zero. */
650 *lquo = lnum / lden;
651 goto finish_up;
654 if (hnum == 0)
655 { /* trivial case: dividend < divisor */
656 /* hden != 0 already checked. */
657 *hquo = *lquo = 0;
658 *hrem = hnum;
659 *lrem = lnum;
660 goto finish_up;
663 memset (quo, 0, sizeof quo);
665 memset (num, 0, sizeof num); /* to zero the extra element used for scaling */
666 memset (den, 0, sizeof den);
668 encode (num, lnum, hnum);
669 encode (den, lden, hden);
671 /* Special code for when the divisor < BASE. */
672 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
674 /* hnum != 0 already checked. */
675 for (i = 4 - 1; i >= 0; i--)
677 work = num[i] + carry * BASE;
678 quo[i] = work / lden;
679 carry = work % lden;
682 else
684 /* Full double precision division,
685 with thanks to Don Knuth's "Seminumerical Algorithms". */
686 int num_hi_sig, den_hi_sig;
687 unsigned HOST_WIDE_INT quo_est, scale;
689 /* Find the highest nonzero divisor digit. */
690 for (i = 4 - 1;; i--)
691 if (den[i] != 0)
693 den_hi_sig = i;
694 break;
697 /* Ensure that the first digit of the divisor is at least BASE/2.
698 This is required by the quotient digit estimation algorithm. */
700 scale = BASE / (den[den_hi_sig] + 1);
701 if (scale > 1)
702 { /* scale divisor and dividend */
703 carry = 0;
704 for (i = 0; i <= 4 - 1; i++)
706 work = (num[i] * scale) + carry;
707 num[i] = LOWPART (work);
708 carry = HIGHPART (work);
711 num[4] = carry;
712 carry = 0;
713 for (i = 0; i <= 4 - 1; i++)
715 work = (den[i] * scale) + carry;
716 den[i] = LOWPART (work);
717 carry = HIGHPART (work);
718 if (den[i] != 0) den_hi_sig = i;
722 num_hi_sig = 4;
724 /* Main loop */
725 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
727 /* Guess the next quotient digit, quo_est, by dividing the first
728 two remaining dividend digits by the high order quotient digit.
729 quo_est is never low and is at most 2 high. */
730 unsigned HOST_WIDE_INT tmp;
732 num_hi_sig = i + den_hi_sig + 1;
733 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
734 if (num[num_hi_sig] != den[den_hi_sig])
735 quo_est = work / den[den_hi_sig];
736 else
737 quo_est = BASE - 1;
739 /* Refine quo_est so it's usually correct, and at most one high. */
740 tmp = work - quo_est * den[den_hi_sig];
741 if (tmp < BASE
742 && (den[den_hi_sig - 1] * quo_est
743 > (tmp * BASE + num[num_hi_sig - 2])))
744 quo_est--;
746 /* Try QUO_EST as the quotient digit, by multiplying the
747 divisor by QUO_EST and subtracting from the remaining dividend.
748 Keep in mind that QUO_EST is the I - 1st digit. */
750 carry = 0;
751 for (j = 0; j <= den_hi_sig; j++)
753 work = quo_est * den[j] + carry;
754 carry = HIGHPART (work);
755 work = num[i + j] - LOWPART (work);
756 num[i + j] = LOWPART (work);
757 carry += HIGHPART (work) != 0;
760 /* If quo_est was high by one, then num[i] went negative and
761 we need to correct things. */
762 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
764 quo_est--;
765 carry = 0; /* add divisor back in */
766 for (j = 0; j <= den_hi_sig; j++)
768 work = num[i + j] + den[j] + carry;
769 carry = HIGHPART (work);
770 num[i + j] = LOWPART (work);
773 num [num_hi_sig] += carry;
776 /* Store the quotient digit. */
777 quo[i] = quo_est;
781 decode (quo, lquo, hquo);
783 finish_up:
784 /* If result is negative, make it so. */
785 if (quo_neg)
786 neg_double (*lquo, *hquo, lquo, hquo);
788 /* Compute trial remainder: rem = num - (quo * den) */
789 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
790 neg_double (*lrem, *hrem, lrem, hrem);
791 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
793 switch (code)
795 case TRUNC_DIV_EXPR:
796 case TRUNC_MOD_EXPR: /* round toward zero */
797 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
798 return overflow;
800 case FLOOR_DIV_EXPR:
801 case FLOOR_MOD_EXPR: /* round toward negative infinity */
802 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
804 /* quo = quo - 1; */
805 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
806 lquo, hquo);
808 else
809 return overflow;
810 break;
812 case CEIL_DIV_EXPR:
813 case CEIL_MOD_EXPR: /* round toward positive infinity */
814 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
816 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
817 lquo, hquo);
819 else
820 return overflow;
821 break;
823 case ROUND_DIV_EXPR:
824 case ROUND_MOD_EXPR: /* round to closest integer */
826 unsigned HOST_WIDE_INT labs_rem = *lrem;
827 HOST_WIDE_INT habs_rem = *hrem;
828 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
829 HOST_WIDE_INT habs_den = hden, htwice;
831 /* Get absolute values. */
832 if (*hrem < 0)
833 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
834 if (hden < 0)
835 neg_double (lden, hden, &labs_den, &habs_den);
837 /* If (2 * abs (lrem) >= abs (lden)) */
838 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
839 labs_rem, habs_rem, &ltwice, &htwice);
841 if (((unsigned HOST_WIDE_INT) habs_den
842 < (unsigned HOST_WIDE_INT) htwice)
843 || (((unsigned HOST_WIDE_INT) habs_den
844 == (unsigned HOST_WIDE_INT) htwice)
845 && (labs_den < ltwice)))
847 if (*hquo < 0)
848 /* quo = quo - 1; */
849 add_double (*lquo, *hquo,
850 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
851 else
852 /* quo = quo + 1; */
853 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
854 lquo, hquo);
856 else
857 return overflow;
859 break;
861 default:
862 gcc_unreachable ();
865 /* Compute true remainder: rem = num - (quo * den) */
866 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
867 neg_double (*lrem, *hrem, lrem, hrem);
868 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
869 return overflow;
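/* Illustrative sketch (example only) of the rounding adjustments
   above.  Dividing -7 by 2 first truncates to quotient -3, remainder
   -1; FLOOR_DIV_EXPR then decrements the quotient to -4 and the
   recomputed remainder becomes 1.  CEIL_DIV_EXPR would keep -3, and
   ROUND_DIV_EXPR would also give -4, since twice the remainder
   reaches the divisor.  */

static int
div_rounding_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  return hquo == -1 && lquo == (unsigned HOST_WIDE_INT) -4
	 && hrem == 0 && lrem == 1;
}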
872 /* If ARG2 divides ARG1 with zero remainder, carries out the division
873 of type CODE and returns the quotient.
874 Otherwise returns NULL_TREE. */
876 static tree
877 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
879 unsigned HOST_WIDE_INT int1l, int2l;
880 HOST_WIDE_INT int1h, int2h;
881 unsigned HOST_WIDE_INT quol, reml;
882 HOST_WIDE_INT quoh, remh;
883 tree type = TREE_TYPE (arg1);
884 int uns = TYPE_UNSIGNED (type);
886 int1l = TREE_INT_CST_LOW (arg1);
887 int1h = TREE_INT_CST_HIGH (arg1);
888 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
889 &obj[some_exotic_number]. */
890 if (POINTER_TYPE_P (type))
892 uns = false;
893 type = signed_type_for (type);
894 fit_double_type (int1l, int1h, &int1l, &int1h,
895 type);
897 else
898 fit_double_type (int1l, int1h, &int1l, &int1h, type);
899 int2l = TREE_INT_CST_LOW (arg2);
900 int2h = TREE_INT_CST_HIGH (arg2);
902 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
903 &quol, &quoh, &reml, &remh);
904 if (remh != 0 || reml != 0)
905 return NULL_TREE;
907 return build_int_cst_wide (type, quol, quoh);
910 /* This is nonzero if we should defer warnings about undefined
911 overflow. This facility exists because these warnings are a
912 special case. The code to estimate loop iterations does not want
913 to issue any warnings, since it works with expressions which do not
914 occur in user code. Various bits of cleanup code call fold(), but
915 only use the result if it has certain characteristics (e.g., is a
916 constant); that code only wants to issue a warning if the result is
917 used. */
919 static int fold_deferring_overflow_warnings;
921 /* If a warning about undefined overflow is deferred, this is the
922 warning. Note that this may cause us to turn two warnings into
923 one, but that is fine since it is sufficient to only give one
924 warning per expression. */
926 static const char* fold_deferred_overflow_warning;
928 /* If a warning about undefined overflow is deferred, this is the
929 level at which the warning should be emitted. */
931 static enum warn_strict_overflow_code fold_deferred_overflow_code;
933 /* Start deferring overflow warnings. We could use a stack here to
934 permit nested calls, but at present it is not necessary. */
936 void
937 fold_defer_overflow_warnings (void)
939 ++fold_deferring_overflow_warnings;
942 /* Stop deferring overflow warnings. If there is a pending warning,
943 and ISSUE is true, then issue the warning if appropriate. STMT is
944 the statement with which the warning should be associated (used for
945 location information); STMT may be NULL. CODE is the level of the
946 warning--a warn_strict_overflow_code value. This function will use
947 the smaller of CODE and the deferred code when deciding whether to
948 issue the warning. CODE may be zero to mean to always use the
949 deferred code. */
951 void
952 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
954 const char *warnmsg;
955 location_t locus;
957 gcc_assert (fold_deferring_overflow_warnings > 0);
958 --fold_deferring_overflow_warnings;
959 if (fold_deferring_overflow_warnings > 0)
961 if (fold_deferred_overflow_warning != NULL
962 && code != 0
963 && code < (int) fold_deferred_overflow_code)
964 fold_deferred_overflow_code = code;
965 return;
968 warnmsg = fold_deferred_overflow_warning;
969 fold_deferred_overflow_warning = NULL;
971 if (!issue || warnmsg == NULL)
972 return;
974 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
975 return;
977 /* Use the smallest code level when deciding to issue the
978 warning. */
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
982 if (!issue_strict_overflow_warning (code))
983 return;
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
987 else
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 /* Stop deferring overflow warnings, ignoring any deferred
993 warnings. */
995 void
996 fold_undefer_and_ignore_overflow_warnings (void)
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1001 /* Whether we are deferring overflow warnings. */
1003 bool
1004 fold_deferring_overflow_warnings_p (void)
1006 return fold_deferring_overflow_warnings > 0;
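/* Illustrative sketch (example only) of the intended calling pattern
   for the deferral machinery above: a caller folds speculatively,
   then issues or discards the deferred warning depending on whether
   it used the result.  The "result was used" test here is a
   hypothetical stand-in for whatever criterion a real caller
   applies.  */

static void
speculative_fold_example (tree expr)
{
  tree res;

  fold_defer_overflow_warnings ();
  res = fold (expr);
  if (res != expr)
    /* The folded result is used; issue any deferred warning.  */
    fold_undefer_overflow_warnings (true, NULL_TREE, 0);
  else
    /* The result is discarded, and so is the warning.  */
    fold_undefer_and_ignore_overflow_warnings ();
}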
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1012 static void
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 if (fold_deferring_overflow_warnings > 0)
1017 if (fold_deferred_overflow_warning == NULL
1018 || wc < fold_deferred_overflow_code)
1020 fold_deferred_overflow_warning = gmsgid;
1021 fold_deferred_overflow_code = wc;
1024 else if (issue_strict_overflow_warning (wc))
1025 warning (OPT_Wstrict_overflow, gmsgid);
1028 /* Return true if the built-in mathematical function specified by CODE
1029 is odd, i.e. -f(x) == f(-x). */
1031 static bool
1032 negate_mathfn_p (enum built_in_function code)
1034 switch (code)
1036 CASE_FLT_FN (BUILT_IN_ASIN):
1037 CASE_FLT_FN (BUILT_IN_ASINH):
1038 CASE_FLT_FN (BUILT_IN_ATAN):
1039 CASE_FLT_FN (BUILT_IN_ATANH):
1040 CASE_FLT_FN (BUILT_IN_CASIN):
1041 CASE_FLT_FN (BUILT_IN_CASINH):
1042 CASE_FLT_FN (BUILT_IN_CATAN):
1043 CASE_FLT_FN (BUILT_IN_CATANH):
1044 CASE_FLT_FN (BUILT_IN_CBRT):
1045 CASE_FLT_FN (BUILT_IN_CPROJ):
1046 CASE_FLT_FN (BUILT_IN_CSIN):
1047 CASE_FLT_FN (BUILT_IN_CSINH):
1048 CASE_FLT_FN (BUILT_IN_CTAN):
1049 CASE_FLT_FN (BUILT_IN_CTANH):
1050 CASE_FLT_FN (BUILT_IN_ERF):
1051 CASE_FLT_FN (BUILT_IN_LLROUND):
1052 CASE_FLT_FN (BUILT_IN_LROUND):
1053 CASE_FLT_FN (BUILT_IN_ROUND):
1054 CASE_FLT_FN (BUILT_IN_SIN):
1055 CASE_FLT_FN (BUILT_IN_SINH):
1056 CASE_FLT_FN (BUILT_IN_TAN):
1057 CASE_FLT_FN (BUILT_IN_TANH):
1058 CASE_FLT_FN (BUILT_IN_TRUNC):
1059 return true;
1061 CASE_FLT_FN (BUILT_IN_LLRINT):
1062 CASE_FLT_FN (BUILT_IN_LRINT):
1063 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1064 CASE_FLT_FN (BUILT_IN_RINT):
1065 return !flag_rounding_math;
1067 default:
1068 break;
1070 return false;
1073 /* Check whether we may negate an integer constant T without causing
1074 overflow. */
1076 bool
1077 may_negate_without_overflow_p (const_tree t)
1079 unsigned HOST_WIDE_INT val;
1080 unsigned int prec;
1081 tree type;
1083 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085 type = TREE_TYPE (t);
1086 if (TYPE_UNSIGNED (type))
1087 return false;
1089 prec = TYPE_PRECISION (type);
1090 if (prec > HOST_BITS_PER_WIDE_INT)
1092 if (TREE_INT_CST_LOW (t) != 0)
1093 return true;
1094 prec -= HOST_BITS_PER_WIDE_INT;
1095 val = TREE_INT_CST_HIGH (t);
1097 else
1098 val = TREE_INT_CST_LOW (t);
1099 if (prec < HOST_BITS_PER_WIDE_INT)
1100 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1101 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
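/* Illustrative sketch (example only): for a signed type every value
   except the type minimum can be negated, since -INT_MIN would be
   INT_MAX + 1.  The comparison above against 1 << (prec - 1) singles
   out exactly that minimum value.  */

static int
may_negate_example (void)
{
  return may_negate_without_overflow_p (build_int_cst (integer_type_node,
						       -7))
	 && !may_negate_without_overflow_p
	      (TYPE_MIN_VALUE (integer_type_node));
}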
1104 /* Determine whether an expression T can be cheaply negated using
1105 the function negate_expr without introducing undefined overflow. */
1107 static bool
1108 negate_expr_p (tree t)
1110 tree type;
1112 if (t == 0)
1113 return false;
1115 type = TREE_TYPE (t);
1117 STRIP_SIGN_NOPS (t);
1118 switch (TREE_CODE (t))
1120 case INTEGER_CST:
1121 if (TYPE_OVERFLOW_WRAPS (type))
1122 return true;
1124 /* Check that -CST will not overflow type. */
1125 return may_negate_without_overflow_p (t);
1126 case BIT_NOT_EXPR:
1127 return (INTEGRAL_TYPE_P (type)
1128 && TYPE_OVERFLOW_WRAPS (type));
1130 case FIXED_CST:
1131 case REAL_CST:
1132 case NEGATE_EXPR:
1133 return true;
1135 case COMPLEX_CST:
1136 return negate_expr_p (TREE_REALPART (t))
1137 && negate_expr_p (TREE_IMAGPART (t));
1139 case COMPLEX_EXPR:
1140 return negate_expr_p (TREE_OPERAND (t, 0))
1141 && negate_expr_p (TREE_OPERAND (t, 1));
1143 case CONJ_EXPR:
1144 return negate_expr_p (TREE_OPERAND (t, 0));
1146 case PLUS_EXPR:
1147 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1148 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1149 return false;
1150 /* -(A + B) -> (-B) - A. */
1151 if (negate_expr_p (TREE_OPERAND (t, 1))
1152 && reorder_operands_p (TREE_OPERAND (t, 0),
1153 TREE_OPERAND (t, 1)))
1154 return true;
1155 /* -(A + B) -> (-A) - B. */
1156 return negate_expr_p (TREE_OPERAND (t, 0));
1158 case MINUS_EXPR:
1159 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1160 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1162 && reorder_operands_p (TREE_OPERAND (t, 0),
1163 TREE_OPERAND (t, 1));
1165 case MULT_EXPR:
1166 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1167 break;
1169 /* Fall through. */
1171 case RDIV_EXPR:
1172 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1173 return negate_expr_p (TREE_OPERAND (t, 1))
1174 || negate_expr_p (TREE_OPERAND (t, 0));
1175 break;
1177 case TRUNC_DIV_EXPR:
1178 case ROUND_DIV_EXPR:
1179 case FLOOR_DIV_EXPR:
1180 case CEIL_DIV_EXPR:
1181 case EXACT_DIV_EXPR:
1182 /* In general we can't negate A / B, because if A is INT_MIN and
1183 B is 1, we may turn this into INT_MIN / -1 which is undefined
1184 and actually traps on some architectures. But if overflow is
1185 undefined, we can negate, because - (INT_MIN / 1) is an
1186 overflow. */
1187 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1188 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1189 break;
1190 return negate_expr_p (TREE_OPERAND (t, 1))
1191 || negate_expr_p (TREE_OPERAND (t, 0));
1193 case NOP_EXPR:
1194 /* Negate -((double)float) as (double)(-float). */
1195 if (TREE_CODE (type) == REAL_TYPE)
1197 tree tem = strip_float_extensions (t);
1198 if (tem != t)
1199 return negate_expr_p (tem);
1201 break;
1203 case CALL_EXPR:
1204 /* Negate -f(x) as f(-x). */
1205 if (negate_mathfn_p (builtin_mathfn_code (t)))
1206 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1207 break;
1209 case RSHIFT_EXPR:
1210 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1211 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 tree op1 = TREE_OPERAND (t, 1);
1214 if (TREE_INT_CST_HIGH (op1) == 0
1215 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1216 == TREE_INT_CST_LOW (op1))
1217 return true;
1219 break;
1221 default:
1222 break;
1224 return false;
1227 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
1228 no simplification is possible.
1229 If negate_expr_p would return true for T, NULL_TREE will never be
1230 returned. */
1232 static tree
1233 fold_negate_expr (tree t)
1235 tree type = TREE_TYPE (t);
1236 tree tem;
1238 switch (TREE_CODE (t))
1240 /* Convert - (~A) to A + 1. */
1241 case BIT_NOT_EXPR:
1242 if (INTEGRAL_TYPE_P (type))
1243 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1244 build_int_cst (type, 1));
1245 break;
1247 case INTEGER_CST:
1248 tem = fold_negate_const (t, type);
1249 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1250 || !TYPE_OVERFLOW_TRAPS (type))
1251 return tem;
1252 break;
1254 case REAL_CST:
1255 tem = fold_negate_const (t, type);
1256 /* Two's complement FP formats, such as c4x, may overflow. */
1257 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1258 return tem;
1259 break;
1261 case FIXED_CST:
1262 tem = fold_negate_const (t, type);
1263 return tem;
1265 case COMPLEX_CST:
1267 tree rpart = negate_expr (TREE_REALPART (t));
1268 tree ipart = negate_expr (TREE_IMAGPART (t));
1270 if ((TREE_CODE (rpart) == REAL_CST
1271 && TREE_CODE (ipart) == REAL_CST)
1272 || (TREE_CODE (rpart) == INTEGER_CST
1273 && TREE_CODE (ipart) == INTEGER_CST))
1274 return build_complex (type, rpart, ipart);
1276 break;
1278 case COMPLEX_EXPR:
1279 if (negate_expr_p (t))
1280 return fold_build2 (COMPLEX_EXPR, type,
1281 fold_negate_expr (TREE_OPERAND (t, 0)),
1282 fold_negate_expr (TREE_OPERAND (t, 1)));
1283 break;
1285 case CONJ_EXPR:
1286 if (negate_expr_p (t))
1287 return fold_build1 (CONJ_EXPR, type,
1288 fold_negate_expr (TREE_OPERAND (t, 0)));
1289 break;
1291 case NEGATE_EXPR:
1292 return TREE_OPERAND (t, 0);
1294 case PLUS_EXPR:
1295 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1296 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1298 /* -(A + B) -> (-B) - A. */
1299 if (negate_expr_p (TREE_OPERAND (t, 1))
1300 && reorder_operands_p (TREE_OPERAND (t, 0),
1301 TREE_OPERAND (t, 1)))
1303 tem = negate_expr (TREE_OPERAND (t, 1));
1304 return fold_build2 (MINUS_EXPR, type,
1305 tem, TREE_OPERAND (t, 0));
1308 /* -(A + B) -> (-A) - B. */
1309 if (negate_expr_p (TREE_OPERAND (t, 0)))
1311 tem = negate_expr (TREE_OPERAND (t, 0));
1312 return fold_build2 (MINUS_EXPR, type,
1313 tem, TREE_OPERAND (t, 1));
1316 break;
1318 case MINUS_EXPR:
1319 /* - (A - B) -> B - A */
1320 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1321 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1322 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1323 return fold_build2 (MINUS_EXPR, type,
1324 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1325 break;
1327 case MULT_EXPR:
1328 if (TYPE_UNSIGNED (type))
1329 break;
1331 /* Fall through. */
1333 case RDIV_EXPR:
1334 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1336 tem = TREE_OPERAND (t, 1);
1337 if (negate_expr_p (tem))
1338 return fold_build2 (TREE_CODE (t), type,
1339 TREE_OPERAND (t, 0), negate_expr (tem));
1340 tem = TREE_OPERAND (t, 0);
1341 if (negate_expr_p (tem))
1342 return fold_build2 (TREE_CODE (t), type,
1343 negate_expr (tem), TREE_OPERAND (t, 1));
1345 break;
1347 case TRUNC_DIV_EXPR:
1348 case ROUND_DIV_EXPR:
1349 case FLOOR_DIV_EXPR:
1350 case CEIL_DIV_EXPR:
1351 case EXACT_DIV_EXPR:
1352 /* In general we can't negate A / B, because if A is INT_MIN and
1353 B is 1, we may turn this into INT_MIN / -1 which is undefined
1354 and actually traps on some architectures. But if overflow is
1355 undefined, we can negate, because - (INT_MIN / 1) is an
1356 overflow. */
1357 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1359 const char * const warnmsg = G_("assuming signed overflow does not "
1360 "occur when negating a division");
1361 tem = TREE_OPERAND (t, 1);
1362 if (negate_expr_p (tem))
1364 if (INTEGRAL_TYPE_P (type)
1365 && (TREE_CODE (tem) != INTEGER_CST
1366 || integer_onep (tem)))
1367 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1368 return fold_build2 (TREE_CODE (t), type,
1369 TREE_OPERAND (t, 0), negate_expr (tem));
1371 tem = TREE_OPERAND (t, 0);
1372 if (negate_expr_p (tem))
1374 if (INTEGRAL_TYPE_P (type)
1375 && (TREE_CODE (tem) != INTEGER_CST
1376 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1377 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1378 return fold_build2 (TREE_CODE (t), type,
1379 negate_expr (tem), TREE_OPERAND (t, 1));
1382 break;
1384 case NOP_EXPR:
1385 /* Convert -((double)float) into (double)(-float). */
1386 if (TREE_CODE (type) == REAL_TYPE)
1388 tem = strip_float_extensions (t);
1389 if (tem != t && negate_expr_p (tem))
1390 return fold_convert (type, negate_expr (tem));
1392 break;
1394 case CALL_EXPR:
1395 /* Negate -f(x) as f(-x). */
1396 if (negate_mathfn_p (builtin_mathfn_code (t))
1397 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1399 tree fndecl, arg;
1401 fndecl = get_callee_fndecl (t);
1402 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1403 return build_call_expr (fndecl, 1, arg);
1405 break;
1407 case RSHIFT_EXPR:
1408 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1409 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1411 tree op1 = TREE_OPERAND (t, 1);
1412 if (TREE_INT_CST_HIGH (op1) == 0
1413 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1414 == TREE_INT_CST_LOW (op1))
1416 tree ntype = TYPE_UNSIGNED (type)
1417 ? signed_type_for (type)
1418 : unsigned_type_for (type);
1419 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1420 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1421 return fold_convert (type, temp);
1424 break;
1426 default:
1427 break;
1430 return NULL_TREE;
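/* Illustrative sketch (example only) of the RSHIFT_EXPR case above,
   assuming a 32-bit int with arithmetic right shift: (int) x >> 31 is
   0 or -1 according to the sign of X, so its negation is 0 or 1,
   which is exactly the logical shift (unsigned) x >> 31.  */

static int
negate_sign_shift_example (int x)
{
  int arith = -(x >> 31);	/* assumes arithmetic shift of int */
  int logical = (int) ((unsigned int) x >> 31);
  return arith == logical;	/* nonzero for every x */
}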
1433 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1434 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1435 return NULL_TREE. */
1437 static tree
1438 negate_expr (tree t)
1440 tree type, tem;
1442 if (t == NULL_TREE)
1443 return NULL_TREE;
1445 type = TREE_TYPE (t);
1446 STRIP_SIGN_NOPS (t);
1448 tem = fold_negate_expr (t);
1449 if (!tem)
1450 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1451 return fold_convert (type, tem);
1454 /* Split a tree IN into constant, literal and variable parts that could be
1455 combined with CODE to make IN. "constant" means an expression with
1456 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1457 commutative arithmetic operation. Store the constant part into *CONP,
1458 the literal in *LITP and return the variable part. If a part isn't
1459 present, set it to null. If the tree does not decompose in this way,
1460 return the entire tree as the variable part and the other parts as null.
1462 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1463 case, we negate an operand that was subtracted. Except if it is a
1464 literal for which we use *MINUS_LITP instead.
1466 If NEGATE_P is true, we are negating all of IN, again except a literal
1467 for which we use *MINUS_LITP instead.
1469 If IN is itself a literal or constant, return it as appropriate.
1471 Note that we do not guarantee that any of the three values will be the
1472 same type as IN, but they will have the same signedness and mode. */
1474 static tree
1475 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1476 tree *minus_litp, int negate_p)
1478 tree var = 0;
1480 *conp = 0;
1481 *litp = 0;
1482 *minus_litp = 0;
1484 /* Strip any conversions that don't change the machine mode or signedness. */
1485 STRIP_SIGN_NOPS (in);
1487 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1488 || TREE_CODE (in) == FIXED_CST)
1489 *litp = in;
1490 else if (TREE_CODE (in) == code
1491 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1492 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1493 /* We can associate addition and subtraction together (even
1494 though the C standard doesn't say so) for integers because
1495 the value is not affected. For reals, the value might be
1496 affected, so we can't. */
1497 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1498 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1500 tree op0 = TREE_OPERAND (in, 0);
1501 tree op1 = TREE_OPERAND (in, 1);
1502 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1503 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1505 /* First see if either of the operands is a literal, then a constant. */
1506 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1507 || TREE_CODE (op0) == FIXED_CST)
1508 *litp = op0, op0 = 0;
1509 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1510 || TREE_CODE (op1) == FIXED_CST)
1511 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1513 if (op0 != 0 && TREE_CONSTANT (op0))
1514 *conp = op0, op0 = 0;
1515 else if (op1 != 0 && TREE_CONSTANT (op1))
1516 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1518 /* If we haven't dealt with either operand, this is not a case we can
1519 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1520 if (op0 != 0 && op1 != 0)
1521 var = in;
1522 else if (op0 != 0)
1523 var = op0;
1524 else
1525 var = op1, neg_var_p = neg1_p;
1527 /* Now do any needed negations. */
1528 if (neg_litp_p)
1529 *minus_litp = *litp, *litp = 0;
1530 if (neg_conp_p)
1531 *conp = negate_expr (*conp);
1532 if (neg_var_p)
1533 var = negate_expr (var);
1535 else if (TREE_CONSTANT (in))
1536 *conp = in;
1537 else
1538 var = in;
1540 if (negate_p)
1542 if (*litp)
1543 *minus_litp = *litp, *litp = 0;
1544 else if (*minus_litp)
1545 *litp = *minus_litp, *minus_litp = 0;
1546 *conp = negate_expr (*conp);
1547 var = negate_expr (var);
1550 return var;
1553 /* Re-associate trees split by the above function. T1 and T2 are either
1554 expressions to associate or null. Return the new expression, if any. If
1555 we build an operation, do it in TYPE and with CODE. */
1557 static tree
1558 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1560 if (t1 == 0)
1561 return t2;
1562 else if (t2 == 0)
1563 return t1;
1565 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1566 try to fold this since we will have infinite recursion. But do
1567 deal with any NEGATE_EXPRs. */
1568 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1569 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1571 if (code == PLUS_EXPR)
1573 if (TREE_CODE (t1) == NEGATE_EXPR)
1574 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1575 fold_convert (type, TREE_OPERAND (t1, 0)));
1576 else if (TREE_CODE (t2) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1578 fold_convert (type, TREE_OPERAND (t2, 0)));
1579 else if (integer_zerop (t2))
1580 return fold_convert (type, t1);
1582 else if (code == MINUS_EXPR)
1584 if (integer_zerop (t2))
1585 return fold_convert (type, t1);
1588 return build2 (code, type, fold_convert (type, t1),
1589 fold_convert (type, t2));
1592 return fold_build2 (code, type, fold_convert (type, t1),
1593 fold_convert (type, t2));
1596 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1597 for use in int_const_binop, size_binop and size_diffop. */
1599 static bool
1600 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1602 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1603 return false;
1604 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1605 return false;
1607 switch (code)
1609 case LSHIFT_EXPR:
1610 case RSHIFT_EXPR:
1611 case LROTATE_EXPR:
1612 case RROTATE_EXPR:
1613 return true;
1615 default:
1616 break;
1619 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1620 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1621 && TYPE_MODE (type1) == TYPE_MODE (type2);
1625 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1626 to produce a new constant. Return NULL_TREE if we don't know how
1627 to evaluate CODE at compile-time.
1629 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1631 tree
1632 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1634 unsigned HOST_WIDE_INT int1l, int2l;
1635 HOST_WIDE_INT int1h, int2h;
1636 unsigned HOST_WIDE_INT low;
1637 HOST_WIDE_INT hi;
1638 unsigned HOST_WIDE_INT garbagel;
1639 HOST_WIDE_INT garbageh;
1640 tree t;
1641 tree type = TREE_TYPE (arg1);
1642 int uns = TYPE_UNSIGNED (type);
1643 int is_sizetype
1644 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1645 int overflow = 0;
1647 int1l = TREE_INT_CST_LOW (arg1);
1648 int1h = TREE_INT_CST_HIGH (arg1);
1649 int2l = TREE_INT_CST_LOW (arg2);
1650 int2h = TREE_INT_CST_HIGH (arg2);
1652 switch (code)
1654 case BIT_IOR_EXPR:
1655 low = int1l | int2l, hi = int1h | int2h;
1656 break;
1658 case BIT_XOR_EXPR:
1659 low = int1l ^ int2l, hi = int1h ^ int2h;
1660 break;
1662 case BIT_AND_EXPR:
1663 low = int1l & int2l, hi = int1h & int2h;
1664 break;
1666 case RSHIFT_EXPR:
1667 int2l = -int2l;
1668 case LSHIFT_EXPR:
1669 /* It's unclear from the C standard whether shifts can overflow.
1670 The following code ignores overflow; perhaps a C standard
1671 interpretation ruling is needed. */
1672 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1673 &low, &hi, !uns);
1674 break;
1676 case RROTATE_EXPR:
1677 int2l = - int2l;
1678 case LROTATE_EXPR:
1679 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1680 &low, &hi);
1681 break;
1683 case PLUS_EXPR:
1684 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1685 break;
1687 case MINUS_EXPR:
1688 neg_double (int2l, int2h, &low, &hi);
1689 add_double (int1l, int1h, low, hi, &low, &hi);
1690 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1691 break;
1693 case MULT_EXPR:
1694 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1695 break;
1697 case TRUNC_DIV_EXPR:
1698 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1699 case EXACT_DIV_EXPR:
1700 /* This is a shortcut for a common special case. */
1701 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1702 && !TREE_OVERFLOW (arg1)
1703 && !TREE_OVERFLOW (arg2)
1704 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1706 if (code == CEIL_DIV_EXPR)
1707 int1l += int2l - 1;
1709 low = int1l / int2l, hi = 0;
1710 break;
1713 /* ... fall through ... */
1715 case ROUND_DIV_EXPR:
1716 if (int2h == 0 && int2l == 0)
1717 return NULL_TREE;
1718 if (int2h == 0 && int2l == 1)
1720 low = int1l, hi = int1h;
1721 break;
1723 if (int1l == int2l && int1h == int2h
1724 && ! (int1l == 0 && int1h == 0))
1726 low = 1, hi = 0;
1727 break;
1729 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1730 &low, &hi, &garbagel, &garbageh);
1731 break;
1733 case TRUNC_MOD_EXPR:
1734 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1735 /* This is a shortcut for a common special case. */
1736 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1737 && !TREE_OVERFLOW (arg1)
1738 && !TREE_OVERFLOW (arg2)
1739 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1741 if (code == CEIL_MOD_EXPR)
1742 int1l += int2l - 1;
1743 low = int1l % int2l, hi = 0;
1744 break;
1747 /* ... fall through ... */
1749 case ROUND_MOD_EXPR:
1750 if (int2h == 0 && int2l == 0)
1751 return NULL_TREE;
1752 overflow = div_and_round_double (code, uns,
1753 int1l, int1h, int2l, int2h,
1754 &garbagel, &garbageh, &low, &hi);
1755 break;
1757 case MIN_EXPR:
1758 case MAX_EXPR:
1759 if (uns)
1760 low = (((unsigned HOST_WIDE_INT) int1h
1761 < (unsigned HOST_WIDE_INT) int2h)
1762 || (((unsigned HOST_WIDE_INT) int1h
1763 == (unsigned HOST_WIDE_INT) int2h)
1764 && int1l < int2l));
1765 else
1766 low = (int1h < int2h
1767 || (int1h == int2h && int1l < int2l));
1769 if (low == (code == MIN_EXPR))
1770 low = int1l, hi = int1h;
1771 else
1772 low = int2l, hi = int2h;
1773 break;
1775 default:
1776 return NULL_TREE;
1779 if (notrunc)
1781 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1783 /* Propagate overflow flags ourselves. */
1784 if (((!uns || is_sizetype) && overflow)
1785 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1787 t = copy_node (t);
1788 TREE_OVERFLOW (t) = 1;
1791 else
1792 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1793 ((!uns || is_sizetype) && overflow)
1794 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1796 return t;
1799 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1800 constant. We assume ARG1 and ARG2 have the same data type, or at least
1801 are the same kind of constant and the same machine mode. Return zero if
1802 combining the constants is not allowed in the current operating mode.
1804 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1806 static tree
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 /* Sanity check for the recursive cases. */
1810 if (!arg1 || !arg2)
1811 return NULL_TREE;
1813 STRIP_NOPS (arg1);
1814 STRIP_NOPS (arg2);
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1819 if (TREE_CODE (arg1) == REAL_CST)
1821 enum machine_mode mode;
1822 REAL_VALUE_TYPE d1;
1823 REAL_VALUE_TYPE d2;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1826 bool inexact;
1827 tree t, type;
1829 /* The following codes are handled by real_arithmetic. */
1830 switch (code)
1832 case PLUS_EXPR:
1833 case MINUS_EXPR:
1834 case MULT_EXPR:
1835 case RDIV_EXPR:
1836 case MIN_EXPR:
1837 case MAX_EXPR:
1838 break;
1840 default:
1841 return NULL_TREE;
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1850 /* Don't perform operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1854 return NULL_TREE;
1856 /* Don't perform operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1861 return NULL_TREE;
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1866 return arg1;
1867 else if (REAL_VALUE_ISNAN (d2))
1868 return arg2;
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1880 return NULL_TREE;
1882 /* Don't constant fold this floating point operation if the
1883 result may depend upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1888 && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1890 return NULL_TREE;
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1895 return t;
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1903 tree t, type;
1904 int sat_p;
1905 bool overflow_p;
1907 /* The following codes are handled by fixed_arithmetic. */
1908 switch (code)
1910 case PLUS_EXPR:
1911 case MINUS_EXPR:
1912 case MULT_EXPR:
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1915 break;
1917 case LSHIFT_EXPR:
1918 case RSHIFT_EXPR:
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1921 f2.mode = SImode;
1922 break;
1924 default:
1925 return NULL_TREE;
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 return t;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1951 tree real, imag;
1953 switch (code)
1955 case PLUS_EXPR:
1956 case MINUS_EXPR:
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1959 break;
1961 case MULT_EXPR:
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1965 notrunc);
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1969 notrunc);
1970 break;
1972 case RDIV_EXPR:
1974 tree magsquared
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1978 notrunc);
1979 tree t1
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1983 notrunc);
1984 tree t2
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1988 notrunc);
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
1996 break;
1998 default:
1999 return NULL_TREE;
2002 if (real && imag)
2003 return build_complex (type, real, imag);
2006 return NULL_TREE;
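/* Added illustration (not part of the original source): the COMPLEX_CST
   RDIV_EXPR case above implements the textbook quotient
   (a+bi)/(c+di) = ((a*c + b*d) + (b*c - a*d)i) / (c*c + d*d).
   A minimal sketch with plain doubles standing in for the constant
   trees; the helper name is hypothetical.  */
#if 0
static void
complex_div_sketch (double a, double b, double c, double d,
                    double *re, double *im)
{
  double magsquared = c * c + d * d;   /* |c+di|^2, as computed above */
  *re = (a * c + b * d) / magsquared;  /* t1 / magsquared */
  *im = (b * c - a * d) / magsquared;  /* t2 / magsquared */
}
#endif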
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2012 tree
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, a la int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2023 tree
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2032 TREE_TYPE (arg1)));
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2041 return arg1;
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2043 return arg0;
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 return arg0;
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2053 return arg1;
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
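/* Added usage sketch (illustrative only): with two INTEGER_CST
   operands size_binop folds immediately, e.g. the byte offset of
   element 3 in an array of 8-byte elements becomes the sizetype
   constant 24.  */
#if 0
tree off = size_binop (MULT_EXPR, size_int (3), size_int (8));
/* The fast paths above likewise hand back the other operand for a
   zero addend or a first factor of one, with no new tree built.  */
#endif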
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in a signed type corresponding to the type of the operands. */
2067 tree
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2071 tree ctype;
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2081 ctype = ssizetype;
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2084 else
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2102 else
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2105 arg1, arg0)));
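/* Added worked example: for sizetype constants 2 and 5 the result
   type is ssizetype, and since 5 > 2 the final branch computes
   0 - (ssizetype) (5 - 2), i.e. -3, so no unsigned wrap-around value
   ever reaches the caller.  */
#if 0
tree d = size_diffop (size_int (2), size_int (5));  /* ssizetype -3 */
#endif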
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2111 static tree
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2114 tree t;
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with the same signedness
2124 and unchanged precision.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2142 return t;
2145 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2146 to an integer type. */
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2151 int overflow = 0;
2152 tree t;
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2167 switch (code)
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2173 default:
2174 gcc_unreachable ();
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2180 overflow = 1;
2181 high = 0;
2182 low = 0;
2185 /* See if R is less than the lower bound or greater than the
2186 upper bound. */
2188 if (! overflow)
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2194 overflow = 1;
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2200 if (! overflow)
2202 tree ut = TYPE_MAX_VALUE (type);
2203 if (ut)
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2208 overflow = 1;
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2215 if (! overflow)
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2220 return t;
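/* Added behavior sketch (assumes a 32-bit int target type): a plain C
   stand-in for the saturation rules implemented above.  */
#if 0
#include <limits.h>
static int
real_to_int_sketch (double x)
{
  if (x != x)               return 0;        /* NaN -> 0, overflow set */
  if (x < (double) INT_MIN) return INT_MIN;  /* saturate below */
  if (x > (double) INT_MAX) return INT_MAX;  /* saturate above */
  return (int) x;                            /* truncate toward zero */
}
#endif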
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2226 static tree
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2229 tree t;
2230 double_int temp, temp_trunc;
2231 unsigned int mode;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2248 else
2250 temp.low = 0;
2251 temp.high = 0;
2252 temp_trunc.low = 0;
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0:
2257 if the fractional bits are not zero, add 1 to temp. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2261 double_int one;
2262 one.low = 1;
2263 one.high = 0;
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2270 (temp.high < 0
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2275 return t;
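/* Added worked example: a signed fixed-point -2.5 with fbit == 8 has
   the data word -640.  The right shift by fbit gives -3; shifting back
   left gives -768 != -640, so the fractional bits were nonzero and 1
   is added, yielding -2, i.e. rounding toward 0 as required.  */
#if 0
int w = -640;        /* -2.5 in a Q.8 format (assumes arithmetic shift) */
int t = w >> 8;      /* -3 */
int back = t << 8;   /* -768 != -640, so round toward zero */
int r = t + 1;       /* -2 */
#endif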
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2281 static tree
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2285 tree t;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2291 return t;
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2297 static tree
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2301 tree t;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2309 return t;
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2315 static tree
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2319 tree t;
2320 bool overflow_p;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 return t;
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2340 static tree
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 return t;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 return t;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2393 static tree
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2397 return arg1;
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2400 || TREE_CODE (type) == OFFSET_TYPE)
2402 if (TREE_CODE (arg1) == INTEGER_CST)
2403 return fold_convert_const_int_from_int (type, arg1);
2404 else if (TREE_CODE (arg1) == REAL_CST)
2405 return fold_convert_const_int_from_real (code, type, arg1);
2406 else if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_int_from_fixed (type, arg1);
2409 else if (TREE_CODE (type) == REAL_TYPE)
2411 if (TREE_CODE (arg1) == INTEGER_CST)
2412 return build_real_from_int_cst (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_real_from_real (type, arg1);
2415 else if (TREE_CODE (arg1) == FIXED_CST)
2416 return fold_convert_const_real_from_fixed (type, arg1);
2418 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2420 if (TREE_CODE (arg1) == FIXED_CST)
2421 return fold_convert_const_fixed_from_fixed (type, arg1);
2422 else if (TREE_CODE (arg1) == INTEGER_CST)
2423 return fold_convert_const_fixed_from_int (type, arg1);
2424 else if (TREE_CODE (arg1) == REAL_CST)
2425 return fold_convert_const_fixed_from_real (type, arg1);
2427 return NULL_TREE;
2430 /* Construct a vector of zero elements of vector type TYPE. */
2432 static tree
2433 build_zero_vector (tree type)
2435 tree elem, list;
2436 int i, units;
2438 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2439 units = TYPE_VECTOR_SUBPARTS (type);
2441 list = NULL_TREE;
2442 for (i = 0; i < units; i++)
2443 list = tree_cons (NULL_TREE, elem, list);
2444 return build_vector (type, list);
2447 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2449 bool
2450 fold_convertible_p (const_tree type, const_tree arg)
2452 tree orig = TREE_TYPE (arg);
2454 if (type == orig)
2455 return true;
2457 if (TREE_CODE (arg) == ERROR_MARK
2458 || TREE_CODE (type) == ERROR_MARK
2459 || TREE_CODE (orig) == ERROR_MARK)
2460 return false;
2462 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2463 return true;
2465 switch (TREE_CODE (type))
2467 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2468 case POINTER_TYPE: case REFERENCE_TYPE:
2469 case OFFSET_TYPE:
2470 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2471 || TREE_CODE (orig) == OFFSET_TYPE)
2472 return true;
2473 return (TREE_CODE (orig) == VECTOR_TYPE
2474 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2476 case REAL_TYPE:
2477 case FIXED_POINT_TYPE:
2478 case COMPLEX_TYPE:
2479 case VECTOR_TYPE:
2480 case VOID_TYPE:
2481 return TREE_CODE (type) == TREE_CODE (orig);
2483 default:
2484 return false;
2488 /* Convert expression ARG to type TYPE. Used by the middle-end for
2489 simple conversions in preference to calling the front-end's convert. */
2491 tree
2492 fold_convert (tree type, tree arg)
2494 tree orig = TREE_TYPE (arg);
2495 tree tem;
2497 if (type == orig)
2498 return arg;
2500 if (TREE_CODE (arg) == ERROR_MARK
2501 || TREE_CODE (type) == ERROR_MARK
2502 || TREE_CODE (orig) == ERROR_MARK)
2503 return error_mark_node;
2505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2506 return fold_build1 (NOP_EXPR, type, arg);
2508 switch (TREE_CODE (type))
2510 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case OFFSET_TYPE:
2513 if (TREE_CODE (arg) == INTEGER_CST)
2515 tem = fold_convert_const (NOP_EXPR, type, arg);
2516 if (tem != NULL_TREE)
2517 return tem;
2519 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 || TREE_CODE (orig) == OFFSET_TYPE)
2521 return fold_build1 (NOP_EXPR, type, arg);
2522 if (TREE_CODE (orig) == COMPLEX_TYPE)
2524 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2525 return fold_convert (type, tem);
2527 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2528 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2529 return fold_build1 (NOP_EXPR, type, arg);
2531 case REAL_TYPE:
2532 if (TREE_CODE (arg) == INTEGER_CST)
2534 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2535 if (tem != NULL_TREE)
2536 return tem;
2538 else if (TREE_CODE (arg) == REAL_CST)
2540 tem = fold_convert_const (NOP_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2542 return tem;
2544 else if (TREE_CODE (arg) == FIXED_CST)
2546 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2548 return tem;
2551 switch (TREE_CODE (orig))
2553 case INTEGER_TYPE:
2554 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2555 case POINTER_TYPE: case REFERENCE_TYPE:
2556 return fold_build1 (FLOAT_EXPR, type, arg);
2558 case REAL_TYPE:
2559 return fold_build1 (NOP_EXPR, type, arg);
2561 case FIXED_POINT_TYPE:
2562 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2564 case COMPLEX_TYPE:
2565 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2566 return fold_convert (type, tem);
2568 default:
2569 gcc_unreachable ();
2572 case FIXED_POINT_TYPE:
2573 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2574 || TREE_CODE (arg) == REAL_CST)
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2578 return tem;
2581 switch (TREE_CODE (orig))
2583 case FIXED_POINT_TYPE:
2584 case INTEGER_TYPE:
2585 case ENUMERAL_TYPE:
2586 case BOOLEAN_TYPE:
2587 case REAL_TYPE:
2588 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2590 case COMPLEX_TYPE:
2591 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2592 return fold_convert (type, tem);
2594 default:
2595 gcc_unreachable ();
2598 case COMPLEX_TYPE:
2599 switch (TREE_CODE (orig))
2601 case INTEGER_TYPE:
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case REAL_TYPE:
2605 case FIXED_POINT_TYPE:
2606 return build2 (COMPLEX_EXPR, type,
2607 fold_convert (TREE_TYPE (type), arg),
2608 fold_convert (TREE_TYPE (type), integer_zero_node));
2609 case COMPLEX_TYPE:
2611 tree rpart, ipart;
2613 if (TREE_CODE (arg) == COMPLEX_EXPR)
2615 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2616 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2617 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2620 arg = save_expr (arg);
2621 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2623 rpart = fold_convert (TREE_TYPE (type), rpart);
2624 ipart = fold_convert (TREE_TYPE (type), ipart);
2625 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2628 default:
2629 gcc_unreachable ();
2632 case VECTOR_TYPE:
2633 if (integer_zerop (arg))
2634 return build_zero_vector (type);
2635 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2636 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2637 || TREE_CODE (orig) == VECTOR_TYPE);
2638 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2640 case VOID_TYPE:
2641 tem = fold_ignored_result (arg);
2642 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2643 return tem;
2644 return fold_build1 (NOP_EXPR, type, tem);
2646 default:
2647 gcc_unreachable ();
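/* Added usage sketch (hypothetical operand trees, illustrative only): */
#if 0
/* An INTEGER_CST converts directly to a REAL_CST ... */
tree two = fold_convert (double_type_node,
                         build_int_cst (integer_type_node, 2));
/* ... and a COMPLEX_TYPE operand converts through its real part, per
   the REAL_TYPE/COMPLEX_TYPE arm above; some_complex_expr is a
   placeholder name.  */
tree re = fold_convert (double_type_node, some_complex_expr);
#endif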
2651 /* Return false if expr can be assumed not to be an lvalue, true
2652 otherwise. */
2654 static bool
2655 maybe_lvalue_p (const_tree x)
2657 /* We only need to wrap lvalue tree codes. */
2658 switch (TREE_CODE (x))
2660 case VAR_DECL:
2661 case PARM_DECL:
2662 case RESULT_DECL:
2663 case LABEL_DECL:
2664 case FUNCTION_DECL:
2665 case SSA_NAME:
2667 case COMPONENT_REF:
2668 case INDIRECT_REF:
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2671 case ARRAY_REF:
2672 case ARRAY_RANGE_REF:
2673 case BIT_FIELD_REF:
2674 case OBJ_TYPE_REF:
2676 case REALPART_EXPR:
2677 case IMAGPART_EXPR:
2678 case PREINCREMENT_EXPR:
2679 case PREDECREMENT_EXPR:
2680 case SAVE_EXPR:
2681 case TRY_CATCH_EXPR:
2682 case WITH_CLEANUP_EXPR:
2683 case COMPOUND_EXPR:
2684 case MODIFY_EXPR:
2685 case GIMPLE_MODIFY_STMT:
2686 case TARGET_EXPR:
2687 case COND_EXPR:
2688 case BIND_EXPR:
2689 case MIN_EXPR:
2690 case MAX_EXPR:
2691 break;
2693 default:
2694 /* Assume the worst for front-end tree codes. */
2695 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2696 break;
2697 return false;
2700 return true;
2703 /* Return an expr equal to X but certainly not valid as an lvalue. */
2705 tree
2706 non_lvalue (tree x)
2708 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2709 us. */
2710 if (in_gimple_form)
2711 return x;
2713 if (! maybe_lvalue_p (x))
2714 return x;
2715 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2718 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2719 Zero means allow extended lvalues. */
2721 int pedantic_lvalues;
2723 /* When pedantic, return an expr equal to X but certainly not valid as a
2724 pedantic lvalue. Otherwise, return X. */
2726 static tree
2727 pedantic_non_lvalue (tree x)
2729 if (pedantic_lvalues)
2730 return non_lvalue (x);
2731 else
2732 return x;
2735 /* Given a tree comparison code, return the code that is the logical inverse
2736 of the given code. It is not safe to do this for floating-point
2737 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2738 saying whether NaNs are honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2740 enum tree_code
2741 invert_tree_comparison (enum tree_code code, bool honor_nans)
2743 if (honor_nans && flag_trapping_math)
2744 return ERROR_MARK;
2746 switch (code)
2748 case EQ_EXPR:
2749 return NE_EXPR;
2750 case NE_EXPR:
2751 return EQ_EXPR;
2752 case GT_EXPR:
2753 return honor_nans ? UNLE_EXPR : LE_EXPR;
2754 case GE_EXPR:
2755 return honor_nans ? UNLT_EXPR : LT_EXPR;
2756 case LT_EXPR:
2757 return honor_nans ? UNGE_EXPR : GE_EXPR;
2758 case LE_EXPR:
2759 return honor_nans ? UNGT_EXPR : GT_EXPR;
2760 case LTGT_EXPR:
2761 return UNEQ_EXPR;
2762 case UNEQ_EXPR:
2763 return LTGT_EXPR;
2764 case UNGT_EXPR:
2765 return LE_EXPR;
2766 case UNGE_EXPR:
2767 return LT_EXPR;
2768 case UNLT_EXPR:
2769 return GE_EXPR;
2770 case UNLE_EXPR:
2771 return GT_EXPR;
2772 case ORDERED_EXPR:
2773 return UNORDERED_EXPR;
2774 case UNORDERED_EXPR:
2775 return ORDERED_EXPR;
2776 default:
2777 gcc_unreachable ();
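/* Added example: with HONOR_NANS false, LT_EXPR inverts to GE_EXPR;
   with HONOR_NANS true it must invert to UNGE_EXPR, since !(x < y)
   also holds when either operand is a NaN; with NaNs plus trapping
   math no safe inverse exists, hence ERROR_MARK.  */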
2781 /* Similar, but return the comparison that results if the operands are
2782 swapped. This is safe for floating-point. */
2784 enum tree_code
2785 swap_tree_comparison (enum tree_code code)
2787 switch (code)
2789 case EQ_EXPR:
2790 case NE_EXPR:
2791 case ORDERED_EXPR:
2792 case UNORDERED_EXPR:
2793 case LTGT_EXPR:
2794 case UNEQ_EXPR:
2795 return code;
2796 case GT_EXPR:
2797 return LT_EXPR;
2798 case GE_EXPR:
2799 return LE_EXPR;
2800 case LT_EXPR:
2801 return GT_EXPR;
2802 case LE_EXPR:
2803 return GE_EXPR;
2804 case UNGT_EXPR:
2805 return UNLT_EXPR;
2806 case UNGE_EXPR:
2807 return UNLE_EXPR;
2808 case UNLT_EXPR:
2809 return UNGT_EXPR;
2810 case UNLE_EXPR:
2811 return UNGE_EXPR;
2812 default:
2813 gcc_unreachable ();
2818 /* Convert a comparison tree code from an enum tree_code representation
2819 into a compcode bit-based encoding. This function is the inverse of
2820 compcode_to_comparison. */
2822 static enum comparison_code
2823 comparison_to_compcode (enum tree_code code)
2825 switch (code)
2827 case LT_EXPR:
2828 return COMPCODE_LT;
2829 case EQ_EXPR:
2830 return COMPCODE_EQ;
2831 case LE_EXPR:
2832 return COMPCODE_LE;
2833 case GT_EXPR:
2834 return COMPCODE_GT;
2835 case NE_EXPR:
2836 return COMPCODE_NE;
2837 case GE_EXPR:
2838 return COMPCODE_GE;
2839 case ORDERED_EXPR:
2840 return COMPCODE_ORD;
2841 case UNORDERED_EXPR:
2842 return COMPCODE_UNORD;
2843 case UNLT_EXPR:
2844 return COMPCODE_UNLT;
2845 case UNEQ_EXPR:
2846 return COMPCODE_UNEQ;
2847 case UNLE_EXPR:
2848 return COMPCODE_UNLE;
2849 case UNGT_EXPR:
2850 return COMPCODE_UNGT;
2851 case LTGT_EXPR:
2852 return COMPCODE_LTGT;
2853 case UNGE_EXPR:
2854 return COMPCODE_UNGE;
2855 default:
2856 gcc_unreachable ();
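/* Added note on the encoding: a compcode is a bit set over the four
   mutually exclusive outcomes LT (1), EQ (2), GT (4) and UNORD (8);
   for instance COMPCODE_LE == LT|EQ == 3 and COMPCODE_NE ==
   LT|GT|UNORD == 13.  ANDing or ORing two encodings therefore forms
   the conjunction or disjunction of the predicates, which is exactly
   what combine_comparisons below exploits.  */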
2860 /* Convert a compcode bit-based encoding of a comparison operator back
2861 to GCC's enum tree_code representation. This function is the
2862 inverse of comparison_to_compcode. */
2864 static enum tree_code
2865 compcode_to_comparison (enum comparison_code code)
2867 switch (code)
2869 case COMPCODE_LT:
2870 return LT_EXPR;
2871 case COMPCODE_EQ:
2872 return EQ_EXPR;
2873 case COMPCODE_LE:
2874 return LE_EXPR;
2875 case COMPCODE_GT:
2876 return GT_EXPR;
2877 case COMPCODE_NE:
2878 return NE_EXPR;
2879 case COMPCODE_GE:
2880 return GE_EXPR;
2881 case COMPCODE_ORD:
2882 return ORDERED_EXPR;
2883 case COMPCODE_UNORD:
2884 return UNORDERED_EXPR;
2885 case COMPCODE_UNLT:
2886 return UNLT_EXPR;
2887 case COMPCODE_UNEQ:
2888 return UNEQ_EXPR;
2889 case COMPCODE_UNLE:
2890 return UNLE_EXPR;
2891 case COMPCODE_UNGT:
2892 return UNGT_EXPR;
2893 case COMPCODE_LTGT:
2894 return LTGT_EXPR;
2895 case COMPCODE_UNGE:
2896 return UNGE_EXPR;
2897 default:
2898 gcc_unreachable ();
2902 /* Return a tree for the comparison which is the combination of
2903 doing the AND or OR (depending on CODE) of the two operations LCODE
2904 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2905 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2906 if this makes the transformation invalid. */
2908 tree
2909 combine_comparisons (enum tree_code code, enum tree_code lcode,
2910 enum tree_code rcode, tree truth_type,
2911 tree ll_arg, tree lr_arg)
2913 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2914 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2915 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2916 enum comparison_code compcode;
2918 switch (code)
2920 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2921 compcode = lcompcode & rcompcode;
2922 break;
2924 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2925 compcode = lcompcode | rcompcode;
2926 break;
2928 default:
2929 return NULL_TREE;
2932 if (!honor_nans)
2934 /* Eliminate unordered comparisons, as well as LTGT and ORD
2935 which are not used unless the mode has NaNs. */
2936 compcode &= ~COMPCODE_UNORD;
2937 if (compcode == COMPCODE_LTGT)
2938 compcode = COMPCODE_NE;
2939 else if (compcode == COMPCODE_ORD)
2940 compcode = COMPCODE_TRUE;
2942 else if (flag_trapping_math)
2944 /* Check that the original operation and the optimized ones will trap
2945 under the same condition. */
2946 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2947 && (lcompcode != COMPCODE_EQ)
2948 && (lcompcode != COMPCODE_ORD);
2949 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2950 && (rcompcode != COMPCODE_EQ)
2951 && (rcompcode != COMPCODE_ORD);
2952 bool trap = (compcode & COMPCODE_UNORD) == 0
2953 && (compcode != COMPCODE_EQ)
2954 && (compcode != COMPCODE_ORD);
2956 /* In a short-circuited boolean expression the LHS might be
2957 such that the RHS, if evaluated, will never trap. For
2958 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2959 if neither x nor y is NaN. (This is a mixed blessing: for
2960 example, the expression above will never trap, hence
2961 optimizing it to x < y would be invalid). */
2962 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2963 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2964 rtrap = false;
2966 /* If the comparison was short-circuited, and only the RHS
2967 trapped, we may now generate a spurious trap. */
2968 if (rtrap && !ltrap
2969 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2970 return NULL_TREE;
2972 /* If we changed the conditions that cause a trap, we lose. */
2973 if ((ltrap || rtrap) != trap)
2974 return NULL_TREE;
2977 if (compcode == COMPCODE_TRUE)
2978 return constant_boolean_node (true, truth_type);
2979 else if (compcode == COMPCODE_FALSE)
2980 return constant_boolean_node (false, truth_type);
2981 else
2982 return fold_build2 (compcode_to_comparison (compcode),
2983 truth_type, ll_arg, lr_arg);
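/* Added worked example: for (x < y) && (x > y) on integers the
   encodings are COMPCODE_LT (1) and COMPCODE_GT (4); 1 & 4 == 0 ==
   COMPCODE_FALSE, so the whole test folds to constant false.  For
   (x < y) || (x == y), 1 | 2 == COMPCODE_LE and the result is the
   single comparison x <= y.  */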
2986 /* Return nonzero if CODE is a tree code that represents a truth value. */
2988 static int
2989 truth_value_p (enum tree_code code)
2991 return (TREE_CODE_CLASS (code) == tcc_comparison
2992 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2993 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2994 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2997 /* Return nonzero if two operands (typically of the same tree node)
2998 are necessarily equal. If either argument has side-effects this
2999 function returns zero. FLAGS modifies behavior as follows:
3001 If OEP_ONLY_CONST is set, only return nonzero for constants.
3002 This function tests whether the operands are indistinguishable;
3003 it does not test whether they are equal using C's == operation.
3004 The distinction is important for IEEE floating point, because
3005 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3006 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3008 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3009 even though it may hold multiple values during a function.
3010 This is because a GCC tree node guarantees that nothing else is
3011 executed between the evaluation of its "operands" (which may often
3012 be evaluated in arbitrary order). Hence if the operands themselves
3013 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3014 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3015 unset means assuming isochronic (or instantaneous) tree equivalence.
3016 Unless comparing arbitrary expression trees, such as from different
3017 statements, this flag can usually be left unset.
3019 If OEP_PURE_SAME is set, then pure functions with identical arguments
3020 are considered the same. It is used when the caller has other ways
3021 to ensure that global memory is unchanged in between. */
3023 int
3024 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3026 /* If either is ERROR_MARK, they aren't equal. */
3027 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3028 return 0;
3030 /* Check equality of integer constants before bailing out due to
3031 precision differences. */
3032 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3033 return tree_int_cst_equal (arg0, arg1);
3035 /* If both types don't have the same signedness, then we can't consider
3036 them equal. We must check this before the STRIP_NOPS calls
3037 because they may change the signedness of the arguments. As pointers
3038 strictly don't have a signedness, require either two pointers or
3039 two non-pointers as well. */
3040 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3041 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3042 return 0;
3044 /* If both types don't have the same precision, then it is not safe
3045 to strip NOPs. */
3046 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3047 return 0;
3049 STRIP_NOPS (arg0);
3050 STRIP_NOPS (arg1);
3052 /* In case both args are comparisons but with different comparison
3053 code, try to swap the comparison operands of one arg to produce
3054 a match and compare that variant. */
3055 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3056 && COMPARISON_CLASS_P (arg0)
3057 && COMPARISON_CLASS_P (arg1))
3059 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3061 if (TREE_CODE (arg0) == swap_code)
3062 return operand_equal_p (TREE_OPERAND (arg0, 0),
3063 TREE_OPERAND (arg1, 1), flags)
3064 && operand_equal_p (TREE_OPERAND (arg0, 1),
3065 TREE_OPERAND (arg1, 0), flags);
3068 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3069 /* This is needed for conversions and for COMPONENT_REF.
3070 Might as well play it safe and always test this. */
3071 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3072 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3073 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3074 return 0;
3076 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3077 We don't care about side effects in that case because the SAVE_EXPR
3078 takes care of that for us. In all other cases, two expressions are
3079 equal if they have no side effects. If we have two identical
3080 expressions with side effects that should be treated the same due
3081 to the only side effects being identical SAVE_EXPR's, that will
3082 be detected in the recursive calls below. */
3083 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3084 && (TREE_CODE (arg0) == SAVE_EXPR
3085 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3086 return 1;
3088 /* Next handle constant cases, those for which we can return 1 even
3089 if ONLY_CONST is set. */
3090 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3091 switch (TREE_CODE (arg0))
3093 case INTEGER_CST:
3094 return tree_int_cst_equal (arg0, arg1);
3096 case FIXED_CST:
3097 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3098 TREE_FIXED_CST (arg1));
3100 case REAL_CST:
3101 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3102 TREE_REAL_CST (arg1)))
3103 return 1;
3106 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3108 /* If we do not distinguish between signed and unsigned zero,
3109 consider them equal. */
3110 if (real_zerop (arg0) && real_zerop (arg1))
3111 return 1;
3113 return 0;
3115 case VECTOR_CST:
3117 tree v1, v2;
3119 v1 = TREE_VECTOR_CST_ELTS (arg0);
3120 v2 = TREE_VECTOR_CST_ELTS (arg1);
3121 while (v1 && v2)
3123 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3124 flags))
3125 return 0;
3126 v1 = TREE_CHAIN (v1);
3127 v2 = TREE_CHAIN (v2);
3130 return v1 == v2;
3133 case COMPLEX_CST:
3134 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3135 flags)
3136 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3137 flags));
3139 case STRING_CST:
3140 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3141 && ! memcmp (TREE_STRING_POINTER (arg0),
3142 TREE_STRING_POINTER (arg1),
3143 TREE_STRING_LENGTH (arg0)));
3145 case ADDR_EXPR:
3146 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3147 0);
3148 default:
3149 break;
3152 if (flags & OEP_ONLY_CONST)
3153 return 0;
3155 /* Define macros to test an operand from arg0 and arg1 for equality and a
3156 variant that allows null and views null as being different from any
3157 non-null value. In the latter case, if either is null, they both
3158 must be; otherwise, do the normal comparison. */
3159 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3160 TREE_OPERAND (arg1, N), flags)
3162 #define OP_SAME_WITH_NULL(N) \
3163 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3164 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3166 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3168 case tcc_unary:
3169 /* Two conversions are equal only if signedness and modes match. */
3170 switch (TREE_CODE (arg0))
3172 CASE_CONVERT:
3173 case FIX_TRUNC_EXPR:
3174 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3175 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3176 return 0;
3177 break;
3178 default:
3179 break;
3182 return OP_SAME (0);
3185 case tcc_comparison:
3186 case tcc_binary:
3187 if (OP_SAME (0) && OP_SAME (1))
3188 return 1;
3190 /* For commutative ops, allow the other order. */
3191 return (commutative_tree_code (TREE_CODE (arg0))
3192 && operand_equal_p (TREE_OPERAND (arg0, 0),
3193 TREE_OPERAND (arg1, 1), flags)
3194 && operand_equal_p (TREE_OPERAND (arg0, 1),
3195 TREE_OPERAND (arg1, 0), flags));
3197 case tcc_reference:
3198 /* If either of the pointer (or reference) expressions we are
3199 dereferencing contain a side effect, these cannot be equal. */
3200 if (TREE_SIDE_EFFECTS (arg0)
3201 || TREE_SIDE_EFFECTS (arg1))
3202 return 0;
3204 switch (TREE_CODE (arg0))
3206 case INDIRECT_REF:
3207 case ALIGN_INDIRECT_REF:
3208 case MISALIGNED_INDIRECT_REF:
3209 case REALPART_EXPR:
3210 case IMAGPART_EXPR:
3211 return OP_SAME (0);
3213 case ARRAY_REF:
3214 case ARRAY_RANGE_REF:
3215 /* Operands 2 and 3 may be null.
3216 First compare the array index by value if it is constant, as we
3217 may have different types but the same value here. */
3218 return (OP_SAME (0)
3219 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3220 TREE_OPERAND (arg1, 1))
3221 || OP_SAME (1))
3222 && OP_SAME_WITH_NULL (2)
3223 && OP_SAME_WITH_NULL (3));
3225 case COMPONENT_REF:
3226 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3227 may be NULL when we're called to compare MEM_EXPRs. */
3228 return OP_SAME_WITH_NULL (0)
3229 && OP_SAME (1)
3230 && OP_SAME_WITH_NULL (2);
3232 case BIT_FIELD_REF:
3233 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3235 default:
3236 return 0;
3239 case tcc_expression:
3240 switch (TREE_CODE (arg0))
3242 case ADDR_EXPR:
3243 case TRUTH_NOT_EXPR:
3244 return OP_SAME (0);
3246 case TRUTH_ANDIF_EXPR:
3247 case TRUTH_ORIF_EXPR:
3248 return OP_SAME (0) && OP_SAME (1);
3250 case TRUTH_AND_EXPR:
3251 case TRUTH_OR_EXPR:
3252 case TRUTH_XOR_EXPR:
3253 if (OP_SAME (0) && OP_SAME (1))
3254 return 1;
3256 /* Otherwise take into account this is a commutative operation. */
3257 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3258 TREE_OPERAND (arg1, 1), flags)
3259 && operand_equal_p (TREE_OPERAND (arg0, 1),
3260 TREE_OPERAND (arg1, 0), flags));
3262 case COND_EXPR:
3263 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3265 default:
3266 return 0;
3269 case tcc_vl_exp:
3270 switch (TREE_CODE (arg0))
3272 case CALL_EXPR:
3273 /* If the CALL_EXPRs call different functions, then they
3274 clearly cannot be equal. */
3275 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3276 flags))
3277 return 0;
3280 unsigned int cef = call_expr_flags (arg0);
3281 if (flags & OEP_PURE_SAME)
3282 cef &= ECF_CONST | ECF_PURE;
3283 else
3284 cef &= ECF_CONST;
3285 if (!cef)
3286 return 0;
3289 /* Now see if all the arguments are the same. */
3291 const_call_expr_arg_iterator iter0, iter1;
3292 const_tree a0, a1;
3293 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3294 a1 = first_const_call_expr_arg (arg1, &iter1);
3295 a0 && a1;
3296 a0 = next_const_call_expr_arg (&iter0),
3297 a1 = next_const_call_expr_arg (&iter1))
3298 if (! operand_equal_p (a0, a1, flags))
3299 return 0;
3301 /* If we get here and both argument lists are exhausted
3302 then the CALL_EXPRs are equal. */
3303 return ! (a0 || a1);
3305 default:
3306 return 0;
3309 case tcc_declaration:
3310 /* Consider __builtin_sqrt equal to sqrt. */
3311 return (TREE_CODE (arg0) == FUNCTION_DECL
3312 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3313 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3314 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3316 default:
3317 return 0;
3320 #undef OP_SAME
3321 #undef OP_SAME_WITH_NULL
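/* Added usage sketch (type, a and b are placeholder trees): */
#if 0
/* Commutativity is honored, so a + b matches b + a ... */
int eq = operand_equal_p (build2 (PLUS_EXPR, type, a, b),
                          build2 (PLUS_EXPR, type, b, a), 0);  /* 1 */
/* ... whereas passing OEP_ONLY_CONST would make the call return 0,
   since the operands are not constants.  */
#endif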
3324 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3325 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3327 When in doubt, return 0. */
3329 static int
3330 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3332 int unsignedp1, unsignedpo;
3333 tree primarg0, primarg1, primother;
3334 unsigned int correct_width;
3336 if (operand_equal_p (arg0, arg1, 0))
3337 return 1;
3339 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3340 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3341 return 0;
3343 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3344 and see if the inner values are the same. This removes any
3345 signedness comparison, which doesn't matter here. */
3346 primarg0 = arg0, primarg1 = arg1;
3347 STRIP_NOPS (primarg0);
3348 STRIP_NOPS (primarg1);
3349 if (operand_equal_p (primarg0, primarg1, 0))
3350 return 1;
3352 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3353 actual comparison operand, ARG0.
3355 First throw away any conversions to wider types
3356 already present in the operands. */
3358 primarg1 = get_narrower (arg1, &unsignedp1);
3359 primother = get_narrower (other, &unsignedpo);
3361 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3362 if (unsignedp1 == unsignedpo
3363 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3364 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3366 tree type = TREE_TYPE (arg0);
3368 /* Make sure shorter operand is extended the right way
3369 to match the longer operand. */
3370 primarg1 = fold_convert (signed_or_unsigned_type_for
3371 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3373 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3374 return 1;
3377 return 0;
3380 /* See if ARG is an expression that is either a comparison or is performing
3381 arithmetic on comparisons. The comparisons must only be comparing
3382 two different values, which will be stored in *CVAL1 and *CVAL2; if
3383 they are nonzero it means that some operands have already been found.
3384 No variables may be used anywhere else in the expression except in the
3385 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3386 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3388 If this is true, return 1. Otherwise, return zero. */
3390 static int
3391 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3393 enum tree_code code = TREE_CODE (arg);
3394 enum tree_code_class class = TREE_CODE_CLASS (code);
3396 /* We can handle some of the tcc_expression cases here. */
3397 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3398 class = tcc_unary;
3399 else if (class == tcc_expression
3400 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3401 || code == COMPOUND_EXPR))
3402 class = tcc_binary;
3404 else if (class == tcc_expression && code == SAVE_EXPR
3405 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3407 /* If we've already found a CVAL1 or CVAL2, this expression is
3408 too complex to handle. */
3409 if (*cval1 || *cval2)
3410 return 0;
3412 class = tcc_unary;
3413 *save_p = 1;
3416 switch (class)
3418 case tcc_unary:
3419 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3421 case tcc_binary:
3422 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3423 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3424 cval1, cval2, save_p));
3426 case tcc_constant:
3427 return 1;
3429 case tcc_expression:
3430 if (code == COND_EXPR)
3431 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3432 cval1, cval2, save_p)
3433 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3434 cval1, cval2, save_p)
3435 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3436 cval1, cval2, save_p));
3437 return 0;
3439 case tcc_comparison:
3440 /* First see if we can handle the first operand, then the second. For
3441 the second operand, we know *CVAL1 can't be zero. It must be that
3442 one side of the comparison is each of the values; test for the
3443 case where this isn't true by failing if the two operands
3444 are the same. */
3446 if (operand_equal_p (TREE_OPERAND (arg, 0),
3447 TREE_OPERAND (arg, 1), 0))
3448 return 0;
3450 if (*cval1 == 0)
3451 *cval1 = TREE_OPERAND (arg, 0);
3452 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3454 else if (*cval2 == 0)
3455 *cval2 = TREE_OPERAND (arg, 0);
3456 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3458 else
3459 return 0;
3461 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3463 else if (*cval2 == 0)
3464 *cval2 = TREE_OPERAND (arg, 1);
3465 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3467 else
3468 return 0;
3470 return 1;
3472 default:
3473 return 0;
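/* Added example: for ARG == (a < b) || (a == b) the walk records
   *cval1 = a and *cval2 = b and returns 1, because every comparison
   uses only those two values; (a < b) || (a < c) fails since a third
   operand c appears.  */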
3477 /* ARG is a tree that is known to contain just arithmetic operations and
3478 comparisons. Evaluate the operations in the tree substituting NEW0 for
3479 any occurrence of OLD0 as an operand of a comparison and likewise for
3480 NEW1 and OLD1. */
3482 static tree
3483 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3485 tree type = TREE_TYPE (arg);
3486 enum tree_code code = TREE_CODE (arg);
3487 enum tree_code_class class = TREE_CODE_CLASS (code);
3489 /* We can handle some of the tcc_expression cases here. */
3490 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3491 class = tcc_unary;
3492 else if (class == tcc_expression
3493 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3494 class = tcc_binary;
3496 switch (class)
3498 case tcc_unary:
3499 return fold_build1 (code, type,
3500 eval_subst (TREE_OPERAND (arg, 0),
3501 old0, new0, old1, new1));
3503 case tcc_binary:
3504 return fold_build2 (code, type,
3505 eval_subst (TREE_OPERAND (arg, 0),
3506 old0, new0, old1, new1),
3507 eval_subst (TREE_OPERAND (arg, 1),
3508 old0, new0, old1, new1));
3510 case tcc_expression:
3511 switch (code)
3513 case SAVE_EXPR:
3514 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3516 case COMPOUND_EXPR:
3517 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3519 case COND_EXPR:
3520 return fold_build3 (code, type,
3521 eval_subst (TREE_OPERAND (arg, 0),
3522 old0, new0, old1, new1),
3523 eval_subst (TREE_OPERAND (arg, 1),
3524 old0, new0, old1, new1),
3525 eval_subst (TREE_OPERAND (arg, 2),
3526 old0, new0, old1, new1));
3527 default:
3528 break;
3530 /* Fall through - ??? */
3532 case tcc_comparison:
3534 tree arg0 = TREE_OPERAND (arg, 0);
3535 tree arg1 = TREE_OPERAND (arg, 1);
3537 /* We need to check both for exact equality and tree equality. The
3538 former will be true if the operand has a side-effect. In that
3539 case, we know the operand occurred exactly once. */
3541 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3542 arg0 = new0;
3543 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3544 arg0 = new1;
3546 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3547 arg1 = new0;
3548 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3549 arg1 = new1;
3551 return fold_build2 (code, type, arg0, arg1);
3554 default:
3555 return arg;
3559 /* Return a tree for the case when the result of an expression is RESULT
3560 converted to TYPE and OMITTED was previously an operand of the expression
3561 but is now not needed (e.g., we folded OMITTED * 0).
3563 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3564 the conversion of RESULT to TYPE. */
3566 tree
3567 omit_one_operand (tree type, tree result, tree omitted)
3569 tree t = fold_convert (type, result);
3571 /* If the resulting operand is an empty statement, just return the omitted
3572 statement cast to void. */
3573 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3574 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3576 if (TREE_SIDE_EFFECTS (omitted))
3577 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3579 return non_lvalue (t);
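/* Added example: when fold rewrites f () * 0 to 0 but the call has
   side effects, omit_one_operand (type, integer_zero_node, the_call)
   yields the COMPOUND_EXPR (f (), 0) so the call is still evaluated;
   without side effects the constant is simply returned as a
   non-lvalue.  */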
3582 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3584 static tree
3585 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3587 tree t = fold_convert (type, result);
3589 /* If the resulting operand is an empty statement, just return the omitted
3590 statement cast to void. */
3591 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3592 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3594 if (TREE_SIDE_EFFECTS (omitted))
3595 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3597 return pedantic_non_lvalue (t);
3600 /* Return a tree for the case when the result of an expression is RESULT
3601 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3602 of the expression but are now not needed.
3604 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3605 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3606 evaluated before OMITTED2. Otherwise, if neither has side effects,
3607 just do the conversion of RESULT to TYPE. */
3609 tree
3610 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3612 tree t = fold_convert (type, result);
3614 if (TREE_SIDE_EFFECTS (omitted2))
3615 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3616 if (TREE_SIDE_EFFECTS (omitted1))
3617 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3619 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3623 /* Return a simplified tree node for the truth-negation of ARG. This
3624 never alters ARG itself. We assume that ARG is an operation that
3625 returns a truth value (0 or 1).
3627 FIXME: one would think we would fold the result, but it causes
3628 problems with the dominator optimizer. */
3630 tree
3631 fold_truth_not_expr (tree arg)
3633 tree type = TREE_TYPE (arg);
3634 enum tree_code code = TREE_CODE (arg);
3636 /* If this is a comparison, we can simply invert it, except for
3637 floating-point non-equality comparisons, in which case we just
3638 enclose a TRUTH_NOT_EXPR around what we have. */
3640 if (TREE_CODE_CLASS (code) == tcc_comparison)
3642 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3643 if (FLOAT_TYPE_P (op_type)
3644 && flag_trapping_math
3645 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3646 && code != NE_EXPR && code != EQ_EXPR)
3647 return NULL_TREE;
3648 else
3650 code = invert_tree_comparison (code,
3651 HONOR_NANS (TYPE_MODE (op_type)));
3652 if (code == ERROR_MARK)
3653 return NULL_TREE;
3654 else
3655 return build2 (code, type,
3656 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3660 switch (code)
3662 case INTEGER_CST:
3663 return constant_boolean_node (integer_zerop (arg), type);
3665 case TRUTH_AND_EXPR:
3666 return build2 (TRUTH_OR_EXPR, type,
3667 invert_truthvalue (TREE_OPERAND (arg, 0)),
3668 invert_truthvalue (TREE_OPERAND (arg, 1)));
3670 case TRUTH_OR_EXPR:
3671 return build2 (TRUTH_AND_EXPR, type,
3672 invert_truthvalue (TREE_OPERAND (arg, 0)),
3673 invert_truthvalue (TREE_OPERAND (arg, 1)));
3675 case TRUTH_XOR_EXPR:
3676 /* Here we can invert either operand. We invert the first operand
3677 unless the second operand is a TRUTH_NOT_EXPR in which case our
3678 result is the XOR of the first operand with the inside of the
3679 negation of the second operand. */
3681 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3682 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3683 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3684 else
3685 return build2 (TRUTH_XOR_EXPR, type,
3686 invert_truthvalue (TREE_OPERAND (arg, 0)),
3687 TREE_OPERAND (arg, 1));
3689 case TRUTH_ANDIF_EXPR:
3690 return build2 (TRUTH_ORIF_EXPR, type,
3691 invert_truthvalue (TREE_OPERAND (arg, 0)),
3692 invert_truthvalue (TREE_OPERAND (arg, 1)));
3694 case TRUTH_ORIF_EXPR:
3695 return build2 (TRUTH_ANDIF_EXPR, type,
3696 invert_truthvalue (TREE_OPERAND (arg, 0)),
3697 invert_truthvalue (TREE_OPERAND (arg, 1)));
3699 case TRUTH_NOT_EXPR:
3700 return TREE_OPERAND (arg, 0);
3702 case COND_EXPR:
3704 tree arg1 = TREE_OPERAND (arg, 1);
3705 tree arg2 = TREE_OPERAND (arg, 2);
3706 /* A COND_EXPR may have a throw as one operand, which
3707 then has void type. Just leave void operands
3708 as they are. */
3709 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3710 VOID_TYPE_P (TREE_TYPE (arg1))
3711 ? arg1 : invert_truthvalue (arg1),
3712 VOID_TYPE_P (TREE_TYPE (arg2))
3713 ? arg2 : invert_truthvalue (arg2));
3716 case COMPOUND_EXPR:
3717 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3718 invert_truthvalue (TREE_OPERAND (arg, 1)));
3720 case NON_LVALUE_EXPR:
3721 return invert_truthvalue (TREE_OPERAND (arg, 0));
3723 case NOP_EXPR:
3724 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3725 return build1 (TRUTH_NOT_EXPR, type, arg);
3727 case CONVERT_EXPR:
3728 case FLOAT_EXPR:
3729 return build1 (TREE_CODE (arg), type,
3730 invert_truthvalue (TREE_OPERAND (arg, 0)));
3732 case BIT_AND_EXPR:
3733 if (!integer_onep (TREE_OPERAND (arg, 1)))
3734 break;
3735 return build2 (EQ_EXPR, type, arg,
3736 build_int_cst (type, 0));
3738 case SAVE_EXPR:
3739 return build1 (TRUTH_NOT_EXPR, type, arg);
3741 case CLEANUP_POINT_EXPR:
3742 return build1 (CLEANUP_POINT_EXPR, type,
3743 invert_truthvalue (TREE_OPERAND (arg, 0)));
3745 default:
3746 break;
3749 return NULL_TREE;
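/* Added example: the TRUTH_AND_EXPR arm applies De Morgan's law,
   rewriting !(a && b) as !a || !b, while the comparison arm turns
   !(x < y) into x >= y (or the unordered variant under NaNs) only
   when invert_tree_comparison deems it safe, otherwise returning
   NULL_TREE so invert_truthvalue below falls back to a plain
   TRUTH_NOT_EXPR.  */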
3752 /* Return a simplified tree node for the truth-negation of ARG. This
3753 never alters ARG itself. We assume that ARG is an operation that
3754 returns a truth value (0 or 1).
3756 FIXME: one would think we would fold the result, but it causes
3757 problems with the dominator optimizer. */
3759 tree
3760 invert_truthvalue (tree arg)
3762 tree tem;
3764 if (TREE_CODE (arg) == ERROR_MARK)
3765 return arg;
3767 tem = fold_truth_not_expr (arg);
3768 if (!tem)
3769 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3771 return tem;
3774 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3775 operands are another bit-wise operation with a common input. If so,
3776 distribute the bit operations to save an operation and possibly two if
3777 constants are involved. For example, convert
3778 (A | B) & (A | C) into A | (B & C)
3779 Further simplification will occur if B and C are constants.
3781 If this optimization cannot be done, 0 will be returned. */
3783 static tree
3784 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3786 tree common;
3787 tree left, right;
3789 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3790 || TREE_CODE (arg0) == code
3791 || (TREE_CODE (arg0) != BIT_AND_EXPR
3792 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3793 return 0;
3795 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3797 common = TREE_OPERAND (arg0, 0);
3798 left = TREE_OPERAND (arg0, 1);
3799 right = TREE_OPERAND (arg1, 1);
3801 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3803 common = TREE_OPERAND (arg0, 0);
3804 left = TREE_OPERAND (arg0, 1);
3805 right = TREE_OPERAND (arg1, 0);
3807 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3809 common = TREE_OPERAND (arg0, 1);
3810 left = TREE_OPERAND (arg0, 0);
3811 right = TREE_OPERAND (arg1, 1);
3813 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3815 common = TREE_OPERAND (arg0, 1);
3816 left = TREE_OPERAND (arg0, 0);
3817 right = TREE_OPERAND (arg1, 0);
3819 else
3820 return 0;
3822 return fold_build2 (TREE_CODE (arg0), type, common,
3823 fold_build2 (code, type, left, right));
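/* Added worked example: in (x | 4) & (x | 6) the common operand is x,
   so the result is x | (4 & 6); folding the inner constant gives
   x | 4, saving one bitwise operation outright and another once the
   constants combine.  */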
3826 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3827 with code CODE. This optimization is unsafe. */
3828 static tree
3829 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3831 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3832 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3834 /* (A / C) +- (B / C) -> (A +- B) / C. */
3835 if (mul0 == mul1
3836 && operand_equal_p (TREE_OPERAND (arg0, 1),
3837 TREE_OPERAND (arg1, 1), 0))
3838 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3839 fold_build2 (code, type,
3840 TREE_OPERAND (arg0, 0),
3841 TREE_OPERAND (arg1, 0)),
3842 TREE_OPERAND (arg0, 1));
3844 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3845 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3846 TREE_OPERAND (arg1, 0), 0)
3847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3848 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3850 REAL_VALUE_TYPE r0, r1;
3851 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3852 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3853 if (!mul0)
3854 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3855 if (!mul1)
3856 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3857 real_arithmetic (&r0, code, &r0, &r1);
3858 return fold_build2 (MULT_EXPR, type,
3859 TREE_OPERAND (arg0, 0),
3860 build_real (type, r0));
3863 return NULL_TREE;
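/* Added worked examples (valid only under the unsafe-math folding the
   comment above warns about):
     a/5.0 + b/5.0  ->  (a + b) / 5.0                  first pattern
     a/2.0 - a/4.0  ->  a * (0.5 - 0.25) == a * 0.25   second pattern  */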
3866 /* Subroutine for fold_truthop: decode a field reference.
3868 If EXP is a comparison reference, we return the innermost reference.
3870 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3871 set to the starting bit number.
3873 If the innermost field can be completely contained in a mode-sized
3874 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3876 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3877 otherwise it is not changed.
3879 *PUNSIGNEDP is set to the signedness of the field.
3881 *PMASK is set to the mask used. This is either contained in a
3882 BIT_AND_EXPR or derived from the width of the field.
3884 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3886 Return 0 if this is not a component reference or is one that we can't
3887 do anything with. */
3889 static tree
3890 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3891 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3892 int *punsignedp, int *pvolatilep,
3893 tree *pmask, tree *pand_mask)
3895 tree outer_type = 0;
3896 tree and_mask = 0;
3897 tree mask, inner, offset;
3898 tree unsigned_type;
3899 unsigned int precision;
3901 /* All the optimizations using this function assume integer fields.
3902 There are problems with FP fields since the type_for_size call
3903 below can fail for, e.g., XFmode. */
3904 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3905 return 0;
3907 /* We are interested in the bare arrangement of bits, so strip everything
3908 that doesn't affect the machine mode. However, record the type of the
3909 outermost expression if it may matter below. */
3910 if (CONVERT_EXPR_P (exp)
3911 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3912 outer_type = TREE_TYPE (exp);
3913 STRIP_NOPS (exp);
3915 if (TREE_CODE (exp) == BIT_AND_EXPR)
3917 and_mask = TREE_OPERAND (exp, 1);
3918 exp = TREE_OPERAND (exp, 0);
3919 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3920 if (TREE_CODE (and_mask) != INTEGER_CST)
3921 return 0;
3924 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3925 punsignedp, pvolatilep, false);
3926 if ((inner == exp && and_mask == 0)
3927 || *pbitsize < 0 || offset != 0
3928 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3929 return 0;
3931 /* If the number of bits in the reference is the same as the bitsize of
3932 the outer type, then the outer type gives the signedness. Otherwise
3933 (in case of a small bitfield) the signedness is unchanged. */
3934 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3935 *punsignedp = TYPE_UNSIGNED (outer_type);
3937 /* Compute the mask to access the bitfield. */
3938 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3939 precision = TYPE_PRECISION (unsigned_type);
3941 mask = build_int_cst_type (unsigned_type, -1);
3943 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3944 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3946 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3947 if (and_mask != 0)
3948 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3949 fold_convert (unsigned_type, and_mask), mask);
3951 *pmask = mask;
3952 *pand_mask = and_mask;
3953 return inner;
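/* Illustrative sketch, not part of this file: the mask computation above
   on a 32-bit unsigned value.  Shifting all-ones left and then right by
   PRECISION - BITSIZE leaves exactly the low BITSIZE bits set.  Assumes
   1 <= bitsize <= 32 so neither shift count is out of range.  */
#if 0
#include <stdint.h>

static uint32_t
field_mask (unsigned bitsize)
{
  const unsigned precision = 32;
  uint32_t mask = ~(uint32_t) 0;
  mask <<= precision - bitsize;   /* discard the high bits... */
  mask >>= precision - bitsize;   /* ...and shift zeros back in.  */
  return mask;                    /* e.g. field_mask (3) == 0x7 */
}
#endif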
3956 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3957 represents the sign bit of EXP's type. If EXP represents a sign
3958 or zero extension, also test VAL against the unextended type.
3959 The return value is the (sub)expression whose sign bit is VAL,
3960 or NULL_TREE otherwise. */
3962 static tree
3963 sign_bit_p (tree exp, const_tree val)
3965 unsigned HOST_WIDE_INT mask_lo, lo;
3966 HOST_WIDE_INT mask_hi, hi;
3967 int width;
3968 tree t;
3970 /* Tree EXP must have an integral type. */
3971 t = TREE_TYPE (exp);
3972 if (! INTEGRAL_TYPE_P (t))
3973 return NULL_TREE;
3975 /* Tree VAL must be an integer constant. */
3976 if (TREE_CODE (val) != INTEGER_CST
3977 || TREE_OVERFLOW (val))
3978 return NULL_TREE;
3980 width = TYPE_PRECISION (t);
3981 if (width > HOST_BITS_PER_WIDE_INT)
3983 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3984 lo = 0;
3986 mask_hi = ((unsigned HOST_WIDE_INT) -1
3987 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3988 mask_lo = -1;
3990 else
3992 hi = 0;
3993 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3995 mask_hi = 0;
3996 mask_lo = ((unsigned HOST_WIDE_INT) -1
3997 >> (HOST_BITS_PER_WIDE_INT - width));
4000 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4001 treat VAL as if it were unsigned. */
4002 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4003 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4004 return exp;
4006 /* Handle extension from a narrower type. */
4007 if (TREE_CODE (exp) == NOP_EXPR
4008 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4009 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4011 return NULL_TREE;
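/* Illustrative sketch, not part of this file: what "VAL is the sign bit"
   means for a 32-bit type, without the hi/lo double-word bookkeeping the
   function above needs for constants wider than a host word.  */
#if 0
#include <stdint.h>

static int
is_sign_bit_32 (uint32_t val)
{
  /* The sign bit of a 32-bit type is 1 << 31, i.e. 0x80000000.  */
  return val == (uint32_t) 1 << 31;
}
#endif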
4014 /* Subroutine for fold_truthop: determine if an operand is simple enough
4015 to be evaluated unconditionally. */
4017 static int
4018 simple_operand_p (const_tree exp)
4020 /* Strip any conversions that don't change the machine mode. */
4021 STRIP_NOPS (exp);
4023 return (CONSTANT_CLASS_P (exp)
4024 || TREE_CODE (exp) == SSA_NAME
4025 || (DECL_P (exp)
4026 && ! TREE_ADDRESSABLE (exp)
4027 && ! TREE_THIS_VOLATILE (exp)
4028 && ! DECL_NONLOCAL (exp)
4029 /* Don't regard global variables as simple. They may be
4030 allocated in ways unknown to the compiler (shared memory,
4031 #pragma weak, etc). */
4032 && ! TREE_PUBLIC (exp)
4033 && ! DECL_EXTERNAL (exp)
4034 /* Loading a static variable is unduly expensive, but global
4035 registers aren't expensive. */
4036 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4039 /* The following functions are subroutines to fold_range_test and allow it to
4040 try to change a logical combination of comparisons into a range test.
4042 For example, both
4043 X == 2 || X == 3 || X == 4 || X == 5
4044 and
4045 X >= 2 && X <= 5
4046 are converted to
4047 (unsigned) (X - 2) <= 3
4049 We describe each set of comparisons as being either inside or outside
4050 a range, using a variable named like IN_P, and then describe the
4051 range with a lower and upper bound. If one of the bounds is omitted,
4052 it represents either the highest or lowest value of the type.
4054 In the comments below, we represent a range by two numbers in brackets
4055 preceded by a "+" to designate being inside that range, or a "-" to
4056 designate being outside that range, so the condition can be inverted by
4057 flipping the prefix. An omitted bound is represented by a "-". For
4058 example, "- [-, 10]" means being outside the range starting at the lowest
4059 possible value and ending at 10, in other words, being greater than 10.
4060 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4061 always false.
4063 We set up things so that the missing bounds are handled in a consistent
4064 manner so neither a missing bound nor "true" and "false" need to be
4065 handled using a special case. */
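/* Illustrative sketch, not part of this file: the rewrite described above,
   checkable with plain unsigned arithmetic.  */
#if 0
#include <assert.h>

static void
check_range_rewrite (unsigned x)
{
  int chain = (x == 2 || x == 3 || x == 4 || x == 5);
  /* Subtracting the low bound makes the range start at zero; values
     below 2 wrap to huge unsigned numbers and fail the "<= 3" test.  */
  int range = (x - 2u) <= 3u;
  assert (chain == range);
}
#endif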
4067 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4068 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4069 and UPPER1_P are nonzero if the respective argument is an upper bound
4070 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4071 must be specified for a comparison. ARG1 will be converted to ARG0's
4072 type if both are specified. */
4074 static tree
4075 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4076 tree arg1, int upper1_p)
4078 tree tem;
4079 int result;
4080 int sgn0, sgn1;
4082 /* If neither arg represents infinity, do the normal operation.
4083 Else, if not a comparison, return infinity. Else handle the special
4084 comparison rules. Note that most of the cases below won't occur, but
4085 are handled for consistency. */
4087 if (arg0 != 0 && arg1 != 0)
4089 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4090 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4091 STRIP_NOPS (tem);
4092 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4095 if (TREE_CODE_CLASS (code) != tcc_comparison)
4096 return 0;
4098 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4099 for neither. In real mathematics, we cannot assume open-ended ranges
4100 are comparable. But this is computer arithmetic, where numbers are
4101 finite, so we can replace any unbounded bound by a value Z greater
4102 than any representable number. This permits us to treat unbounded
4103 ranges as equal. */
4104 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4105 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4106 switch (code)
4108 case EQ_EXPR:
4109 result = sgn0 == sgn1;
4110 break;
4111 case NE_EXPR:
4112 result = sgn0 != sgn1;
4113 break;
4114 case LT_EXPR:
4115 result = sgn0 < sgn1;
4116 break;
4117 case LE_EXPR:
4118 result = sgn0 <= sgn1;
4119 break;
4120 case GT_EXPR:
4121 result = sgn0 > sgn1;
4122 break;
4123 case GE_EXPR:
4124 result = sgn0 >= sgn1;
4125 break;
4126 default:
4127 gcc_unreachable ();
4130 return constant_boolean_node (result, type);
4133 /* Given EXP, a logical expression, set the range it is testing into
4134 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4135 actually being tested. *PLOW and *PHIGH will be made of the same
4136 type as the returned expression. If EXP is not a comparison, we
4137 will most likely not be returning a useful value and range. Set
4138 *STRICT_OVERFLOW_P to true if the return value is only valid
4139 because signed overflow is undefined; otherwise, do not change
4140 *STRICT_OVERFLOW_P. */
4142 static tree
4143 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4144 bool *strict_overflow_p)
4146 enum tree_code code;
4147 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4148 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4149 int in_p, n_in_p;
4150 tree low, high, n_low, n_high;
4152 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4153 and see if we can refine the range. Some of the cases below may not
4154 happen, but it doesn't seem worth worrying about this. We "continue"
4155 the outer loop when we've changed something; otherwise we "break"
4156 the switch, which will "break" the while. */
4158 in_p = 0;
4159 low = high = build_int_cst (TREE_TYPE (exp), 0);
4161 while (1)
4163 code = TREE_CODE (exp);
4164 exp_type = TREE_TYPE (exp);
4166 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4168 if (TREE_OPERAND_LENGTH (exp) > 0)
4169 arg0 = TREE_OPERAND (exp, 0);
4170 if (TREE_CODE_CLASS (code) == tcc_comparison
4171 || TREE_CODE_CLASS (code) == tcc_unary
4172 || TREE_CODE_CLASS (code) == tcc_binary)
4173 arg0_type = TREE_TYPE (arg0);
4174 if (TREE_CODE_CLASS (code) == tcc_binary
4175 || TREE_CODE_CLASS (code) == tcc_comparison
4176 || (TREE_CODE_CLASS (code) == tcc_expression
4177 && TREE_OPERAND_LENGTH (exp) > 1))
4178 arg1 = TREE_OPERAND (exp, 1);
4181 switch (code)
4183 case TRUTH_NOT_EXPR:
4184 in_p = ! in_p, exp = arg0;
4185 continue;
4187 case EQ_EXPR: case NE_EXPR:
4188 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4189 /* We can only do something if the range is testing for zero
4190 and if the second operand is an integer constant. Note that
4191 saying something is "in" the range we make is done by
4192 complementing IN_P, since it was set in the initial case of
4193 being not equal to zero; "out" is leaving it alone. */
4194 if (low == 0 || high == 0
4195 || ! integer_zerop (low) || ! integer_zerop (high)
4196 || TREE_CODE (arg1) != INTEGER_CST)
4197 break;
4199 switch (code)
4201 case NE_EXPR: /* - [c, c] */
4202 low = high = arg1;
4203 break;
4204 case EQ_EXPR: /* + [c, c] */
4205 in_p = ! in_p, low = high = arg1;
4206 break;
4207 case GT_EXPR: /* - [-, c] */
4208 low = 0, high = arg1;
4209 break;
4210 case GE_EXPR: /* + [c, -] */
4211 in_p = ! in_p, low = arg1, high = 0;
4212 break;
4213 case LT_EXPR: /* - [c, -] */
4214 low = arg1, high = 0;
4215 break;
4216 case LE_EXPR: /* + [-, c] */
4217 in_p = ! in_p, low = 0, high = arg1;
4218 break;
4219 default:
4220 gcc_unreachable ();
4223 /* If this is an unsigned comparison, we also know that EXP is
4224 greater than or equal to zero. We base the range tests we make
4225 on that fact, and we record it here so we can parse existing
4226 range tests. We test arg0_type since often the return type
4227 of, e.g. EQ_EXPR, is boolean. */
4228 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4230 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4231 in_p, low, high, 1,
4232 build_int_cst (arg0_type, 0),
4233 NULL_TREE))
4234 break;
4236 in_p = n_in_p, low = n_low, high = n_high;
4238 /* If the high bound is missing, but we have a nonzero low
4239 bound, reverse the range so it goes from zero to the low bound
4240 minus 1. */
4241 if (high == 0 && low && ! integer_zerop (low))
4243 in_p = ! in_p;
4244 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4245 integer_one_node, 0);
4246 low = build_int_cst (arg0_type, 0);
4250 exp = arg0;
4251 continue;
4253 case NEGATE_EXPR:
4254 /* (-x) IN [a,b] -> x in [-b, -a] */
4255 n_low = range_binop (MINUS_EXPR, exp_type,
4256 build_int_cst (exp_type, 0),
4257 0, high, 1);
4258 n_high = range_binop (MINUS_EXPR, exp_type,
4259 build_int_cst (exp_type, 0),
4260 0, low, 0);
4261 low = n_low, high = n_high;
4262 exp = arg0;
4263 continue;
4265 case BIT_NOT_EXPR:
4266 /* ~ X -> -X - 1 */
4267 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4268 build_int_cst (exp_type, 1));
4269 continue;
4271 case PLUS_EXPR: case MINUS_EXPR:
4272 if (TREE_CODE (arg1) != INTEGER_CST)
4273 break;
4275 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4276 move a constant to the other side. */
4277 if (!TYPE_UNSIGNED (arg0_type)
4278 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4279 break;
4281 /* If EXP is signed, any overflow in the computation is undefined,
4282 so we don't worry about it so long as our computations on
4283 the bounds don't overflow. For unsigned, overflow is defined
4284 and this is exactly the right thing. */
4285 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4286 arg0_type, low, 0, arg1, 0);
4287 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4288 arg0_type, high, 1, arg1, 0);
4289 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4290 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4291 break;
4293 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4294 *strict_overflow_p = true;
4296 /* Check for an unsigned range which has wrapped around the maximum
4297 value thus making n_high < n_low, and normalize it. */
4298 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4300 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4301 integer_one_node, 0);
4302 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4303 integer_one_node, 0);
4305 /* If the range is of the form +/- [ x+1, x ], we won't
4306 be able to normalize it. But then, it represents the
4307 whole range or the empty set, so make it
4308 +/- [ -, - ]. */
4309 if (tree_int_cst_equal (n_low, low)
4310 && tree_int_cst_equal (n_high, high))
4311 low = high = 0;
4312 else
4313 in_p = ! in_p;
4315 else
4316 low = n_low, high = n_high;
4318 exp = arg0;
4319 continue;
4321 CASE_CONVERT: case NON_LVALUE_EXPR:
4322 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4323 break;
4325 if (! INTEGRAL_TYPE_P (arg0_type)
4326 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4327 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4328 break;
4330 n_low = low, n_high = high;
4332 if (n_low != 0)
4333 n_low = fold_convert (arg0_type, n_low);
4335 if (n_high != 0)
4336 n_high = fold_convert (arg0_type, n_high);
4339 /* If we're converting arg0, which has an unsigned type, to exp's
4340 signed type, we will be doing the comparison as unsigned.
4341 The tests above have already verified that LOW and HIGH
4342 are both positive.
4344 So we have to ensure that we will handle large unsigned
4345 values the same way that the current signed bounds treat
4346 negative values. */
4348 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4350 tree high_positive;
4351 tree equiv_type;
4352 /* For fixed-point modes, we need to pass the saturating flag
4353 as the 2nd parameter. */
4354 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4355 equiv_type = lang_hooks.types.type_for_mode
4356 (TYPE_MODE (arg0_type),
4357 TYPE_SATURATING (arg0_type));
4358 else
4359 equiv_type = lang_hooks.types.type_for_mode
4360 (TYPE_MODE (arg0_type), 1);
4362 /* A range without an upper bound is, naturally, unbounded.
4363 Since convert would have cropped a very large value, use
4364 the max value for the destination type. */
4365 high_positive
4366 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4367 : TYPE_MAX_VALUE (arg0_type);
4369 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4370 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4371 fold_convert (arg0_type,
4372 high_positive),
4373 build_int_cst (arg0_type, 1));
4375 /* If the low bound is specified, "and" the range with the
4376 range for which the original unsigned value will be
4377 positive. */
4378 if (low != 0)
4380 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4381 1, n_low, n_high, 1,
4382 fold_convert (arg0_type,
4383 integer_zero_node),
4384 high_positive))
4385 break;
4387 in_p = (n_in_p == in_p);
4389 else
4391 /* Otherwise, "or" the range with the range of the input
4392 that will be interpreted as negative. */
4393 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4394 0, n_low, n_high, 1,
4395 fold_convert (arg0_type,
4396 integer_zero_node),
4397 high_positive))
4398 break;
4400 in_p = (in_p != n_in_p);
4404 exp = arg0;
4405 low = n_low, high = n_high;
4406 continue;
4408 default:
4409 break;
4412 break;
4415 /* If EXP is a constant, we can evaluate whether this is true or false. */
4416 if (TREE_CODE (exp) == INTEGER_CST)
4418 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4419 exp, 0, low, 0))
4420 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4421 exp, 1, high, 1)));
4422 low = high = 0;
4423 exp = 0;
4426 *pin_p = in_p, *plow = low, *phigh = high;
4427 return exp;
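/* Illustrative sketch, not part of this file: the PLUS_EXPR case above
   moves the constant across the bounds.  For signed X the step is valid
   only because signed overflow is assumed undefined, which is what
   *STRICT_OVERFLOW_P records.  */
#if 0
static int
range_after_plus (int x)
{
  /* "x + 10 > 15" tests the range - [-, 15] on X + 10; shifting the
     bound by -10 gives - [-, 5] on X, i.e. simply "x > 5".  */
  return x > 5;
}
#endif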
4430 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4431 type, TYPE, return an expression to test if EXP is in (or out of, depending
4432 on IN_P) the range. Return 0 if the test couldn't be created. */
4434 static tree
4435 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4437 tree etype = TREE_TYPE (exp);
4438 tree value;
4440 #ifdef HAVE_canonicalize_funcptr_for_compare
4441 /* Disable this optimization for function pointer expressions
4442 on targets that require function pointer canonicalization. */
4443 if (HAVE_canonicalize_funcptr_for_compare
4444 && TREE_CODE (etype) == POINTER_TYPE
4445 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4446 return NULL_TREE;
4447 #endif
4449 if (! in_p)
4451 value = build_range_check (type, exp, 1, low, high);
4452 if (value != 0)
4453 return invert_truthvalue (value);
4455 return 0;
4458 if (low == 0 && high == 0)
4459 return build_int_cst (type, 1);
4461 if (low == 0)
4462 return fold_build2 (LE_EXPR, type, exp,
4463 fold_convert (etype, high));
4465 if (high == 0)
4466 return fold_build2 (GE_EXPR, type, exp,
4467 fold_convert (etype, low));
4469 if (operand_equal_p (low, high, 0))
4470 return fold_build2 (EQ_EXPR, type, exp,
4471 fold_convert (etype, low));
4473 if (integer_zerop (low))
4475 if (! TYPE_UNSIGNED (etype))
4477 etype = unsigned_type_for (etype);
4478 high = fold_convert (etype, high);
4479 exp = fold_convert (etype, exp);
4481 return build_range_check (type, exp, 1, 0, high);
4484 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4485 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4487 unsigned HOST_WIDE_INT lo;
4488 HOST_WIDE_INT hi;
4489 int prec;
4491 prec = TYPE_PRECISION (etype);
4492 if (prec <= HOST_BITS_PER_WIDE_INT)
4494 hi = 0;
4495 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4497 else
4499 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4500 lo = (unsigned HOST_WIDE_INT) -1;
4503 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4505 if (TYPE_UNSIGNED (etype))
4507 etype = signed_type_for (etype);
4508 exp = fold_convert (etype, exp);
4510 return fold_build2 (GT_EXPR, type, exp,
4511 build_int_cst (etype, 0));
4515 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4516 This requires wrap-around arithmetic for the type of the expression. */
4517 switch (TREE_CODE (etype))
4519 case INTEGER_TYPE:
4520 /* There is no requirement that LOW be within the range of ETYPE
4521 if the latter is a subtype. It must, however, be within the base
4522 type of ETYPE. So be sure we do the subtraction in that type. */
4523 if (TREE_TYPE (etype))
4524 etype = TREE_TYPE (etype);
4525 break;
4527 case ENUMERAL_TYPE:
4528 case BOOLEAN_TYPE:
4529 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4530 TYPE_UNSIGNED (etype));
4531 break;
4533 default:
4534 break;
4537 /* If we don't have wrap-around arithmetic up front, try to force it. */
4538 if (TREE_CODE (etype) == INTEGER_TYPE
4539 && !TYPE_OVERFLOW_WRAPS (etype))
4541 tree utype, minv, maxv;
4543 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4544 for the type in question, as we rely on this here. */
4545 utype = unsigned_type_for (etype);
4546 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4547 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4548 integer_one_node, 1);
4549 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4551 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4552 minv, 1, maxv, 1)))
4553 etype = utype;
4554 else
4555 return 0;
4558 high = fold_convert (etype, high);
4559 low = fold_convert (etype, low);
4560 exp = fold_convert (etype, exp);
4562 value = const_binop (MINUS_EXPR, high, low, 0);
4565 if (POINTER_TYPE_P (etype))
4567 if (value != 0 && !TREE_OVERFLOW (value))
4569 low = fold_convert (sizetype, low);
4570 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4571 return build_range_check (type,
4572 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4573 1, build_int_cst (etype, 0), value);
4575 return 0;
4578 if (value != 0 && !TREE_OVERFLOW (value))
4579 return build_range_check (type,
4580 fold_build2 (MINUS_EXPR, etype, exp, low),
4581 1, build_int_cst (etype, 0), value);
4583 return 0;
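/* Illustrative sketch, not part of this file: the [1, signed-max] special
   case above for an 8-bit value, assuming the usual two's-complement
   conversion to signed char.  */
#if 0
#include <assert.h>

static void
check_signed_gt_zero (unsigned char c)
{
  int in_range = (c >= 1 && c <= 127);
  /* 0 fails both bounds, and 128..255 become negative as signed char,
     so one signed comparison replaces the two unsigned ones.  */
  assert (in_range == ((signed char) c > 0));
}
#endif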
4586 /* Return the predecessor of VAL in its type, handling the infinite case. */
4588 static tree
4589 range_predecessor (tree val)
4591 tree type = TREE_TYPE (val);
4593 if (INTEGRAL_TYPE_P (type)
4594 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4595 return 0;
4596 else
4597 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4600 /* Return the successor of VAL in its type, handling the infinite case. */
4602 static tree
4603 range_successor (tree val)
4605 tree type = TREE_TYPE (val);
4607 if (INTEGRAL_TYPE_P (type)
4608 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4609 return 0;
4610 else
4611 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4614 /* Given two ranges, see if we can merge them into one. Return 1 if we
4615 can, 0 if we can't. Set the output range into the specified parameters. */
4617 static int
4618 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4619 tree high0, int in1_p, tree low1, tree high1)
4621 int no_overlap;
4622 int subset;
4623 int temp;
4624 tree tem;
4625 int in_p;
4626 tree low, high;
4627 int lowequal = ((low0 == 0 && low1 == 0)
4628 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4629 low0, 0, low1, 0)));
4630 int highequal = ((high0 == 0 && high1 == 0)
4631 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4632 high0, 1, high1, 1)));
4634 /* Make range 0 be the range that starts first, or ends last if they
4635 start at the same value. Swap them if that is not already the case. */
4636 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4637 low0, 0, low1, 0))
4638 || (lowequal
4639 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4640 high1, 1, high0, 1))))
4642 temp = in0_p, in0_p = in1_p, in1_p = temp;
4643 tem = low0, low0 = low1, low1 = tem;
4644 tem = high0, high0 = high1, high1 = tem;
4647 /* Now flag two cases, whether the ranges are disjoint or whether the
4648 second range is totally subsumed in the first. Note that the tests
4649 below are simplified by the ones above. */
4650 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4651 high0, 1, low1, 0));
4652 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4653 high1, 1, high0, 1));
4655 /* We now have four cases, depending on whether we are including or
4656 excluding the two ranges. */
4657 if (in0_p && in1_p)
4659 /* If they don't overlap, the result is false. If the second range
4660 is a subset, it is the result. Otherwise, the range is from the start
4661 of the second to the end of the first. */
4662 if (no_overlap)
4663 in_p = 0, low = high = 0;
4664 else if (subset)
4665 in_p = 1, low = low1, high = high1;
4666 else
4667 in_p = 1, low = low1, high = high0;
4670 else if (in0_p && ! in1_p)
4672 /* If they don't overlap, the result is the first range. If they are
4673 equal, the result is false. If the second range is a subset of the
4674 first, and the ranges begin at the same place, we go from just after
4675 the end of the second range to the end of the first. If the second
4676 range is not a subset of the first, or if it is a subset and both
4677 ranges end at the same place, the range starts at the start of the
4678 first range and ends just before the second range.
4679 Otherwise, we can't describe this as a single range. */
4680 if (no_overlap)
4681 in_p = 1, low = low0, high = high0;
4682 else if (lowequal && highequal)
4683 in_p = 0, low = high = 0;
4684 else if (subset && lowequal)
4686 low = range_successor (high1);
4687 high = high0;
4688 in_p = 1;
4689 if (low == 0)
4691 /* We are in the weird situation where high0 > high1 but
4692 high1 has no successor. Punt. */
4693 return 0;
4696 else if (! subset || highequal)
4698 low = low0;
4699 high = range_predecessor (low1);
4700 in_p = 1;
4701 if (high == 0)
4703 /* low0 < low1 but low1 has no predecessor. Punt. */
4704 return 0;
4707 else
4708 return 0;
4711 else if (! in0_p && in1_p)
4713 /* If they don't overlap, the result is the second range. If the second
4714 is a subset of the first, the result is false. Otherwise,
4715 the range starts just after the first range and ends at the
4716 end of the second. */
4717 if (no_overlap)
4718 in_p = 1, low = low1, high = high1;
4719 else if (subset || highequal)
4720 in_p = 0, low = high = 0;
4721 else
4723 low = range_successor (high0);
4724 high = high1;
4725 in_p = 1;
4726 if (low == 0)
4728 /* high1 > high0 but high0 has no successor. Punt. */
4729 return 0;
4734 else
4736 /* The case where we are excluding both ranges. Here the complex case
4737 is if they don't overlap. In that case, the only time we have a
4738 range is if they are adjacent. If the second is a subset of the
4739 first, the result is the first. Otherwise, the range to exclude
4740 starts at the beginning of the first range and ends at the end of the
4741 second. */
4742 if (no_overlap)
4744 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4745 range_successor (high0),
4746 1, low1, 0)))
4747 in_p = 0, low = low0, high = high1;
4748 else
4750 /* Canonicalize - [min, x] into - [-, x]. */
4751 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4752 switch (TREE_CODE (TREE_TYPE (low0)))
4754 case ENUMERAL_TYPE:
4755 if (TYPE_PRECISION (TREE_TYPE (low0))
4756 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4757 break;
4758 /* FALLTHROUGH */
4759 case INTEGER_TYPE:
4760 if (tree_int_cst_equal (low0,
4761 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4762 low0 = 0;
4763 break;
4764 case POINTER_TYPE:
4765 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4766 && integer_zerop (low0))
4767 low0 = 0;
4768 break;
4769 default:
4770 break;
4773 /* Canonicalize - [x, max] into - [x, -]. */
4774 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4775 switch (TREE_CODE (TREE_TYPE (high1)))
4777 case ENUMERAL_TYPE:
4778 if (TYPE_PRECISION (TREE_TYPE (high1))
4779 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4780 break;
4781 /* FALLTHROUGH */
4782 case INTEGER_TYPE:
4783 if (tree_int_cst_equal (high1,
4784 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4785 high1 = 0;
4786 break;
4787 case POINTER_TYPE:
4788 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4789 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4790 high1, 1,
4791 integer_one_node, 1)))
4792 high1 = 0;
4793 break;
4794 default:
4795 break;
4798 /* The ranges might also be adjacent between the maximum and
4799 minimum values of the given type. For
4800 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4801 return + [x + 1, y - 1]. */
4802 if (low0 == 0 && high1 == 0)
4804 low = range_successor (high0);
4805 high = range_predecessor (low1);
4806 if (low == 0 || high == 0)
4807 return 0;
4809 in_p = 1;
4811 else
4812 return 0;
4815 else if (subset)
4816 in_p = 0, low = low0, high = high0;
4817 else
4818 in_p = 0, low = low0, high = high1;
4821 *pin_p = in_p, *plow = low, *phigh = high;
4822 return 1;
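/* Illustrative sketch, not part of this file: the in0_p && in1_p case
   above on concrete bounds.  Intersecting [2, 5] with [4, 9] runs from
   the start of the second range to the end of the first.  */
#if 0
#include <assert.h>

static void
check_range_merge (int x)
{
  int both = (x >= 2 && x <= 5) && (x >= 4 && x <= 9);
  assert (both == (x >= 4 && x <= 5));
}
#endif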
4826 /* Subroutine of fold, looking inside expressions of the form
4827 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4828 of the COND_EXPR. This function is being used also to optimize
4829 A op B ? C : A, by reversing the comparison first.
4831 Return a folded expression whose code is not a COND_EXPR
4832 anymore, or NULL_TREE if no folding opportunity is found. */
4834 static tree
4835 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4837 enum tree_code comp_code = TREE_CODE (arg0);
4838 tree arg00 = TREE_OPERAND (arg0, 0);
4839 tree arg01 = TREE_OPERAND (arg0, 1);
4840 tree arg1_type = TREE_TYPE (arg1);
4841 tree tem;
4843 STRIP_NOPS (arg1);
4844 STRIP_NOPS (arg2);
4846 /* If we have A op 0 ? A : -A, consider applying the following
4847 transformations:
4849 A == 0? A : -A same as -A
4850 A != 0? A : -A same as A
4851 A >= 0? A : -A same as abs (A)
4852 A > 0? A : -A same as abs (A)
4853 A <= 0? A : -A same as -abs (A)
4854 A < 0? A : -A same as -abs (A)
4856 None of these transformations work for modes with signed
4857 zeros. If A is +/-0, the first two transformations will
4858 change the sign of the result (from +0 to -0, or vice
4859 versa). The last four will fix the sign of the result,
4860 even though the original expressions could be positive or
4861 negative, depending on the sign of A.
4863 Note that all these transformations are correct if A is
4864 NaN, since the two alternatives (A and -A) are also NaNs. */
4865 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4866 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4867 ? real_zerop (arg01)
4868 : integer_zerop (arg01))
4869 && ((TREE_CODE (arg2) == NEGATE_EXPR
4870 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4871 /* In the case that A is of the form X-Y, '-A' (arg2) may
4872 have already been folded to Y-X; check for that. */
4873 || (TREE_CODE (arg1) == MINUS_EXPR
4874 && TREE_CODE (arg2) == MINUS_EXPR
4875 && operand_equal_p (TREE_OPERAND (arg1, 0),
4876 TREE_OPERAND (arg2, 1), 0)
4877 && operand_equal_p (TREE_OPERAND (arg1, 1),
4878 TREE_OPERAND (arg2, 0), 0))))
4879 switch (comp_code)
4881 case EQ_EXPR:
4882 case UNEQ_EXPR:
4883 tem = fold_convert (arg1_type, arg1);
4884 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4885 case NE_EXPR:
4886 case LTGT_EXPR:
4887 return pedantic_non_lvalue (fold_convert (type, arg1));
4888 case UNGE_EXPR:
4889 case UNGT_EXPR:
4890 if (flag_trapping_math)
4891 break;
4892 /* Fall through. */
4893 case GE_EXPR:
4894 case GT_EXPR:
4895 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4896 arg1 = fold_convert (signed_type_for
4897 (TREE_TYPE (arg1)), arg1);
4898 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4899 return pedantic_non_lvalue (fold_convert (type, tem));
4900 case UNLE_EXPR:
4901 case UNLT_EXPR:
4902 if (flag_trapping_math)
4903 break;
4904 case LE_EXPR:
4905 case LT_EXPR:
4906 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4907 arg1 = fold_convert (signed_type_for
4908 (TREE_TYPE (arg1)), arg1);
4909 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4910 return negate_expr (fold_convert (type, tem));
4911 default:
4912 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4913 break;
4916 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4917 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4918 both transformations are correct when A is NaN: A != 0
4919 is then true, and A == 0 is false. */
4921 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4922 && integer_zerop (arg01) && integer_zerop (arg2))
4924 if (comp_code == NE_EXPR)
4925 return pedantic_non_lvalue (fold_convert (type, arg1));
4926 else if (comp_code == EQ_EXPR)
4927 return build_int_cst (type, 0);
4930 /* Try some transformations of A op B ? A : B.
4932 A == B? A : B same as B
4933 A != B? A : B same as A
4934 A >= B? A : B same as max (A, B)
4935 A > B? A : B same as max (B, A)
4936 A <= B? A : B same as min (A, B)
4937 A < B? A : B same as min (B, A)
4939 As above, these transformations don't work in the presence
4940 of signed zeros. For example, if A and B are zeros of
4941 opposite sign, the first two transformations will change
4942 the sign of the result. In the last four, the original
4943 expressions give different results for (A=+0, B=-0) and
4944 (A=-0, B=+0), but the transformed expressions do not.
4946 The first two transformations are correct if either A or B
4947 is a NaN. In the first transformation, the condition will
4948 be false, and B will indeed be chosen. In the case of the
4949 second transformation, the condition A != B will be true,
4950 and A will be chosen.
4952 The conversions to max() and min() are not correct if B is
4953 a number and A is not. The conditions in the original
4954 expressions will be false, so all four give B. The min()
4955 and max() versions would give a NaN instead. */
4956 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4957 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4958 /* Avoid these transformations if the COND_EXPR may be used
4959 as an lvalue in the C++ front-end. PR c++/19199. */
4960 && (in_gimple_form
4961 || (strcmp (lang_hooks.name, "GNU C++") != 0
4962 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4963 || ! maybe_lvalue_p (arg1)
4964 || ! maybe_lvalue_p (arg2)))
4966 tree comp_op0 = arg00;
4967 tree comp_op1 = arg01;
4968 tree comp_type = TREE_TYPE (comp_op0);
4970 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4971 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4973 comp_type = type;
4974 comp_op0 = arg1;
4975 comp_op1 = arg2;
4978 switch (comp_code)
4980 case EQ_EXPR:
4981 return pedantic_non_lvalue (fold_convert (type, arg2));
4982 case NE_EXPR:
4983 return pedantic_non_lvalue (fold_convert (type, arg1));
4984 case LE_EXPR:
4985 case LT_EXPR:
4986 case UNLE_EXPR:
4987 case UNLT_EXPR:
4988 /* In C++ a ?: expression can be an lvalue, so put the
4989 operand which will be used if they are equal first
4990 so that we can convert this back to the
4991 corresponding COND_EXPR. */
4992 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4994 comp_op0 = fold_convert (comp_type, comp_op0);
4995 comp_op1 = fold_convert (comp_type, comp_op1);
4996 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4997 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4998 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4999 return pedantic_non_lvalue (fold_convert (type, tem));
5001 break;
5002 case GE_EXPR:
5003 case GT_EXPR:
5004 case UNGE_EXPR:
5005 case UNGT_EXPR:
5006 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5008 comp_op0 = fold_convert (comp_type, comp_op0);
5009 comp_op1 = fold_convert (comp_type, comp_op1);
5010 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5011 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5012 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5013 return pedantic_non_lvalue (fold_convert (type, tem));
5015 break;
5016 case UNEQ_EXPR:
5017 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5018 return pedantic_non_lvalue (fold_convert (type, arg2));
5019 break;
5020 case LTGT_EXPR:
5021 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5022 return pedantic_non_lvalue (fold_convert (type, arg1));
5023 break;
5024 default:
5025 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5026 break;
5030 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5031 we might still be able to simplify this. For example,
5032 if C1 is one less or one more than C2, this might have started
5033 out as a MIN or MAX and been transformed by this function.
5034 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5036 if (INTEGRAL_TYPE_P (type)
5037 && TREE_CODE (arg01) == INTEGER_CST
5038 && TREE_CODE (arg2) == INTEGER_CST)
5039 switch (comp_code)
5041 case EQ_EXPR:
5042 /* We can replace A with C1 in this case. */
5043 arg1 = fold_convert (type, arg01);
5044 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5046 case LT_EXPR:
5047 /* If C1 is C2 + 1, this is min(A, C2). */
5048 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5049 OEP_ONLY_CONST)
5050 && operand_equal_p (arg01,
5051 const_binop (PLUS_EXPR, arg2,
5052 build_int_cst (type, 1), 0),
5053 OEP_ONLY_CONST))
5054 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5055 type,
5056 fold_convert (type, arg1),
5057 arg2));
5058 break;
5060 case LE_EXPR:
5061 /* If C1 is C2 - 1, this is min(A, C2). */
5062 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5063 OEP_ONLY_CONST)
5064 && operand_equal_p (arg01,
5065 const_binop (MINUS_EXPR, arg2,
5066 build_int_cst (type, 1), 0),
5067 OEP_ONLY_CONST))
5068 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5069 type,
5070 fold_convert (type, arg1),
5071 arg2));
5072 break;
5074 case GT_EXPR:
5075 /* If C1 is C2 - 1, this is max(A, C2). */
5076 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5077 OEP_ONLY_CONST)
5078 && operand_equal_p (arg01,
5079 const_binop (MINUS_EXPR, arg2,
5080 build_int_cst (type, 1), 0),
5081 OEP_ONLY_CONST))
5082 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5083 type,
5084 fold_convert (type, arg1),
5085 arg2));
5086 break;
5088 case GE_EXPR:
5089 /* If C1 is C2 + 1, this is max(A, C2). */
5090 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5091 OEP_ONLY_CONST)
5092 && operand_equal_p (arg01,
5093 const_binop (PLUS_EXPR, arg2,
5094 build_int_cst (type, 1), 0),
5095 OEP_ONLY_CONST))
5096 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5097 type,
5098 fold_convert (type, arg1),
5099 arg2));
5100 break;
5101 case NE_EXPR:
5102 break;
5103 default:
5104 gcc_unreachable ();
5107 return NULL_TREE;
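/* Illustrative sketch, not part of this file: the GE_EXPR row of the
   A op 0 ? A : -A table above, on plain int.  */
#if 0
#include <assert.h>
#include <limits.h>
#include <stdlib.h>

static void
check_abs_form (int a)
{
  if (a == INT_MIN)
    return;   /* -a and abs (a) both overflow for INT_MIN; skip it.  */
  assert ((a >= 0 ? a : -a) == abs (a));
}
#endif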
5112 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5113 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5114 #endif
5116 /* EXP is some logical combination of boolean tests. See if we can
5117 merge it into some range test. Return the new tree if so. */
5119 static tree
5120 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5122 int or_op = (code == TRUTH_ORIF_EXPR
5123 || code == TRUTH_OR_EXPR);
5124 int in0_p, in1_p, in_p;
5125 tree low0, low1, low, high0, high1, high;
5126 bool strict_overflow_p = false;
5127 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5128 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5129 tree tem;
5130 const char * const warnmsg = G_("assuming signed overflow does not occur "
5131 "when simplifying range test");
5133 /* If this is an OR operation, invert both sides; we will invert
5134 again at the end. */
5135 if (or_op)
5136 in0_p = ! in0_p, in1_p = ! in1_p;
5138 /* If both expressions are the same, if we can merge the ranges, and we
5139 can build the range test, return it or it inverted. If one of the
5140 ranges is always true or always false, consider it to be the same
5141 expression as the other. */
5142 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5143 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5144 in1_p, low1, high1)
5145 && 0 != (tem = (build_range_check (type,
5146 lhs != 0 ? lhs
5147 : rhs != 0 ? rhs : integer_zero_node,
5148 in_p, low, high))))
5150 if (strict_overflow_p)
5151 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5152 return or_op ? invert_truthvalue (tem) : tem;
5155 /* On machines where branches are expensive, if this is a
5156 short-circuited branch and the underlying object on both sides
5157 is the same, make a non-short-circuit operation. */
5158 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5159 && lhs != 0 && rhs != 0
5160 && (code == TRUTH_ANDIF_EXPR
5161 || code == TRUTH_ORIF_EXPR)
5162 && operand_equal_p (lhs, rhs, 0))
5164 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5165 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5166 which cases we can't do this. */
5167 if (simple_operand_p (lhs))
5168 return build2 (code == TRUTH_ANDIF_EXPR
5169 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5170 type, op0, op1);
5172 else if (lang_hooks.decls.global_bindings_p () == 0
5173 && ! CONTAINS_PLACEHOLDER_P (lhs))
5175 tree common = save_expr (lhs);
5177 if (0 != (lhs = build_range_check (type, common,
5178 or_op ? ! in0_p : in0_p,
5179 low0, high0))
5180 && (0 != (rhs = build_range_check (type, common,
5181 or_op ? ! in1_p : in1_p,
5182 low1, high1))))
5184 if (strict_overflow_p)
5185 fold_overflow_warning (warnmsg,
5186 WARN_STRICT_OVERFLOW_COMPARISON);
5187 return build2 (code == TRUTH_ANDIF_EXPR
5188 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5189 type, lhs, rhs);
5194 return 0;
5197 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
5198 P-bit value. Arrange things so the extra bits will be set to zero if and
5199 only if C is sign-extended to its full width. If MASK is nonzero,
5200 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5202 static tree
5203 unextend (tree c, int p, int unsignedp, tree mask)
5205 tree type = TREE_TYPE (c);
5206 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5207 tree temp;
5209 if (p == modesize || unsignedp)
5210 return c;
5212 /* We work by getting just the sign bit into the low-order bit, then
5213 into the high-order bit, then sign-extending. We then XOR that value
5214 with C. */
5215 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5216 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5218 /* We must use a signed type in order to get an arithmetic right shift.
5219 However, we must also avoid introducing accidental overflows, so that
5220 a subsequent call to integer_zerop will work. Hence we must
5221 do the type conversion here. At this point, the constant is either
5222 zero or one, and the conversion to a signed type can never overflow.
5223 We could get an overflow if this conversion is done anywhere else. */
5224 if (TYPE_UNSIGNED (type))
5225 temp = fold_convert (signed_type_for (type), temp);
5227 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5228 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5229 if (mask != 0)
5230 temp = const_binop (BIT_AND_EXPR, temp,
5231 fold_convert (TREE_TYPE (c), mask), 0);
5232 /* If necessary, convert the type back to match the type of C. */
5233 if (TYPE_UNSIGNED (type))
5234 temp = fold_convert (type, temp);
5236 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
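/* Illustrative sketch, not part of this file: the shift sequence above on
   a 32-bit value, assuming arithmetic right shifts of signed values as on
   typical targets.  It smears the sign bit of a P-bit field across bit
   positions P and up; XORing the result with C then clears those bits
   exactly when C was sign-extended.  */
#if 0
#include <stdint.h>

static uint32_t
high_bits_from_sign (uint32_t c, int p)   /* requires 0 < p < 32 */
{
  uint32_t temp = (c >> (p - 1)) & 1;     /* the field's sign bit */
  int32_t top = (int32_t) (temp << 31);   /* move it to the high-order bit */
  return (uint32_t) (top >> (31 - p));    /* sign-extend down to bit P */
}
#endif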
5239 /* Find ways of folding logical expressions of LHS and RHS:
5240 Try to merge two comparisons to the same innermost item.
5241 Look for range tests like "ch >= '0' && ch <= '9'".
5242 Look for combinations of simple terms on machines with expensive branches
5243 and evaluate the RHS unconditionally.
5245 For example, if we have p->a == 2 && p->b == 4 and we can make an
5246 object large enough to span both A and B, we can do this with a comparison
5247 against the object ANDed with a mask.
5249 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5250 operations to do this with one comparison.
5252 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5253 function and the one above.
5255 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5256 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5258 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5259 two operands.
5261 We return the simplified tree or 0 if no optimization is possible. */
5263 static tree
5264 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5266 /* If this is the "or" of two comparisons, we can do something if
5267 the comparisons are NE_EXPR. If this is the "and", we can do something
5268 if the comparisons are EQ_EXPR. I.e.,
5269 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5271 WANTED_CODE is this operation code. For single bit fields, we can
5272 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5273 comparison for one-bit fields. */
5275 enum tree_code wanted_code;
5276 enum tree_code lcode, rcode;
5277 tree ll_arg, lr_arg, rl_arg, rr_arg;
5278 tree ll_inner, lr_inner, rl_inner, rr_inner;
5279 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5280 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5281 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5282 HOST_WIDE_INT lnbitsize, lnbitpos;
5283 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5284 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5285 enum machine_mode lnmode;
5286 tree ll_mask, lr_mask, rl_mask, rr_mask;
5287 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5288 tree l_const, r_const;
5289 tree lntype, result;
5290 int first_bit, end_bit;
5291 int volatilep;
5292 tree orig_lhs = lhs, orig_rhs = rhs;
5293 enum tree_code orig_code = code;
5295 /* Start by getting the comparison codes. Fail if anything is volatile.
5296 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5297 it were surrounded with a NE_EXPR. */
5299 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5300 return 0;
5302 lcode = TREE_CODE (lhs);
5303 rcode = TREE_CODE (rhs);
5305 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5307 lhs = build2 (NE_EXPR, truth_type, lhs,
5308 build_int_cst (TREE_TYPE (lhs), 0));
5309 lcode = NE_EXPR;
5312 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5314 rhs = build2 (NE_EXPR, truth_type, rhs,
5315 build_int_cst (TREE_TYPE (rhs), 0));
5316 rcode = NE_EXPR;
5319 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5320 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5321 return 0;
5323 ll_arg = TREE_OPERAND (lhs, 0);
5324 lr_arg = TREE_OPERAND (lhs, 1);
5325 rl_arg = TREE_OPERAND (rhs, 0);
5326 rr_arg = TREE_OPERAND (rhs, 1);
5328 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5329 if (simple_operand_p (ll_arg)
5330 && simple_operand_p (lr_arg))
5332 tree result;
5333 if (operand_equal_p (ll_arg, rl_arg, 0)
5334 && operand_equal_p (lr_arg, rr_arg, 0))
5336 result = combine_comparisons (code, lcode, rcode,
5337 truth_type, ll_arg, lr_arg);
5338 if (result)
5339 return result;
5341 else if (operand_equal_p (ll_arg, rr_arg, 0)
5342 && operand_equal_p (lr_arg, rl_arg, 0))
5344 result = combine_comparisons (code, lcode,
5345 swap_tree_comparison (rcode),
5346 truth_type, ll_arg, lr_arg);
5347 if (result)
5348 return result;
5352 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5353 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5355 /* If the RHS can be evaluated unconditionally and its operands are
5356 simple, it wins to evaluate the RHS unconditionally on machines
5357 with expensive branches. In this case, this isn't a comparison
5358 that can be merged. Avoid doing this if the RHS is a floating-point
5359 comparison since those can trap. */
5361 if (BRANCH_COST >= 2
5362 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5363 && simple_operand_p (rl_arg)
5364 && simple_operand_p (rr_arg))
5366 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5367 if (code == TRUTH_OR_EXPR
5368 && lcode == NE_EXPR && integer_zerop (lr_arg)
5369 && rcode == NE_EXPR && integer_zerop (rr_arg)
5370 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5371 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5372 return build2 (NE_EXPR, truth_type,
5373 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5374 ll_arg, rl_arg),
5375 build_int_cst (TREE_TYPE (ll_arg), 0));
5377 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5378 if (code == TRUTH_AND_EXPR
5379 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5380 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5381 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5382 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5383 return build2 (EQ_EXPR, truth_type,
5384 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5385 ll_arg, rl_arg),
5386 build_int_cst (TREE_TYPE (ll_arg), 0));
5388 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5390 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5391 return build2 (code, truth_type, lhs, rhs);
5392 return NULL_TREE;
5396 /* See if the comparisons can be merged. Then get all the parameters for
5397 each side. */
5399 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5400 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5401 return 0;
5403 volatilep = 0;
5404 ll_inner = decode_field_reference (ll_arg,
5405 &ll_bitsize, &ll_bitpos, &ll_mode,
5406 &ll_unsignedp, &volatilep, &ll_mask,
5407 &ll_and_mask);
5408 lr_inner = decode_field_reference (lr_arg,
5409 &lr_bitsize, &lr_bitpos, &lr_mode,
5410 &lr_unsignedp, &volatilep, &lr_mask,
5411 &lr_and_mask);
5412 rl_inner = decode_field_reference (rl_arg,
5413 &rl_bitsize, &rl_bitpos, &rl_mode,
5414 &rl_unsignedp, &volatilep, &rl_mask,
5415 &rl_and_mask);
5416 rr_inner = decode_field_reference (rr_arg,
5417 &rr_bitsize, &rr_bitpos, &rr_mode,
5418 &rr_unsignedp, &volatilep, &rr_mask,
5419 &rr_and_mask);
5421 /* The inner operation on the lhs of each comparison must be the same
5422 if we are to be able to do anything.
5423 Then see if we have constants. If not, the same must be true for
5424 the rhs's. */
5425 if (volatilep || ll_inner == 0 || rl_inner == 0
5426 || ! operand_equal_p (ll_inner, rl_inner, 0))
5427 return 0;
5429 if (TREE_CODE (lr_arg) == INTEGER_CST
5430 && TREE_CODE (rr_arg) == INTEGER_CST)
5431 l_const = lr_arg, r_const = rr_arg;
5432 else if (lr_inner == 0 || rr_inner == 0
5433 || ! operand_equal_p (lr_inner, rr_inner, 0))
5434 return 0;
5435 else
5436 l_const = r_const = 0;
5438 /* If either comparison code is not correct for our logical operation,
5439 fail. However, we can convert a one-bit comparison against zero into
5440 the opposite comparison against that bit being set in the field. */
5442 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5443 if (lcode != wanted_code)
5445 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5447 /* Make the left operand unsigned, since we are only interested
5448 in the value of one bit. Otherwise we are doing the wrong
5449 thing below. */
5450 ll_unsignedp = 1;
5451 l_const = ll_mask;
5453 else
5454 return 0;
5457 /* This is analogous to the code for l_const above. */
5458 if (rcode != wanted_code)
5460 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5462 rl_unsignedp = 1;
5463 r_const = rl_mask;
5465 else
5466 return 0;
5469 /* See if we can find a mode that contains both fields being compared on
5470 the left. If we can't, fail. Otherwise, update all constants and masks
5471 to be relative to a field of that size. */
5472 first_bit = MIN (ll_bitpos, rl_bitpos);
5473 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5474 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5475 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5476 volatilep);
5477 if (lnmode == VOIDmode)
5478 return 0;
5480 lnbitsize = GET_MODE_BITSIZE (lnmode);
5481 lnbitpos = first_bit & ~ (lnbitsize - 1);
5482 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5483 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5485 if (BYTES_BIG_ENDIAN)
5487 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5488 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5491 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5492 size_int (xll_bitpos), 0);
5493 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5494 size_int (xrl_bitpos), 0);
5496 if (l_const)
5498 l_const = fold_convert (lntype, l_const);
5499 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5500 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5501 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5502 fold_build1 (BIT_NOT_EXPR,
5503 lntype, ll_mask),
5504 0)))
5506 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5508 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5511 if (r_const)
5513 r_const = fold_convert (lntype, r_const);
5514 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5515 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5516 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5517 fold_build1 (BIT_NOT_EXPR,
5518 lntype, rl_mask),
5519 0)))
5521 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5523 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5527 /* Handle the case of comparisons with constants. If there is something in
5528 common between the masks, those bits of the constants must be the same.
5529 If not, the condition is always false. Test for this to avoid generating
5530 incorrect code below. */
5531 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5532 if (! integer_zerop (result)
5533 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5534 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5536 if (wanted_code == NE_EXPR)
5538 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5539 return constant_boolean_node (true, truth_type);
5541 else
5543 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5544 return constant_boolean_node (false, truth_type);
5548 return NULL_TREE;
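/* Illustrative sketch, not part of this file: the two unconditional
   rewrites above on plain unsigned operands.  */
#if 0
#include <assert.h>

static void
check_ior_rewrites (unsigned a, unsigned b)
{
  /* (a != 0) || (b != 0) becomes (a | b) != 0 ...  */
  assert (((a != 0) || (b != 0)) == ((a | b) != 0));
  /* ... and (a == 0) && (b == 0) becomes (a | b) == 0.  */
  assert (((a == 0) && (b == 0)) == ((a | b) == 0));
}
#endif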
5551 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5552 constant. */
5554 static tree
5555 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5557 tree arg0 = op0;
5558 enum tree_code op_code;
5559 tree comp_const;
5560 tree minmax_const;
5561 int consts_equal, consts_lt;
5562 tree inner;
5564 STRIP_SIGN_NOPS (arg0);
5566 op_code = TREE_CODE (arg0);
5567 minmax_const = TREE_OPERAND (arg0, 1);
5568 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5569 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5570 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5571 inner = TREE_OPERAND (arg0, 0);
5573 /* If something does not permit us to optimize, return the original tree. */
5574 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5575 || TREE_CODE (comp_const) != INTEGER_CST
5576 || TREE_OVERFLOW (comp_const)
5577 || TREE_CODE (minmax_const) != INTEGER_CST
5578 || TREE_OVERFLOW (minmax_const))
5579 return NULL_TREE;
5581 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5582 and GT_EXPR, doing the rest with recursive calls using logical
5583 simplifications. */
5584 switch (code)
5586 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5588 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5589 type, op0, op1);
5590 if (tem)
5591 return invert_truthvalue (tem);
5592 return NULL_TREE;
5595 case GE_EXPR:
5596 return
5597 fold_build2 (TRUTH_ORIF_EXPR, type,
5598 optimize_minmax_comparison
5599 (EQ_EXPR, type, arg0, comp_const),
5600 optimize_minmax_comparison
5601 (GT_EXPR, type, arg0, comp_const));
5603 case EQ_EXPR:
5604 if (op_code == MAX_EXPR && consts_equal)
5605 /* MAX (X, 0) == 0 -> X <= 0 */
5606 return fold_build2 (LE_EXPR, type, inner, comp_const);
5608 else if (op_code == MAX_EXPR && consts_lt)
5609 /* MAX (X, 0) == 5 -> X == 5 */
5610 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5612 else if (op_code == MAX_EXPR)
5613 /* MAX (X, 0) == -1 -> false */
5614 return omit_one_operand (type, integer_zero_node, inner);
5616 else if (consts_equal)
5617 /* MIN (X, 0) == 0 -> X >= 0 */
5618 return fold_build2 (GE_EXPR, type, inner, comp_const);
5620 else if (consts_lt)
5621 /* MIN (X, 0) == 5 -> false */
5622 return omit_one_operand (type, integer_zero_node, inner);
5624 else
5625 /* MIN (X, 0) == -1 -> X == -1 */
5626 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5628 case GT_EXPR:
5629 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5630 /* MAX (X, 0) > 0 -> X > 0
5631 MAX (X, 0) > 5 -> X > 5 */
5632 return fold_build2 (GT_EXPR, type, inner, comp_const);
5634 else if (op_code == MAX_EXPR)
5635 /* MAX (X, 0) > -1 -> true */
5636 return omit_one_operand (type, integer_one_node, inner);
5638 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5639 /* MIN (X, 0) > 0 -> false
5640 MIN (X, 0) > 5 -> false */
5641 return omit_one_operand (type, integer_zero_node, inner);
5643 else
5644 /* MIN (X, 0) > -1 -> X > -1 */
5645 return fold_build2 (GT_EXPR, type, inner, comp_const);
5647 default:
5648 return NULL_TREE;
5652 /* T is an integer expression that is being multiplied, divided, or taken a
5653 modulus (CODE says which and what kind of divide or modulus) by a
5654 constant C. See if we can eliminate that operation by folding it with
5655 other operations already in T. WIDE_TYPE, if non-null, is a type that
5656 should be used for the computation if wider than our type.
5658 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5659 (X * 2) + (Y * 4). We must, however, be assured that either the original
5660 expression would not overflow or that overflow is undefined for the type
5661 in the language in question.
5663 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5664 the machine has a multiply-accumulate insn or that this is part of an
5665 addressing calculation.
5667 If we return a non-null expression, it is an equivalent form of the
5668 original computation, but need not be in the original type.
5670 We set *STRICT_OVERFLOW_P to true if the return value depends on
5671 signed overflow being undefined. Otherwise we do not change
5672 *STRICT_OVERFLOW_P. */
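/* Editorial sketch of a typical call, modeled on the MULT_EXPR handling
   in fold_binary; the surrounding declarations are hypothetical:

     bool strict_overflow_p = false;
     tree tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                &strict_overflow_p);
     if (tem != NULL_TREE)
       {
         if (strict_overflow_p)
           fold_overflow_warning ("assuming signed overflow does not occur",
                                  WARN_STRICT_OVERFLOW_MISC);
         return fold_convert (type, tem);
       }
*/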
5674 static tree
5675 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5676 bool *strict_overflow_p)
5678 /* To avoid exponential search depth, refuse to allow recursion past
5679 three levels. Beyond that (1) it's highly unlikely that we'll find
5680 something interesting and (2) we've probably processed it before
5681 when we built the inner expression. */
5683 static int depth;
5684 tree ret;
5686 if (depth > 3)
5687 return NULL;
5689 depth++;
5690 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5691 depth--;
5693 return ret;
5696 static tree
5697 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5698 bool *strict_overflow_p)
5700 tree type = TREE_TYPE (t);
5701 enum tree_code tcode = TREE_CODE (t);
5702 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5703 > GET_MODE_SIZE (TYPE_MODE (type)))
5704 ? wide_type : type);
5705 tree t1, t2;
5706 int same_p = tcode == code;
5707 tree op0 = NULL_TREE, op1 = NULL_TREE;
5708 bool sub_strict_overflow_p;
5710 /* Don't deal with constants of zero here; they confuse the code below. */
5711 if (integer_zerop (c))
5712 return NULL_TREE;
5714 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5715 op0 = TREE_OPERAND (t, 0);
5717 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5718 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5720 /* Note that we need not handle conditional operations here since fold
5721 already handles those cases. So just do arithmetic here. */
5722 switch (tcode)
5724 case INTEGER_CST:
5725 /* For a constant, we can always simplify if we are a multiply
5726 or (for divide and modulus) if it is a multiple of our constant. */
5727 if (code == MULT_EXPR
5728 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5729 return const_binop (code, fold_convert (ctype, t),
5730 fold_convert (ctype, c), 0);
5731 break;
5733 CASE_CONVERT: case NON_LVALUE_EXPR:
5734 /* If op0 is an expression ... */
5735 if ((COMPARISON_CLASS_P (op0)
5736 || UNARY_CLASS_P (op0)
5737 || BINARY_CLASS_P (op0)
5738 || VL_EXP_CLASS_P (op0)
5739 || EXPRESSION_CLASS_P (op0))
5740 /* ... and has wrapping overflow, and its type is smaller
5741 than ctype, then we cannot pass through as widening. */
5742 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5743 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5744 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5745 && (TYPE_PRECISION (ctype)
5746 > TYPE_PRECISION (TREE_TYPE (op0))))
5747 /* ... or this is a truncation (t is narrower than op0),
5748 then we cannot pass through this narrowing. */
5749 || (TYPE_PRECISION (type)
5750 < TYPE_PRECISION (TREE_TYPE (op0)))
5751 /* ... or signedness changes for division or modulus,
5752 then we cannot pass through this conversion. */
5753 || (code != MULT_EXPR
5754 && (TYPE_UNSIGNED (ctype)
5755 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5756 /* ... or OP0's type has undefined overflow while the type
5757 we convert to does not, then we cannot do the operation in
5758 the inner type, as that would introduce undefined overflow. */
5759 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5760 && !TYPE_OVERFLOW_UNDEFINED (type))))
5761 break;
5763 /* Pass the constant down and see if we can make a simplification. If
5764 we can, replace this expression with the inner simplification for
5765 possible later conversion to our or some other type. */
5766 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5767 && TREE_CODE (t2) == INTEGER_CST
5768 && !TREE_OVERFLOW (t2)
5769 && (0 != (t1 = extract_muldiv (op0, t2, code,
5770 code == MULT_EXPR
5771 ? ctype : NULL_TREE,
5772 strict_overflow_p))))
5773 return t1;
5774 break;
5776 case ABS_EXPR:
5777 /* If widening the type changes it from signed to unsigned, then we
5778 must avoid building ABS_EXPR itself as unsigned. */
5779 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5781 tree cstype = (*signed_type_for) (ctype);
5782 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5783 != 0)
5785 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5786 return fold_convert (ctype, t1);
5788 break;
5790 /* If the constant is negative, we cannot simplify this. */
5791 if (tree_int_cst_sgn (c) == -1)
5792 break;
5793 /* FALLTHROUGH */
5794 case NEGATE_EXPR:
5795 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5796 != 0)
5797 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5798 break;
5800 case MIN_EXPR: case MAX_EXPR:
5801 /* If widening the type changes the signedness, then we can't perform
5802 this optimization as that changes the result. */
5803 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5804 break;
5806 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5807 sub_strict_overflow_p = false;
5808 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5809 &sub_strict_overflow_p)) != 0
5810 && (t2 = extract_muldiv (op1, c, code, wide_type,
5811 &sub_strict_overflow_p)) != 0)
5813 if (tree_int_cst_sgn (c) < 0)
5814 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5815 if (sub_strict_overflow_p)
5816 *strict_overflow_p = true;
5817 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5818 fold_convert (ctype, t2));
5820 break;
5822 case LSHIFT_EXPR: case RSHIFT_EXPR:
5823 /* If the second operand is a constant, this is a multiplication
5824 or floor division by a power of two, so we can treat it that
5825 way unless the multiplier or divisor overflows. Signed
5826 left-shift overflow is implementation-defined rather than
5827 undefined in C90, so do not convert signed left shift into
5828 multiplication. */
5829 if (TREE_CODE (op1) == INTEGER_CST
5830 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5831 /* const_binop may not detect overflow correctly,
5832 so check for it explicitly here. */
5833 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5834 && TREE_INT_CST_HIGH (op1) == 0
5835 && 0 != (t1 = fold_convert (ctype,
5836 const_binop (LSHIFT_EXPR,
5837 size_one_node,
5838 op1, 0)))
5839 && !TREE_OVERFLOW (t1))
5840 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5841 ? MULT_EXPR : FLOOR_DIV_EXPR,
5842 ctype, fold_convert (ctype, op0), t1),
5843 c, code, wide_type, strict_overflow_p);
5844 break;
5846 case PLUS_EXPR: case MINUS_EXPR:
5847 /* See if we can eliminate the operation on both sides. If we can, we
5848 can return a new PLUS or MINUS. If we can't, the only remaining
5849 cases where we can do anything are if the second operand is a
5850 constant. */
5851 sub_strict_overflow_p = false;
5852 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5853 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5854 if (t1 != 0 && t2 != 0
5855 && (code == MULT_EXPR
5856 /* If not multiplication, we can only do this if both operands
5857 are divisible by c. */
5858 || (multiple_of_p (ctype, op0, c)
5859 && multiple_of_p (ctype, op1, c))))
5861 if (sub_strict_overflow_p)
5862 *strict_overflow_p = true;
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5864 fold_convert (ctype, t2));
5867 /* If this was a subtraction, negate OP1 and set it to be an addition.
5868 This simplifies the logic below. */
5869 if (tcode == MINUS_EXPR)
5870 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5872 if (TREE_CODE (op1) != INTEGER_CST)
5873 break;
5875 /* If either OP1 or C is negative, this optimization is not safe for
5876 some of the division and remainder types, while for others we need
5877 to change the code. */
5878 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5880 if (code == CEIL_DIV_EXPR)
5881 code = FLOOR_DIV_EXPR;
5882 else if (code == FLOOR_DIV_EXPR)
5883 code = CEIL_DIV_EXPR;
5884 else if (code != MULT_EXPR
5885 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5886 break;
5889 /* If it's a multiply or a division/modulus operation of a multiple
5890 of our constant, do the operation and verify it doesn't overflow. */
5891 if (code == MULT_EXPR
5892 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5894 op1 = const_binop (code, fold_convert (ctype, op1),
5895 fold_convert (ctype, c), 0);
5896 /* We allow the constant to overflow with wrapping semantics. */
5897 if (op1 == 0
5898 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5899 break;
5901 else
5902 break;
5904 /* If we have an unsigned type that is not a sizetype, we cannot widen
5905 the operation since it will change the result if the original
5906 computation overflowed. */
5907 if (TYPE_UNSIGNED (ctype)
5908 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5909 && ctype != type)
5910 break;
5912 /* If we were able to eliminate our operation from the first side,
5913 apply our operation to the second side and reform the PLUS. */
5914 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5915 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5917 /* The last case is when this is a multiplication. In that case, we can
5918 apply the distributive law to commute the multiply and addition
5919 if the multiplication of the constants doesn't overflow. */
5920 if (code == MULT_EXPR)
5921 return fold_build2 (tcode, ctype,
5922 fold_build2 (code, ctype,
5923 fold_convert (ctype, op0),
5924 fold_convert (ctype, c)),
5925 op1);
5927 break;
5929 case MULT_EXPR:
5930 /* We have a special case here if we are doing something like
5931 (C * 8) % 4 since we know that's zero. */
5932 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5933 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5934 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5935 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5936 return omit_one_operand (type, integer_zero_node, op0);
5938 /* ... fall through ... */
5940 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5941 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5942 /* If we can extract our operation from the LHS, do so and return a
5943 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5944 do something only if the second operand is a constant. */
5945 if (same_p
5946 && (t1 = extract_muldiv (op0, c, code, wide_type,
5947 strict_overflow_p)) != 0)
5948 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5949 fold_convert (ctype, op1));
5950 else if (tcode == MULT_EXPR && code == MULT_EXPR
5951 && (t1 = extract_muldiv (op1, c, code, wide_type,
5952 strict_overflow_p)) != 0)
5953 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5954 fold_convert (ctype, t1));
5955 else if (TREE_CODE (op1) != INTEGER_CST)
5956 return 0;
5958 /* If these are the same operation types, we can associate them
5959 assuming no overflow. */
5960 if (tcode == code
5961 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5962 fold_convert (ctype, c), 1))
5963 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5964 TREE_INT_CST_HIGH (t1),
5965 (TYPE_UNSIGNED (ctype)
5966 && tcode != MULT_EXPR) ? -1 : 1,
5967 TREE_OVERFLOW (t1)))
5968 && !TREE_OVERFLOW (t1))
5969 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5971 /* If these operations "cancel" each other, we have the main
5972 optimizations of this pass, which occur when either constant is a
5973 multiple of the other, in which case we replace this with either an
5974 operation of CODE or TCODE.
5976 If we have an unsigned type that is not a sizetype, we cannot do
5977 this since it will change the result if the original computation
5978 overflowed. */
5979 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5980 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5981 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5982 || (tcode == MULT_EXPR
5983 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5984 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5985 && code != MULT_EXPR)))
5987 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
5994 op1, c, 0)));
5996 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5999 *strict_overflow_p = true;
6000 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6001 fold_convert (ctype,
6002 const_binop (TRUNC_DIV_EXPR,
6003 c, op1, 0)));
6006 break;
6008 default:
6009 break;
6012 return 0;
6015 /* Return a node which has the indicated constant VALUE (either 0 or
6016 1), and is of the indicated TYPE. */
6018 tree
6019 constant_boolean_node (int value, tree type)
6021 if (type == integer_type_node)
6022 return value ? integer_one_node : integer_zero_node;
6023 else if (type == boolean_type_node)
6024 return value ? boolean_true_node : boolean_false_node;
6025 else
6026 return build_int_cst (type, value);
6030 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6031 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6032 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6033 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6034 COND is the first argument to CODE; otherwise (as in the example
6035 given here), it is the second argument. TYPE is the type of the
6036 original expression. Return NULL_TREE if no simplification is
6037 possible. */
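/* For illustration (editorial): with a constant ARG, the transform turns
   2 + (b ? 3 : 4) into b ? 5 : 6, so each branch can fold independently;
   the TREE_CONSTANT check below is what keeps us from duplicating a
   non-trivial ARG into both arms.  */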
6039 static tree
6040 fold_binary_op_with_conditional_arg (enum tree_code code,
6041 tree type, tree op0, tree op1,
6042 tree cond, tree arg, int cond_first_p)
6044 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6045 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6046 tree test, true_value, false_value;
6047 tree lhs = NULL_TREE;
6048 tree rhs = NULL_TREE;
6050 /* This transformation is only worthwhile if we don't have to wrap
6051 arg in a SAVE_EXPR, and the operation can be simplified on at least
6052 one of the branches once it is pushed inside the COND_EXPR. */
6053 if (!TREE_CONSTANT (arg))
6054 return NULL_TREE;
6056 if (TREE_CODE (cond) == COND_EXPR)
6058 test = TREE_OPERAND (cond, 0);
6059 true_value = TREE_OPERAND (cond, 1);
6060 false_value = TREE_OPERAND (cond, 2);
6061 /* If this operand is a void expression (such as a throw
6062 expression), then it does not make sense to try to perform a
6063 logical or arithmetic operation involving it. */
6064 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6065 lhs = true_value;
6066 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6067 rhs = false_value;
6069 else
6071 tree testtype = TREE_TYPE (cond);
6072 test = cond;
6073 true_value = constant_boolean_node (true, testtype);
6074 false_value = constant_boolean_node (false, testtype);
6077 arg = fold_convert (arg_type, arg);
6078 if (lhs == 0)
6080 true_value = fold_convert (cond_type, true_value);
6081 if (cond_first_p)
6082 lhs = fold_build2 (code, type, true_value, arg);
6083 else
6084 lhs = fold_build2 (code, type, arg, true_value);
6086 if (rhs == 0)
6088 false_value = fold_convert (cond_type, false_value);
6089 if (cond_first_p)
6090 rhs = fold_build2 (code, type, false_value, arg);
6091 else
6092 rhs = fold_build2 (code, type, arg, false_value);
6095 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6096 return fold_convert (type, test);
6100 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6102 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6103 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6104 ADDEND is the same as X.
6106 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6107 and finite. The problematic cases are when X is zero, and its mode
6108 has signed zeros. In the case of rounding towards -infinity,
6109 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6110 modes, X + 0 is not the same as X because -0 + 0 is 0. */
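/* Worked instance (editorial): for X + (-0.0) with signed zeros honored,
   the code below flips NEGATE so the fold is judged as X - 0.0, which is
   safe whenever sign-dependent rounding is not in effect; X + (+0.0) is
   rejected because it would map X == -0.0 to +0.0.  */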
6112 bool
6113 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6115 if (!real_zerop (addend))
6116 return false;
6118 /* Don't allow the fold with -fsignaling-nans. */
6119 if (HONOR_SNANS (TYPE_MODE (type)))
6120 return false;
6122 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6123 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6124 return true;
6126 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6127 if (TREE_CODE (addend) == REAL_CST
6128 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6129 negate = !negate;
6131 /* The mode has signed zeros, and we have to honor their sign.
6132 In this situation, there is only one case we can return true for.
6133 X - 0 is the same as X unless rounding towards -infinity is
6134 supported. */
6135 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6138 /* Subroutine of fold() that checks comparisons of built-in math
6139 functions against real constants.
6141 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6142 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6143 is the type of the result and ARG0 and ARG1 are the operands of the
6144 comparison. ARG1 must be a TREE_REAL_CST.
6146 The function returns the constant folded tree if a simplification
6147 can be made, and NULL_TREE otherwise. */
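/* Concrete instance (editorial): for sqrt(x) > 2.0 in double, c2 below
   becomes 4.0 and the comparison folds to x > 4.0; the REAL_VALUE_ISINF
   check guards the case where squaring C overflows to infinity.  */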
6149 static tree
6150 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6151 tree type, tree arg0, tree arg1)
6153 REAL_VALUE_TYPE c;
6155 if (BUILTIN_SQRT_P (fcode))
6157 tree arg = CALL_EXPR_ARG (arg0, 0);
6158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6160 c = TREE_REAL_CST (arg1);
6161 if (REAL_VALUE_NEGATIVE (c))
6163 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
6164 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6165 return omit_one_operand (type, integer_zero_node, arg);
6167 /* sqrt(x) > y is always true, if y is negative and we
6168 don't care about NaNs, i.e. negative values of x. */
6169 if (code == NE_EXPR || !HONOR_NANS (mode))
6170 return omit_one_operand (type, integer_one_node, arg);
6172 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6173 return fold_build2 (GE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg), dconst0));
6176 else if (code == GT_EXPR || code == GE_EXPR)
6178 REAL_VALUE_TYPE c2;
6180 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6181 real_convert (&c2, mode, &c2);
6183 if (REAL_VALUE_ISINF (c2))
6185 /* sqrt(x) > y is x == +Inf, when y is very large. */
6186 if (HONOR_INFINITIES (mode))
6187 return fold_build2 (EQ_EXPR, type, arg,
6188 build_real (TREE_TYPE (arg), c2));
6190 /* sqrt(x) > y is always false, when y is very large
6191 and we don't care about infinities. */
6192 return omit_one_operand (type, integer_zero_node, arg);
6195 /* sqrt(x) > c is the same as x > c*c. */
6196 return fold_build2 (code, type, arg,
6197 build_real (TREE_TYPE (arg), c2));
6199 else if (code == LT_EXPR || code == LE_EXPR)
6201 REAL_VALUE_TYPE c2;
6203 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6204 real_convert (&c2, mode, &c2);
6206 if (REAL_VALUE_ISINF (c2))
6208 /* sqrt(x) < y is always true, when y is a very large
6209 value and we don't care about NaNs or Infinities. */
6210 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6211 return omit_one_operand (type, integer_one_node, arg);
6213 /* sqrt(x) < y is x != +Inf when y is very large and we
6214 don't care about NaNs. */
6215 if (! HONOR_NANS (mode))
6216 return fold_build2 (NE_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6219 /* sqrt(x) < y is x >= 0 when y is very large and we
6220 don't care about Infinities. */
6221 if (! HONOR_INFINITIES (mode))
6222 return fold_build2 (GE_EXPR, type, arg,
6223 build_real (TREE_TYPE (arg), dconst0));
6225 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6226 if (lang_hooks.decls.global_bindings_p () != 0
6227 || CONTAINS_PLACEHOLDER_P (arg))
6228 return NULL_TREE;
6230 arg = save_expr (arg);
6231 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6232 fold_build2 (GE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg),
6234 dconst0)),
6235 fold_build2 (NE_EXPR, type, arg,
6236 build_real (TREE_TYPE (arg),
6237 c2)));
6240 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return fold_build2 (code, type, arg,
6243 build_real (TREE_TYPE (arg), c2));
6245 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6246 if (lang_hooks.decls.global_bindings_p () == 0
6247 && ! CONTAINS_PLACEHOLDER_P (arg))
6249 arg = save_expr (arg);
6250 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6251 fold_build2 (GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg),
6253 dconst0)),
6254 fold_build2 (code, type, arg,
6255 build_real (TREE_TYPE (arg),
6256 c2)));
6261 return NULL_TREE;
6264 /* Subroutine of fold() that optimizes comparisons against Infinities,
6265 either +Inf or -Inf.
6267 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6268 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6269 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6271 The function returns the constant folded tree if a simplification
6272 can be made, and NULL_TREE otherwise. */
6274 static tree
6275 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6277 enum machine_mode mode;
6278 REAL_VALUE_TYPE max;
6279 tree temp;
6280 bool neg;
6282 mode = TYPE_MODE (TREE_TYPE (arg0));
6284 /* For negative infinity swap the sense of the comparison. */
6285 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6286 if (neg)
6287 code = swap_tree_comparison (code);
6289 switch (code)
6291 case GT_EXPR:
6292 /* x > +Inf is always false, if we ignore sNaNs. */
6293 if (HONOR_SNANS (mode))
6294 return NULL_TREE;
6295 return omit_one_operand (type, integer_zero_node, arg0);
6297 case LE_EXPR:
6298 /* x <= +Inf is always true, if we don't care about NaNs. */
6299 if (! HONOR_NANS (mode))
6300 return omit_one_operand (type, integer_one_node, arg0);
6302 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6303 if (lang_hooks.decls.global_bindings_p () == 0
6304 && ! CONTAINS_PLACEHOLDER_P (arg0))
6306 arg0 = save_expr (arg0);
6307 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6309 break;
6311 case EQ_EXPR:
6312 case GE_EXPR:
6313 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6314 real_maxval (&max, neg, mode);
6315 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6316 arg0, build_real (TREE_TYPE (arg0), max));
6318 case LT_EXPR:
6319 /* x < +Inf is always equal to x <= DBL_MAX. */
6320 real_maxval (&max, neg, mode);
6321 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6322 arg0, build_real (TREE_TYPE (arg0), max));
6324 case NE_EXPR:
6325 /* x != +Inf is always equal to !(x > DBL_MAX). */
6326 real_maxval (&max, neg, mode);
6327 if (! HONOR_NANS (mode))
6328 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6329 arg0, build_real (TREE_TYPE (arg0), max));
6331 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6333 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6335 default:
6336 break;
6339 return NULL_TREE;
6342 /* Subroutine of fold() that optimizes comparisons of a division by
6343 a nonzero integer constant against an integer constant, i.e.
6344 X/C1 op C2.
6346 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6347 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6348 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6350 The function returns the constant folded tree if a simplification
6351 can be made, and NULL_TREE otherwise. */
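/* Worked example (editorial): for signed x, x / 4 == 5 holds exactly for
   x in [20, 23], so the EQ_EXPR case below emits the range check
   build_range_check (type, x, 1, 20, 23); an overflow of LO or HI in the
   bound computations degenerates the range into a single bound.  */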
6353 static tree
6354 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6356 tree prod, tmp, hi, lo;
6357 tree arg00 = TREE_OPERAND (arg0, 0);
6358 tree arg01 = TREE_OPERAND (arg0, 1);
6359 unsigned HOST_WIDE_INT lpart;
6360 HOST_WIDE_INT hpart;
6361 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6362 bool neg_overflow;
6363 int overflow;
6365 /* We have to do this the hard way to detect unsigned overflow.
6366 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6367 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6368 TREE_INT_CST_HIGH (arg01),
6369 TREE_INT_CST_LOW (arg1),
6370 TREE_INT_CST_HIGH (arg1),
6371 &lpart, &hpart, unsigned_p);
6372 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6373 -1, overflow);
6374 neg_overflow = false;
6376 if (unsigned_p)
6378 tmp = int_const_binop (MINUS_EXPR, arg01,
6379 build_int_cst (TREE_TYPE (arg01), 1), 0);
6380 lo = prod;
6382 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6383 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6384 TREE_INT_CST_HIGH (prod),
6385 TREE_INT_CST_LOW (tmp),
6386 TREE_INT_CST_HIGH (tmp),
6387 &lpart, &hpart, unsigned_p);
6388 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6389 -1, overflow | TREE_OVERFLOW (prod));
6391 else if (tree_int_cst_sgn (arg01) >= 0)
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1), 0);
6395 switch (tree_int_cst_sgn (arg1))
6397 case -1:
6398 neg_overflow = true;
6399 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6400 hi = prod;
6401 break;
6403 case 0:
6404 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6405 hi = tmp;
6406 break;
6408 case 1:
6409 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6410 lo = prod;
6411 break;
6413 default:
6414 gcc_unreachable ();
6417 else
6419 /* A negative divisor reverses the relational operators. */
6420 code = swap_tree_comparison (code);
6422 tmp = int_const_binop (PLUS_EXPR, arg01,
6423 build_int_cst (TREE_TYPE (arg01), 1), 0);
6424 switch (tree_int_cst_sgn (arg1))
6426 case -1:
6427 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6428 lo = prod;
6429 break;
6431 case 0:
6432 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6433 lo = tmp;
6434 break;
6436 case 1:
6437 neg_overflow = true;
6438 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6439 hi = prod;
6440 break;
6442 default:
6443 gcc_unreachable ();
6447 switch (code)
6449 case EQ_EXPR:
6450 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6451 return omit_one_operand (type, integer_zero_node, arg00);
6452 if (TREE_OVERFLOW (hi))
6453 return fold_build2 (GE_EXPR, type, arg00, lo);
6454 if (TREE_OVERFLOW (lo))
6455 return fold_build2 (LE_EXPR, type, arg00, hi);
6456 return build_range_check (type, arg00, 1, lo, hi);
6458 case NE_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand (type, integer_one_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2 (LT_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2 (GT_EXPR, type, arg00, hi);
6465 return build_range_check (type, arg00, 0, lo, hi);
6467 case LT_EXPR:
6468 if (TREE_OVERFLOW (lo))
6470 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6471 return omit_one_operand (type, tmp, arg00);
6473 return fold_build2 (LT_EXPR, type, arg00, lo);
6475 case LE_EXPR:
6476 if (TREE_OVERFLOW (hi))
6478 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6479 return omit_one_operand (type, tmp, arg00);
6481 return fold_build2 (LE_EXPR, type, arg00, hi);
6483 case GT_EXPR:
6484 if (TREE_OVERFLOW (hi))
6486 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6487 return omit_one_operand (type, tmp, arg00);
6489 return fold_build2 (GT_EXPR, type, arg00, hi);
6491 case GE_EXPR:
6492 if (TREE_OVERFLOW (lo))
6494 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6495 return omit_one_operand (type, tmp, arg00);
6497 return fold_build2 (GE_EXPR, type, arg00, lo);
6499 default:
6500 break;
6503 return NULL_TREE;
6507 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6508 equality/inequality test, then return a simplified form of the test
6509 using a sign test. Otherwise return NULL. TYPE is the desired
6510 result type. */
6512 static tree
6513 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6514 tree result_type)
6516 /* If this is testing a single bit, we can optimize the test. */
6517 if ((code == NE_EXPR || code == EQ_EXPR)
6518 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6519 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6521 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6522 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6523 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6525 if (arg00 != NULL_TREE
6526 /* This is only a win if casting to a signed type is cheap,
6527 i.e. when arg00's type is not a partial mode. */
6528 && TYPE_PRECISION (TREE_TYPE (arg00))
6529 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6531 tree stype = signed_type_for (TREE_TYPE (arg00));
6532 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6533 result_type, fold_convert (stype, arg00),
6534 build_int_cst (stype, 0));
6538 return NULL_TREE;
6541 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of
6543 the test using shifts and logical operations. Otherwise return
6544 NULL. TYPE is the desired result type. */
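/* Concrete instance (editorial): (a & 8) != 0 first tries the sign test
   (which succeeds only when 8 is the sign bit of a's type) and otherwise
   becomes ((a >> 3) & 1) in a typically unsigned intermediate type; for
   the == 0 form an XOR with 1 is inserted before the final AND.  */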
6546 tree
6547 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6548 tree result_type)
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 tree inner = TREE_OPERAND (arg0, 0);
6556 tree type = TREE_TYPE (arg0);
6557 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6558 enum machine_mode operand_mode = TYPE_MODE (type);
6559 int ops_unsigned;
6560 tree signed_type, unsigned_type, intermediate_type;
6561 tree tem, one;
6563 /* First, see if we can fold the single bit test into a sign-bit
6564 test. */
6565 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6566 result_type);
6567 if (tem)
6568 return tem;
6570 /* Otherwise we have (A & C) != 0 where C is a single bit,
6571 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6572 Similarly for (A & C) == 0. */
6574 /* If INNER is a right shift of a constant and it plus BITNUM does
6575 not overflow, adjust BITNUM and INNER. */
6576 if (TREE_CODE (inner) == RSHIFT_EXPR
6577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6578 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6579 && bitnum < TYPE_PRECISION (type)
6580 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6581 bitnum - TYPE_PRECISION (type)))
6583 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
6590 #ifdef LOAD_EXTEND_OP
6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6592 && !flag_syntax_only) ? 0 : 1;
6593 #else
6594 ops_unsigned = 1;
6595 #endif
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6600 inner = fold_convert (intermediate_type, inner);
6602 if (bitnum != 0)
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
6606 one = build_int_cst (intermediate_type, 1);
6608 if (code == EQ_EXPR)
6609 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6611 /* Put the AND last so it can combine with more things. */
6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614 /* Make sure to return the proper type. */
6615 inner = fold_convert (result_type, inner);
6617 return inner;
6619 return NULL_TREE;
6622 /* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6625 static bool
6626 reorder_operands_p (const_tree arg0, const_tree arg1)
6628 if (! flag_evaluation_order)
6629 return true;
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6631 return true;
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6636 /* Test whether it is preferable to swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
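/* For illustration (editorial): with arg0 == 5 and arg1 == x, the
   INTEGER_CST check on arg0 returns 1, which is how fold canonicalizes
   5 + x into x + 5 with the constant as the second operand.  */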
6641 bool
6642 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6648 return 0;
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6650 return 1;
6652 if (TREE_CODE (arg1) == REAL_CST)
6653 return 0;
6654 if (TREE_CODE (arg0) == REAL_CST)
6655 return 1;
6657 if (TREE_CODE (arg1) == FIXED_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == FIXED_CST)
6660 return 1;
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6665 return 1;
6667 if (TREE_CONSTANT (arg1))
6668 return 0;
6669 if (TREE_CONSTANT (arg0))
6670 return 1;
6672 if (optimize_size)
6673 return 0;
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6677 return 0;
6679 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6686 return 1;
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6690 return 0;
6691 if (TREE_CODE (arg0) == SSA_NAME)
6692 return 1;
6694 /* Put variables last. */
6695 if (DECL_P (arg1))
6696 return 0;
6697 if (DECL_P (arg0))
6698 return 1;
6700 return 0;
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
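/* Concrete instance (editorial): for unsigned char uc, the test
   (int) uc == 200 satisfies the first branch below (the constant fits
   the narrow type), so it is rewritten as the cheaper
   uc == (unsigned char) 200; the above/below logic afterwards is meant
   to decide comparisons whose constant lies outside the narrow type's
   range.  */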
6706 static tree
6707 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6709 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6710 tree arg1_unw;
6711 tree shorter_type, outer_type;
6712 tree min, max;
6713 bool above, below;
6715 if (arg0_unw == arg0)
6716 return NULL_TREE;
6717 shorter_type = TREE_TYPE (arg0_unw);
6719 #ifdef HAVE_canonicalize_funcptr_for_compare
6720 /* Disable this optimization if we're casting a function pointer
6721 type on targets that require function pointer canonicalization. */
6722 if (HAVE_canonicalize_funcptr_for_compare
6723 && TREE_CODE (shorter_type) == POINTER_TYPE
6724 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6725 return NULL_TREE;
6726 #endif
6728 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6729 return NULL_TREE;
6731 arg1_unw = get_unwidened (arg1, NULL_TREE);
6733 /* If possible, express the comparison in the shorter mode. */
6734 if ((code == EQ_EXPR || code == NE_EXPR
6735 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6736 && (TREE_TYPE (arg1_unw) == shorter_type
6737 || (TYPE_PRECISION (shorter_type)
6738 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6739 || ((TYPE_PRECISION (shorter_type)
6740 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6741 && (TYPE_UNSIGNED (shorter_type)
6742 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6743 || (TREE_CODE (arg1_unw) == INTEGER_CST
6744 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6745 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6746 && int_fits_type_p (arg1_unw, shorter_type))))
6747 return fold_build2 (code, type, arg0_unw,
6748 fold_convert (shorter_type, arg1_unw));
6750 if (TREE_CODE (arg1_unw) != INTEGER_CST
6751 || TREE_CODE (shorter_type) != INTEGER_TYPE
6752 || !int_fits_type_p (arg1_unw, shorter_type))
6753 return NULL_TREE;
6755 /* If we are comparing with an integer that does not fit into the range
6756 of the shorter type, the result is known. */
6757 outer_type = TREE_TYPE (arg1_unw);
6758 min = lower_bound_in_type (outer_type, shorter_type);
6759 max = upper_bound_in_type (outer_type, shorter_type);
6761 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6762 max, arg1_unw));
6763 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6764 arg1_unw, min));
6766 switch (code)
6768 case EQ_EXPR:
6769 if (above || below)
6770 return omit_one_operand (type, integer_zero_node, arg0);
6771 break;
6773 case NE_EXPR:
6774 if (above || below)
6775 return omit_one_operand (type, integer_one_node, arg0);
6776 break;
6778 case LT_EXPR:
6779 case LE_EXPR:
6780 if (above)
6781 return omit_one_operand (type, integer_one_node, arg0);
6782 else if (below)
6783 return omit_one_operand (type, integer_zero_node, arg0);
6785 case GT_EXPR:
6786 case GE_EXPR:
6787 if (above)
6788 return omit_one_operand (type, integer_zero_node, arg0);
6789 else if (below)
6790 return omit_one_operand (type, integer_one_node, arg0);
6792 default:
6793 break;
6796 return NULL_TREE;
6799 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6800 ARG0 just the signedness is changed. */
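/* Concrete instance (editorial): for unsigned int u, the test
   (int) u == 5 has equal precision on both sides of the cast, so it is
   rewritten as u == 5u, with the constant refitted into the inner
   type.  */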
6802 static tree
6803 fold_sign_changed_comparison (enum tree_code code, tree type,
6804 tree arg0, tree arg1)
6806 tree arg0_inner;
6807 tree inner_type, outer_type;
6809 if (!CONVERT_EXPR_P (arg0))
6810 return NULL_TREE;
6812 outer_type = TREE_TYPE (arg0);
6813 arg0_inner = TREE_OPERAND (arg0, 0);
6814 inner_type = TREE_TYPE (arg0_inner);
6816 #ifdef HAVE_canonicalize_funcptr_for_compare
6817 /* Disable this optimization if we're casting a function pointer
6818 type on targets that require function pointer canonicalization. */
6819 if (HAVE_canonicalize_funcptr_for_compare
6820 && TREE_CODE (inner_type) == POINTER_TYPE
6821 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6822 return NULL_TREE;
6823 #endif
6825 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6826 return NULL_TREE;
6828 /* If the conversion is from an integral subtype to its basetype
6829 leave it alone. */
6830 if (TREE_TYPE (inner_type) == outer_type)
6831 return NULL_TREE;
6833 if (TREE_CODE (arg1) != INTEGER_CST
6834 && !(CONVERT_EXPR_P (arg1)
6835 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6836 return NULL_TREE;
6838 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6839 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6840 && code != NE_EXPR
6841 && code != EQ_EXPR)
6842 return NULL_TREE;
6844 if (TREE_CODE (arg1) == INTEGER_CST)
6845 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6846 TREE_INT_CST_HIGH (arg1), 0,
6847 TREE_OVERFLOW (arg1));
6848 else
6849 arg1 = fold_convert (inner_type, arg1);
6851 return fold_build2 (code, type, arg0_inner, arg1);
6854 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6855 step of the array. Reconstructs s and delta in the case of s * delta
6856 being an integer constant (and thus already folded).
6857 ADDR is the address. OP1 is the multiplicative expression.
6858 If the function succeeds, the new address expression is returned. Otherwise
6859 NULL_TREE is returned. */
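/* Worked example (editorial): for int a[10] with 4-byte elements, the
   address &a[1] p+ i * 4 matches the ARRAY_REF step and is rewritten as
   &a[1 + i]; for multi-dimensional arrays the extra check below
   additionally requires a constant new index that provably stays within
   the dimension's domain.  */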
6861 static tree
6862 try_move_mult_to_index (tree addr, tree op1)
6864 tree s, delta, step;
6865 tree ref = TREE_OPERAND (addr, 0), pref;
6866 tree ret, pos;
6867 tree itype;
6868 bool mdim = false;
6870 /* Strip the nops that might be added when converting op1 to sizetype. */
6871 STRIP_NOPS (op1);
6873 /* Canonicalize op1 into a possibly non-constant delta
6874 and an INTEGER_CST s. */
6875 if (TREE_CODE (op1) == MULT_EXPR)
6877 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6879 STRIP_NOPS (arg0);
6880 STRIP_NOPS (arg1);
6882 if (TREE_CODE (arg0) == INTEGER_CST)
6884 s = arg0;
6885 delta = arg1;
6887 else if (TREE_CODE (arg1) == INTEGER_CST)
6889 s = arg1;
6890 delta = arg0;
6892 else
6893 return NULL_TREE;
6895 else if (TREE_CODE (op1) == INTEGER_CST)
6897 delta = op1;
6898 s = NULL_TREE;
6900 else
6902 /* Treat op1 as delta * 1. */
6903 delta = op1;
6904 s = integer_one_node;
6907 for (;; ref = TREE_OPERAND (ref, 0))
6909 if (TREE_CODE (ref) == ARRAY_REF)
6911 /* Remember if this was a multi-dimensional array. */
6912 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6913 mdim = true;
6915 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6916 if (! itype)
6917 continue;
6919 step = array_ref_element_size (ref);
6920 if (TREE_CODE (step) != INTEGER_CST)
6921 continue;
6923 if (s)
6925 if (! tree_int_cst_equal (step, s))
6926 continue;
6928 else
6930 /* Check whether delta is a multiple of step. */
6931 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6932 if (! tmp)
6933 continue;
6934 delta = tmp;
6937 /* Only fold here if we can verify we do not overflow one
6938 dimension of a multi-dimensional array. */
6939 if (mdim)
6941 tree tmp;
6943 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6944 || !INTEGRAL_TYPE_P (itype)
6945 || !TYPE_MAX_VALUE (itype)
6946 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6947 continue;
6949 tmp = fold_binary (PLUS_EXPR, itype,
6950 fold_convert (itype,
6951 TREE_OPERAND (ref, 1)),
6952 fold_convert (itype, delta));
6953 if (!tmp
6954 || TREE_CODE (tmp) != INTEGER_CST
6955 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6956 continue;
6959 break;
6961 else
6962 mdim = false;
6964 if (!handled_component_p (ref))
6965 return NULL_TREE;
6968 /* We found a suitable array reference. Copy everything up to it,
6969 and replace the index. */
6971 pref = TREE_OPERAND (addr, 0);
6972 ret = copy_node (pref);
6973 pos = ret;
6975 while (pref != ref)
6977 pref = TREE_OPERAND (pref, 0);
6978 TREE_OPERAND (pos, 0) = copy_node (pref);
6979 pos = TREE_OPERAND (pos, 0);
6982 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6983 fold_convert (itype,
6984 TREE_OPERAND (pos, 1)),
6985 fold_convert (itype, delta));
6987 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6991 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6992 means A >= Y && A != MAX, but in this case we know that
6993 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
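/* Reasoning note (editorial): BOUND guarantees A < X, so A cannot be the
   maximal value of its type and A + 1 does not wrap; under that
   guarantee A + 1 > Y is equivalent to A >= Y, which is the comparison
   built below.  */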
6995 static tree
6996 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6998 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7000 if (TREE_CODE (bound) == LT_EXPR)
7001 a = TREE_OPERAND (bound, 0);
7002 else if (TREE_CODE (bound) == GT_EXPR)
7003 a = TREE_OPERAND (bound, 1);
7004 else
7005 return NULL_TREE;
7007 typea = TREE_TYPE (a);
7008 if (!INTEGRAL_TYPE_P (typea)
7009 && !POINTER_TYPE_P (typea))
7010 return NULL_TREE;
7012 if (TREE_CODE (ineq) == LT_EXPR)
7014 a1 = TREE_OPERAND (ineq, 1);
7015 y = TREE_OPERAND (ineq, 0);
7017 else if (TREE_CODE (ineq) == GT_EXPR)
7019 a1 = TREE_OPERAND (ineq, 0);
7020 y = TREE_OPERAND (ineq, 1);
7022 else
7023 return NULL_TREE;
7025 if (TREE_TYPE (a1) != typea)
7026 return NULL_TREE;
7028 if (POINTER_TYPE_P (typea))
7030 /* Convert the pointers to integers before taking the difference. */
7031 tree ta = fold_convert (ssizetype, a);
7032 tree ta1 = fold_convert (ssizetype, a1);
7033 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7035 else
7036 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7038 if (!diff || !integer_onep (diff))
7039 return NULL_TREE;
7041 return fold_build2 (GE_EXPR, type, a, y);
7044 /* Fold a sum or difference of at least one multiplication.
7045 Returns the folded tree or NULL if no simplification could be made. */
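/* Worked examples (editorial): a*4 + b*4 becomes (a + b) * 4 via the
   identical-multiplicand checks; a*12 + b*4 has no identical factor, but
   since 4 is a power of two dividing 12, the power-of-two branch
   rewrites it as (a*3 + b) * 4.  */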
7047 static tree
7048 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7050 tree arg00, arg01, arg10, arg11;
7051 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7053 /* (A * C) +- (B * C) -> (A+-B) * C.
7054 (A * C) +- A -> A * (C+-1).
7055 We are most concerned about the case where C is a constant,
7056 but other combinations show up during loop reduction. Since
7057 it is not difficult, try all four possibilities. */
7059 if (TREE_CODE (arg0) == MULT_EXPR)
7061 arg00 = TREE_OPERAND (arg0, 0);
7062 arg01 = TREE_OPERAND (arg0, 1);
7064 else if (TREE_CODE (arg0) == INTEGER_CST)
7066 arg00 = build_one_cst (type);
7067 arg01 = arg0;
7069 else
7071 /* We cannot generate constant 1 for fract. */
7072 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7073 return NULL_TREE;
7074 arg00 = arg0;
7075 arg01 = build_one_cst (type);
7077 if (TREE_CODE (arg1) == MULT_EXPR)
7079 arg10 = TREE_OPERAND (arg1, 0);
7080 arg11 = TREE_OPERAND (arg1, 1);
7082 else if (TREE_CODE (arg1) == INTEGER_CST)
7084 arg10 = build_one_cst (type);
7085 arg11 = arg1;
7087 else
7089 /* We cannot generate constant 1 for fract. */
7090 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7091 return NULL_TREE;
7092 arg10 = arg1;
7093 arg11 = build_one_cst (type);
7095 same = NULL_TREE;
7097 if (operand_equal_p (arg01, arg11, 0))
7098 same = arg01, alt0 = arg00, alt1 = arg10;
7099 else if (operand_equal_p (arg00, arg10, 0))
7100 same = arg00, alt0 = arg01, alt1 = arg11;
7101 else if (operand_equal_p (arg00, arg11, 0))
7102 same = arg00, alt0 = arg01, alt1 = arg10;
7103 else if (operand_equal_p (arg01, arg10, 0))
7104 same = arg01, alt0 = arg00, alt1 = arg11;
7106 /* No identical multiplicands; see if we can find a common
7107 power-of-two factor in non-power-of-two multiplies. This
7108 can help in multi-dimensional array access. */
7109 else if (host_integerp (arg01, 0)
7110 && host_integerp (arg11, 0))
7112 HOST_WIDE_INT int01, int11, tmp;
7113 bool swap = false;
7114 tree maybe_same;
7115 int01 = TREE_INT_CST_LOW (arg01);
7116 int11 = TREE_INT_CST_LOW (arg11);
7118 /* Move min of absolute values to int11. */
7119 if ((int01 >= 0 ? int01 : -int01)
7120 < (int11 >= 0 ? int11 : -int11))
7122 tmp = int01, int01 = int11, int11 = tmp;
7123 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7124 maybe_same = arg01;
7125 swap = true;
7127 else
7128 maybe_same = arg11;
7130 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7132 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7133 build_int_cst (TREE_TYPE (arg00),
7134 int01 / int11));
7135 alt1 = arg10;
7136 same = maybe_same;
7137 if (swap)
7138 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7142 if (same)
7143 return fold_build2 (MULT_EXPR, type,
7144 fold_build2 (code, type,
7145 fold_convert (type, alt0),
7146 fold_convert (type, alt1)),
7147 fold_convert (type, same));
7149 return NULL_TREE;
7152 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7153 specified by EXPR into the buffer PTR of length LEN bytes.
7154 Return the number of bytes placed in the buffer, or zero
7155 upon failure. */
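/* Byte-layout note (editorial): on a 32-bit little-endian target the
   INTEGER_CST 0x01020304 is written into PTR as the bytes 04 03 02 01;
   the word/byte shuffling below exists so the same loop also produces
   the correct layout on big-endian and mixed-endian
   (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) targets.  */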
7157 static int
7158 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7160 tree type = TREE_TYPE (expr);
7161 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7162 int byte, offset, word, words;
7163 unsigned char value;
7165 if (total_bytes > len)
7166 return 0;
7167 words = total_bytes / UNITS_PER_WORD;
7169 for (byte = 0; byte < total_bytes; byte++)
7171 int bitpos = byte * BITS_PER_UNIT;
7172 if (bitpos < HOST_BITS_PER_WIDE_INT)
7173 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7174 else
7175 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7176 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7178 if (total_bytes > UNITS_PER_WORD)
7180 word = byte / UNITS_PER_WORD;
7181 if (WORDS_BIG_ENDIAN)
7182 word = (words - 1) - word;
7183 offset = word * UNITS_PER_WORD;
7184 if (BYTES_BIG_ENDIAN)
7185 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7186 else
7187 offset += byte % UNITS_PER_WORD;
7189 else
7190 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7191 ptr[offset] = value;
7193 return total_bytes;
7197 /* Subroutine of native_encode_expr. Encode the REAL_CST
7198 specified by EXPR into the buffer PTR of length LEN bytes.
7199 Return the number of bytes placed in the buffer, or zero
7200 upon failure. */
7202 static int
7203 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7205 tree type = TREE_TYPE (expr);
7206 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7207 int byte, offset, word, words, bitpos;
7208 unsigned char value;
7210 /* There are always 32 bits in each long, no matter the size of
7211 the host's long. We handle floating point representations with
7212 up to 192 bits. */
7213 long tmp[6];
7215 if (total_bytes > len)
7216 return 0;
7217 words = 32 / UNITS_PER_WORD;
7219 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7221 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7222 bitpos += BITS_PER_UNIT)
7224 byte = (bitpos / BITS_PER_UNIT) & 3;
7225 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7227 if (UNITS_PER_WORD < 4)
7229 word = byte / UNITS_PER_WORD;
7230 if (WORDS_BIG_ENDIAN)
7231 word = (words - 1) - word;
7232 offset = word * UNITS_PER_WORD;
7233 if (BYTES_BIG_ENDIAN)
7234 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7235 else
7236 offset += byte % UNITS_PER_WORD;
7238 else
7239 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7240 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7242 return total_bytes;
7245 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7246 specified by EXPR into the buffer PTR of length LEN bytes.
7247 Return the number of bytes placed in the buffer, or zero
7248 upon failure. */
7250 static int
7251 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7253 int rsize, isize;
7254 tree part;
7256 part = TREE_REALPART (expr);
7257 rsize = native_encode_expr (part, ptr, len);
7258 if (rsize == 0)
7259 return 0;
7260 part = TREE_IMAGPART (expr);
7261 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7262 if (isize != rsize)
7263 return 0;
7264 return rsize + isize;
7268 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7269 specified by EXPR into the buffer PTR of length LEN bytes.
7270 Return the number of bytes placed in the buffer, or zero
7271 upon failure. */
7273 static int
7274 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7276 int i, size, offset, count;
7277 tree itype, elem, elements;
7279 offset = 0;
7280 elements = TREE_VECTOR_CST_ELTS (expr);
7281 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7282 itype = TREE_TYPE (TREE_TYPE (expr));
7283 size = GET_MODE_SIZE (TYPE_MODE (itype));
7284 for (i = 0; i < count; i++)
7286 if (elements)
7288 elem = TREE_VALUE (elements);
7289 elements = TREE_CHAIN (elements);
7291 else
7292 elem = NULL_TREE;
7294 if (elem)
7296 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7297 return 0;
7299 else
7301 if (offset + size > len)
7302 return 0;
7303 memset (ptr+offset, 0, size);
7305 offset += size;
7307 return offset;
7311 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7312 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7313 buffer PTR of length LEN bytes. Return the number of bytes
7314 placed in the buffer, or zero upon failure. */
7316 int
7317 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7319 switch (TREE_CODE (expr))
7321 case INTEGER_CST:
7322 return native_encode_int (expr, ptr, len);
7324 case REAL_CST:
7325 return native_encode_real (expr, ptr, len);
7327 case COMPLEX_CST:
7328 return native_encode_complex (expr, ptr, len);
7330 case VECTOR_CST:
7331 return native_encode_vector (expr, ptr, len);
7333 default:
7334 return 0;
7339 /* Subroutine of native_interpret_expr. Interpret the contents of
7340 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7341 If the buffer cannot be interpreted, return NULL_TREE. */
7343 static tree
7344 native_interpret_int (tree type, const unsigned char *ptr, int len)
7346 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7347 int byte, offset, word, words;
7348 unsigned char value;
7349 unsigned HOST_WIDE_INT lo = 0;
7350 HOST_WIDE_INT hi = 0;
7352 if (total_bytes > len)
7353 return NULL_TREE;
7354 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7355 return NULL_TREE;
7356 words = total_bytes / UNITS_PER_WORD;
7358 for (byte = 0; byte < total_bytes; byte++)
7360 int bitpos = byte * BITS_PER_UNIT;
7361 if (total_bytes > UNITS_PER_WORD)
7363 word = byte / UNITS_PER_WORD;
7364 if (WORDS_BIG_ENDIAN)
7365 word = (words - 1) - word;
7366 offset = word * UNITS_PER_WORD;
7367 if (BYTES_BIG_ENDIAN)
7368 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7369 else
7370 offset += byte % UNITS_PER_WORD;
7372 else
7373 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7374 value = ptr[offset];
7376 if (bitpos < HOST_BITS_PER_WIDE_INT)
7377 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7378 else
7379 hi |= (unsigned HOST_WIDE_INT) value
7380 << (bitpos - HOST_BITS_PER_WIDE_INT);
7383 return build_int_cst_wide_type (type, lo, hi);
7387 /* Subroutine of native_interpret_expr. Interpret the contents of
7388 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7389 If the buffer cannot be interpreted, return NULL_TREE. */
7391 static tree
7392 native_interpret_real (tree type, const unsigned char *ptr, int len)
7394 enum machine_mode mode = TYPE_MODE (type);
7395 int total_bytes = GET_MODE_SIZE (mode);
7396 int byte, offset, word, words, bitpos;
7397 unsigned char value;
7398 /* There are always 32 bits in each long, no matter the size of
7399 the host's long. We handle floating point representations with
7400 up to 192 bits. */
7401 REAL_VALUE_TYPE r;
7402 long tmp[6];
7404 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7405 if (total_bytes > len || total_bytes > 24)
7406 return NULL_TREE;
7407 words = 32 / UNITS_PER_WORD;
7409 memset (tmp, 0, sizeof (tmp));
7410 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7411 bitpos += BITS_PER_UNIT)
7413 byte = (bitpos / BITS_PER_UNIT) & 3;
7414 if (UNITS_PER_WORD < 4)
7416 word = byte / UNITS_PER_WORD;
7417 if (WORDS_BIG_ENDIAN)
7418 word = (words - 1) - word;
7419 offset = word * UNITS_PER_WORD;
7420 if (BYTES_BIG_ENDIAN)
7421 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7422 else
7423 offset += byte % UNITS_PER_WORD;
7425 else
7426 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7427 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7429 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7432 real_from_target (&r, tmp, mode);
7433 return build_real (type, r);
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7441 static tree
7442 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7444 tree etype, rpart, ipart;
7445 int size;
7447 etype = TREE_TYPE (type);
7448 size = GET_MODE_SIZE (TYPE_MODE (etype));
7449 if (size * 2 > len)
7450 return NULL_TREE;
7451 rpart = native_interpret_expr (etype, ptr, size);
7452 if (!rpart)
7453 return NULL_TREE;
7454 ipart = native_interpret_expr (etype, ptr+size, size);
7455 if (!ipart)
7456 return NULL_TREE;
7457 return build_complex (type, rpart, ipart);
7461 /* Subroutine of native_interpret_expr. Interpret the contents of
7462 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7463 If the buffer cannot be interpreted, return NULL_TREE. */
7465 static tree
7466 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7468 tree etype, elem, elements;
7469 int i, size, count;
7471 etype = TREE_TYPE (type);
7472 size = GET_MODE_SIZE (TYPE_MODE (etype));
7473 count = TYPE_VECTOR_SUBPARTS (type);
7474 if (size * count > len)
7475 return NULL_TREE;
7477 elements = NULL_TREE;
7478 for (i = count - 1; i >= 0; i--)
7480 elem = native_interpret_expr (etype, ptr+(i*size), size);
7481 if (!elem)
7482 return NULL_TREE;
7483 elements = tree_cons (NULL_TREE, elem, elements);
7485 return build_vector (type, elements);
7489 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7490 the buffer PTR of length LEN as a constant of type TYPE. For
7491 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7492 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7493 return NULL_TREE. */
7495 tree
7496 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7498 switch (TREE_CODE (type))
7500 case INTEGER_TYPE:
7501 case ENUMERAL_TYPE:
7502 case BOOLEAN_TYPE:
7503 return native_interpret_int (type, ptr, len);
7505 case REAL_TYPE:
7506 return native_interpret_real (type, ptr, len);
7508 case COMPLEX_TYPE:
7509 return native_interpret_complex (type, ptr, len);
7511 case VECTOR_TYPE:
7512 return native_interpret_vector (type, ptr, len);
7514 default:
7515 return NULL_TREE;
7520 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7521 TYPE at compile-time. If we're unable to perform the conversion
7522 return NULL_TREE. */
7524 static tree
7525 fold_view_convert_expr (tree type, tree expr)
7527 /* We support up to 512-bit values (for V8DFmode). */
7528 unsigned char buffer[64];
7529 int len;
7531 /* Check that the host and target are sane. */
7532 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7533 return NULL_TREE;
7535 len = native_encode_expr (expr, buffer, sizeof (buffer));
7536 if (len == 0)
7537 return NULL_TREE;
7539 return native_interpret_expr (type, buffer, len);
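/* As an illustration, on a target where int and float are both 32 bits
and float is IEEE binary32, this folds VIEW_CONVERT_EXPR<int>(1.0f)
to the INTEGER_CST 1065353216 (0x3f800000): the REAL_CST is encoded
into the buffer and the bytes are re-interpreted as an integer. */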
7542 /* Build an expression for the address of T. Folds away INDIRECT_REF
7543 to avoid confusing the gimplify process. When IN_FOLD is true
7544 avoid modifications of T. */
7546 static tree
7547 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7549 /* The size of the object is not relevant when talking about its address. */
7550 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7551 t = TREE_OPERAND (t, 0);
7553 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7554 if (TREE_CODE (t) == INDIRECT_REF
7555 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7557 t = TREE_OPERAND (t, 0);
7559 if (TREE_TYPE (t) != ptrtype)
7560 t = build1 (NOP_EXPR, ptrtype, t);
7562 else if (!in_fold)
7564 tree base = t;
7566 while (handled_component_p (base))
7567 base = TREE_OPERAND (base, 0);
7569 if (DECL_P (base))
7570 TREE_ADDRESSABLE (base) = 1;
7572 t = build1 (ADDR_EXPR, ptrtype, t);
7574 else
7575 t = build1 (ADDR_EXPR, ptrtype, t);
7577 return t;
7580 /* Build an expression for the address of T with type PTRTYPE. This
7581 function modifies the input parameter 'T' by sometimes setting the
7582 TREE_ADDRESSABLE flag. */
7584 tree
7585 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7587 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7590 /* Build an expression for the address of T. This function modifies
7591 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7592 flag. When called from fold functions, use fold_addr_expr instead. */
7594 tree
7595 build_fold_addr_expr (tree t)
7597 return build_fold_addr_expr_with_type_1 (t,
7598 build_pointer_type (TREE_TYPE (t)),
7599 false);
7602 /* Same as build_fold_addr_expr, builds an expression for the address
7603 of T, but avoids touching the input node 't'. Fold functions
7604 should use this version. */
7606 static tree
7607 fold_addr_expr (tree t)
7609 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7611 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7614 /* Fold a unary expression of code CODE and type TYPE with operand
7615 OP0. Return the folded expression if folding is successful.
7616 Otherwise, return NULL_TREE. */
7618 tree
7619 fold_unary (enum tree_code code, tree type, tree op0)
7621 tree tem;
7622 tree arg0;
7623 enum tree_code_class kind = TREE_CODE_CLASS (code);
7625 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7626 && TREE_CODE_LENGTH (code) == 1);
7628 arg0 = op0;
7629 if (arg0)
7631 if (code == NOP_EXPR || code == CONVERT_EXPR
7632 || code == FLOAT_EXPR || code == ABS_EXPR)
7634 /* Don't use STRIP_NOPS, because signedness of argument type
7635 matters. */
7636 STRIP_SIGN_NOPS (arg0);
7638 else
7640 /* Strip any conversions that don't change the mode. This
7641 is safe for every expression, except for a comparison
7642 expression because its signedness is derived from its
7643 operands.
7645 Note that this is done as an internal manipulation within
7646 the constant folder, in order to find the simplest
7647 representation of the arguments so that their form can be
7648 studied. In any case, the appropriate type conversions
7649 should be put back in the tree that will get out of the
7650 constant folder. */
7651 STRIP_NOPS (arg0);
7655 if (TREE_CODE_CLASS (code) == tcc_unary)
7657 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7658 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7659 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7660 else if (TREE_CODE (arg0) == COND_EXPR)
7662 tree arg01 = TREE_OPERAND (arg0, 1);
7663 tree arg02 = TREE_OPERAND (arg0, 2);
7664 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7665 arg01 = fold_build1 (code, type, arg01);
7666 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7667 arg02 = fold_build1 (code, type, arg02);
7668 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7669 arg01, arg02);
7671 /* If this was a conversion, and all we did was to move it
7672 inside the COND_EXPR, bring it back out. But leave it if
7673 it is a conversion from integer to integer and the
7674 result precision is no wider than a word since such a
7675 conversion is cheap and may be optimized away by combine,
7676 while it couldn't if it were outside the COND_EXPR. Then return
7677 so we don't get into an infinite recursion loop taking the
7678 conversion out and then back in. */
7680 if ((code == NOP_EXPR || code == CONVERT_EXPR
7681 || code == NON_LVALUE_EXPR)
7682 && TREE_CODE (tem) == COND_EXPR
7683 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7684 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7685 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
7686 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
7687 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7688 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7689 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7690 && (INTEGRAL_TYPE_P
7691 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7692 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7693 || flag_syntax_only))
7694 tem = build1 (code, type,
7695 build3 (COND_EXPR,
7696 TREE_TYPE (TREE_OPERAND
7697 (TREE_OPERAND (tem, 1), 0)),
7698 TREE_OPERAND (tem, 0),
7699 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7700 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7701 return tem;
7703 else if (COMPARISON_CLASS_P (arg0))
7705 if (TREE_CODE (type) == BOOLEAN_TYPE)
7707 arg0 = copy_node (arg0);
7708 TREE_TYPE (arg0) = type;
7709 return arg0;
7711 else if (TREE_CODE (type) != INTEGER_TYPE)
7712 return fold_build3 (COND_EXPR, type, arg0,
7713 fold_build1 (code, type,
7714 integer_one_node),
7715 fold_build1 (code, type,
7716 integer_zero_node));
7720 switch (code)
7722 case PAREN_EXPR:
7723 /* Re-association barriers around constants and other re-association
7724 barriers can be removed. */
7725 if (CONSTANT_CLASS_P (op0)
7726 || TREE_CODE (op0) == PAREN_EXPR)
7727 return fold_convert (type, op0);
7728 return NULL_TREE;
7730 CASE_CONVERT:
7731 case FLOAT_EXPR:
7732 case FIX_TRUNC_EXPR:
7733 if (TREE_TYPE (op0) == type)
7734 return op0;
7736 /* If we have (type) (a CMP b) and type is an integral type, return
7737 new expression involving the new type. */
7738 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7739 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7740 TREE_OPERAND (op0, 1));
7742 /* Handle cases of two conversions in a row. */
7743 if (CONVERT_EXPR_P (op0))
7745 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7746 tree inter_type = TREE_TYPE (op0);
7747 int inside_int = INTEGRAL_TYPE_P (inside_type);
7748 int inside_ptr = POINTER_TYPE_P (inside_type);
7749 int inside_float = FLOAT_TYPE_P (inside_type);
7750 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7751 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7752 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7753 int inter_int = INTEGRAL_TYPE_P (inter_type);
7754 int inter_ptr = POINTER_TYPE_P (inter_type);
7755 int inter_float = FLOAT_TYPE_P (inter_type);
7756 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7757 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7758 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7759 int final_int = INTEGRAL_TYPE_P (type);
7760 int final_ptr = POINTER_TYPE_P (type);
7761 int final_float = FLOAT_TYPE_P (type);
7762 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7763 unsigned int final_prec = TYPE_PRECISION (type);
7764 int final_unsignedp = TYPE_UNSIGNED (type);
7766 /* In addition to the cases of two conversions in a row
7767 handled below, if we are converting something to its own
7768 type via an object of identical or wider precision, neither
7769 conversion is needed. */
7770 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7771 && (((inter_int || inter_ptr) && final_int)
7772 || (inter_float && final_float))
7773 && inter_prec >= final_prec)
7774 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7776 /* Likewise, if the intermediate and final types are either both
7777 float or both integer, we don't need the middle conversion if
7778 it is wider than the final type and doesn't change the signedness
7779 (for integers). Avoid this if the final type is a pointer
7780 since then we sometimes need the inner conversion. Likewise if
7781 the outer has a precision not equal to the size of its mode. */
7782 if (((inter_int && inside_int)
7783 || (inter_float && inside_float)
7784 || (inter_vec && inside_vec))
7785 && inter_prec >= inside_prec
7786 && (inter_float || inter_vec
7787 || inter_unsignedp == inside_unsignedp)
7788 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7789 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7790 && ! final_ptr
7791 && (! final_vec || inter_prec == inside_prec))
7792 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7794 /* If we have a sign-extension of a zero-extended value, we can
7795 replace that by a single zero-extension. */
7796 if (inside_int && inter_int && final_int
7797 && inside_prec < inter_prec && inter_prec < final_prec
7798 && inside_unsignedp && !inter_unsignedp)
7799 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
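/* E.g. with the usual 16/32/64-bit precisions,
(long long)(int)(unsigned short)x sign-extends a value that is
always non-negative, so it is equivalent to the single
zero-extension (long long)(unsigned short)x. */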
7801 /* Two conversions in a row are not needed unless:
7802 - some conversion is floating-point (overstrict for now), or
7803 - some conversion is a vector (overstrict for now), or
7804 - the intermediate type is narrower than both initial and
7805 final, or
7806 - the intermediate type and innermost type differ in signedness,
7807 and the outermost type is wider than the intermediate, or
7808 - the initial type is a pointer type and the precisions of the
7809 intermediate and final types differ, or
7810 - the final type is a pointer type and the precisions of the
7811 initial and intermediate types differ. */
7812 if (! inside_float && ! inter_float && ! final_float
7813 && ! inside_vec && ! inter_vec && ! final_vec
7814 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7815 && ! (inside_int && inter_int
7816 && inter_unsignedp != inside_unsignedp
7817 && inter_prec < final_prec)
7818 && ((inter_unsignedp && inter_prec > inside_prec)
7819 == (final_unsignedp && final_prec > inter_prec))
7820 && ! (inside_ptr && inter_prec != final_prec)
7821 && ! (final_ptr && inside_prec != inter_prec)
7822 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7823 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7824 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7827 /* Handle (T *)&A.B.C for A being of type T and B and C
7828 living at offset zero. This occurs frequently in
7829 C++ upcasting and then accessing the base. */
7830 if (TREE_CODE (op0) == ADDR_EXPR
7831 && POINTER_TYPE_P (type)
7832 && handled_component_p (TREE_OPERAND (op0, 0)))
7834 HOST_WIDE_INT bitsize, bitpos;
7835 tree offset;
7836 enum machine_mode mode;
7837 int unsignedp, volatilep;
7838 tree base = TREE_OPERAND (op0, 0);
7839 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7840 &mode, &unsignedp, &volatilep, false);
7841 /* If the reference was to a (constant) zero offset, we can use
7842 the address of the base if it has the same base type
7843 as the result type. */
7844 if (! offset && bitpos == 0
7845 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7846 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7847 return fold_convert (type, fold_addr_expr (base));
7850 if ((TREE_CODE (op0) == MODIFY_EXPR
7851 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7852 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7853 /* Detect assigning a bitfield. */
7854 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7855 && DECL_BIT_FIELD
7856 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7858 /* Don't leave an assignment inside a conversion
7859 unless assigning a bitfield. */
7860 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7861 /* First do the assignment, then return converted constant. */
7862 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7863 TREE_NO_WARNING (tem) = 1;
7864 TREE_USED (tem) = 1;
7865 return tem;
7868 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7869 constant (if x has signed type, the sign bit cannot be set
7870 in c). This folds extension into the BIT_AND_EXPR.
7871 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7872 very likely don't have maximal range for their precision and this
7873 transformation effectively doesn't preserve non-maximal ranges. */
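/* E.g. for a signed char x, (unsigned short)(x & 0x7f) becomes
(unsigned short)x & 0x7f: the mask clears the sign bit, so the
widening can be folded into the BIT_AND_EXPR. */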
7874 if (TREE_CODE (type) == INTEGER_TYPE
7875 && TREE_CODE (op0) == BIT_AND_EXPR
7876 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7878 tree and = op0;
7879 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7880 int change = 0;
7882 if (TYPE_UNSIGNED (TREE_TYPE (and))
7883 || (TYPE_PRECISION (type)
7884 <= TYPE_PRECISION (TREE_TYPE (and))))
7885 change = 1;
7886 else if (TYPE_PRECISION (TREE_TYPE (and1))
7887 <= HOST_BITS_PER_WIDE_INT
7888 && host_integerp (and1, 1))
7890 unsigned HOST_WIDE_INT cst;
7892 cst = tree_low_cst (and1, 1);
7893 cst &= (HOST_WIDE_INT) -1
7894 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7895 change = (cst == 0);
7896 #ifdef LOAD_EXTEND_OP
7897 if (change
7898 && !flag_syntax_only
7899 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7900 == ZERO_EXTEND))
7902 tree uns = unsigned_type_for (TREE_TYPE (and0));
7903 and0 = fold_convert (uns, and0);
7904 and1 = fold_convert (uns, and1);
7906 #endif
7908 if (change)
7910 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7911 TREE_INT_CST_HIGH (and1), 0,
7912 TREE_OVERFLOW (and1));
7913 return fold_build2 (BIT_AND_EXPR, type,
7914 fold_convert (type, and0), tem);
7918 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7919 when one of the new casts will fold away. Conservatively we assume
7920 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7921 if (POINTER_TYPE_P (type)
7922 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7923 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7924 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7925 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7927 tree arg00 = TREE_OPERAND (arg0, 0);
7928 tree arg01 = TREE_OPERAND (arg0, 1);
7930 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7931 fold_convert (sizetype, arg01));
7934 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7935 of the same precision, and X has an integer type not narrower than
7936 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7937 if (INTEGRAL_TYPE_P (type)
7938 && TREE_CODE (op0) == BIT_NOT_EXPR
7939 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7940 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7941 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7943 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7944 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7945 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7946 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7949 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7950 type of X and Y (integer types only). */
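/* E.g. (short)(x * y) for int x and y can be computed as
(short)((unsigned short)x * (unsigned short)y); doing the narrower
multiplication in the unsigned type avoids introducing new signed
overflow. */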
7951 if (INTEGRAL_TYPE_P (type)
7952 && TREE_CODE (op0) == MULT_EXPR
7953 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7954 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7956 /* Be careful not to introduce new overflows. */
7957 tree mult_type;
7958 if (TYPE_OVERFLOW_WRAPS (type))
7959 mult_type = type;
7960 else
7961 mult_type = unsigned_type_for (type);
7963 tem = fold_build2 (MULT_EXPR, mult_type,
7964 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7965 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7966 return fold_convert (type, tem);
7969 tem = fold_convert_const (code, type, op0);
7970 return tem ? tem : NULL_TREE;
7972 case FIXED_CONVERT_EXPR:
7973 tem = fold_convert_const (code, type, arg0);
7974 return tem ? tem : NULL_TREE;
7976 case VIEW_CONVERT_EXPR:
7977 if (TREE_TYPE (op0) == type)
7978 return op0;
7979 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7980 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7982 /* For integral conversions with the same precision or pointer
7983 conversions use a NOP_EXPR instead. */
7984 if ((INTEGRAL_TYPE_P (type)
7985 || POINTER_TYPE_P (type))
7986 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7987 || POINTER_TYPE_P (TREE_TYPE (op0)))
7988 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7989 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7990 a sub-type to its base type as generated by the Ada FE. */
7991 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7992 && TREE_TYPE (TREE_TYPE (op0))))
7993 return fold_convert (type, op0);
7995 /* Strip inner integral conversions that do not change the precision. */
7996 if (CONVERT_EXPR_P (op0)
7997 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7998 || POINTER_TYPE_P (TREE_TYPE (op0)))
7999 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8000 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8001 && (TYPE_PRECISION (TREE_TYPE (op0))
8002 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8003 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8005 return fold_view_convert_expr (type, op0);
8007 case NEGATE_EXPR:
8008 tem = fold_negate_expr (arg0);
8009 if (tem)
8010 return fold_convert (type, tem);
8011 return NULL_TREE;
8013 case ABS_EXPR:
8014 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8015 return fold_abs_const (arg0, type);
8016 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8017 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8018 /* Convert fabs((double)float) into (double)fabsf(float). */
8019 else if (TREE_CODE (arg0) == NOP_EXPR
8020 && TREE_CODE (type) == REAL_TYPE)
8022 tree targ0 = strip_float_extensions (arg0);
8023 if (targ0 != arg0)
8024 return fold_convert (type, fold_build1 (ABS_EXPR,
8025 TREE_TYPE (targ0),
8026 targ0));
8028 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8029 else if (TREE_CODE (arg0) == ABS_EXPR)
8030 return arg0;
8031 else if (tree_expr_nonnegative_p (arg0))
8032 return arg0;
8034 /* Strip sign ops from argument. */
8035 if (TREE_CODE (type) == REAL_TYPE)
8037 tem = fold_strip_sign_ops (arg0);
8038 if (tem)
8039 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8041 return NULL_TREE;
8043 case CONJ_EXPR:
8044 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8045 return fold_convert (type, arg0);
8046 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8048 tree itype = TREE_TYPE (type);
8049 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8050 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8051 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8053 if (TREE_CODE (arg0) == COMPLEX_CST)
8055 tree itype = TREE_TYPE (type);
8056 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8057 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8058 return build_complex (type, rpart, negate_expr (ipart));
8060 if (TREE_CODE (arg0) == CONJ_EXPR)
8061 return fold_convert (type, TREE_OPERAND (arg0, 0));
8062 return NULL_TREE;
8064 case BIT_NOT_EXPR:
8065 if (TREE_CODE (arg0) == INTEGER_CST)
8066 return fold_not_const (arg0, type);
8067 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8068 return fold_convert (type, TREE_OPERAND (arg0, 0));
8069 /* Convert ~ (-A) to A - 1. */
8070 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8071 return fold_build2 (MINUS_EXPR, type,
8072 fold_convert (type, TREE_OPERAND (arg0, 0)),
8073 build_int_cst (type, 1));
8074 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8075 else if (INTEGRAL_TYPE_P (type)
8076 && ((TREE_CODE (arg0) == MINUS_EXPR
8077 && integer_onep (TREE_OPERAND (arg0, 1)))
8078 || (TREE_CODE (arg0) == PLUS_EXPR
8079 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8080 return fold_build1 (NEGATE_EXPR, type,
8081 fold_convert (type, TREE_OPERAND (arg0, 0)));
8082 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8083 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8084 && (tem = fold_unary (BIT_NOT_EXPR, type,
8085 fold_convert (type,
8086 TREE_OPERAND (arg0, 0)))))
8087 return fold_build2 (BIT_XOR_EXPR, type, tem,
8088 fold_convert (type, TREE_OPERAND (arg0, 1)));
8089 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8090 && (tem = fold_unary (BIT_NOT_EXPR, type,
8091 fold_convert (type,
8092 TREE_OPERAND (arg0, 1)))))
8093 return fold_build2 (BIT_XOR_EXPR, type,
8094 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8095 /* Perform BIT_NOT_EXPR on each element individually. */
8096 else if (TREE_CODE (arg0) == VECTOR_CST)
8098 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8099 int count = TYPE_VECTOR_SUBPARTS (type), i;
8101 for (i = 0; i < count; i++)
8103 if (elements)
8105 elem = TREE_VALUE (elements);
8106 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8107 if (elem == NULL_TREE)
8108 break;
8109 elements = TREE_CHAIN (elements);
8111 else
8112 elem = build_int_cst (TREE_TYPE (type), -1);
8113 list = tree_cons (NULL_TREE, elem, list);
8115 if (i == count)
8116 return build_vector (type, nreverse (list));
8119 return NULL_TREE;
8121 case TRUTH_NOT_EXPR:
8122 /* The argument to invert_truthvalue must have Boolean type. */
8123 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8124 arg0 = fold_convert (boolean_type_node, arg0);
8126 /* Note that the operand of this must be an int
8127 and its values must be 0 or 1.
8128 ("true" is a fixed value perhaps depending on the language,
8129 but we don't handle values other than 1 correctly yet.) */
8130 tem = fold_truth_not_expr (arg0);
8131 if (!tem)
8132 return NULL_TREE;
8133 return fold_convert (type, tem);
8135 case REALPART_EXPR:
8136 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8137 return fold_convert (type, arg0);
8138 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8139 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8140 TREE_OPERAND (arg0, 1));
8141 if (TREE_CODE (arg0) == COMPLEX_CST)
8142 return fold_convert (type, TREE_REALPART (arg0));
8143 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8145 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8146 tem = fold_build2 (TREE_CODE (arg0), itype,
8147 fold_build1 (REALPART_EXPR, itype,
8148 TREE_OPERAND (arg0, 0)),
8149 fold_build1 (REALPART_EXPR, itype,
8150 TREE_OPERAND (arg0, 1)));
8151 return fold_convert (type, tem);
8153 if (TREE_CODE (arg0) == CONJ_EXPR)
8155 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8156 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8157 return fold_convert (type, tem);
8159 if (TREE_CODE (arg0) == CALL_EXPR)
8161 tree fn = get_callee_fndecl (arg0);
8162 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8163 switch (DECL_FUNCTION_CODE (fn))
8165 CASE_FLT_FN (BUILT_IN_CEXPI):
8166 fn = mathfn_built_in (type, BUILT_IN_COS);
8167 if (fn)
8168 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8169 break;
8171 default:
8172 break;
8175 return NULL_TREE;
8177 case IMAGPART_EXPR:
8178 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8179 return fold_convert (type, integer_zero_node);
8180 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8181 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8182 TREE_OPERAND (arg0, 0));
8183 if (TREE_CODE (arg0) == COMPLEX_CST)
8184 return fold_convert (type, TREE_IMAGPART (arg0));
8185 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8187 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8188 tem = fold_build2 (TREE_CODE (arg0), itype,
8189 fold_build1 (IMAGPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 0)),
8191 fold_build1 (IMAGPART_EXPR, itype,
8192 TREE_OPERAND (arg0, 1)));
8193 return fold_convert (type, tem);
8195 if (TREE_CODE (arg0) == CONJ_EXPR)
8197 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8198 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8199 return fold_convert (type, negate_expr (tem));
8201 if (TREE_CODE (arg0) == CALL_EXPR)
8203 tree fn = get_callee_fndecl (arg0);
8204 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8205 switch (DECL_FUNCTION_CODE (fn))
8207 CASE_FLT_FN (BUILT_IN_CEXPI):
8208 fn = mathfn_built_in (type, BUILT_IN_SIN);
8209 if (fn)
8210 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8211 break;
8213 default:
8214 break;
8217 return NULL_TREE;
8219 default:
8220 return NULL_TREE;
8221 } /* switch (code) */
8224 /* Fold a binary expression of code CODE and type TYPE with operands
8225 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8226 Return the folded expression if folding is successful. Otherwise,
8227 return NULL_TREE. */
8229 static tree
8230 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8232 enum tree_code compl_code;
8234 if (code == MIN_EXPR)
8235 compl_code = MAX_EXPR;
8236 else if (code == MAX_EXPR)
8237 compl_code = MIN_EXPR;
8238 else
8239 gcc_unreachable ();
8241 /* MIN (MAX (a, b), b) == b. */
8242 if (TREE_CODE (op0) == compl_code
8243 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8244 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8246 /* MIN (MAX (b, a), b) == b. */
8247 if (TREE_CODE (op0) == compl_code
8248 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8249 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8250 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8252 /* MIN (a, MAX (a, b)) == a. */
8253 if (TREE_CODE (op1) == compl_code
8254 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8255 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8256 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8258 /* MIN (a, MAX (b, a)) == a. */
8259 if (TREE_CODE (op1) == compl_code
8260 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8261 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8262 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8264 return NULL_TREE;
8267 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8268 by changing CODE to reduce the magnitude of constants involved in
8269 ARG0 of the comparison.
8270 Returns a canonicalized comparison tree if a simplification was
8271 possible, otherwise returns NULL_TREE.
8272 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8273 valid if signed overflow is undefined. */
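/* For instance, assuming signed overflow is undefined,
A - 2 < B canonicalizes to A - 1 <= B, and 2 <= B to B > 1;
each step reduces the magnitude of the constant by one. */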
8275 static tree
8276 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8277 tree arg0, tree arg1,
8278 bool *strict_overflow_p)
8280 enum tree_code code0 = TREE_CODE (arg0);
8281 tree t, cst0 = NULL_TREE;
8282 int sgn0;
8283 bool swap = false;
8285 /* Match A +- CST code arg1 and CST code arg1. */
8286 if (!(((code0 == MINUS_EXPR
8287 || code0 == PLUS_EXPR)
8288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8289 || code0 == INTEGER_CST))
8290 return NULL_TREE;
8292 /* Identify the constant in arg0 and its sign. */
8293 if (code0 == INTEGER_CST)
8294 cst0 = arg0;
8295 else
8296 cst0 = TREE_OPERAND (arg0, 1);
8297 sgn0 = tree_int_cst_sgn (cst0);
8299 /* Overflowed constants and zero will cause problems. */
8300 if (integer_zerop (cst0)
8301 || TREE_OVERFLOW (cst0))
8302 return NULL_TREE;
8304 /* See if we can reduce the magnitude of the constant in
8305 arg0 by changing the comparison code. */
8306 if (code0 == INTEGER_CST)
8308 /* CST <= arg1 -> CST-1 < arg1. */
8309 if (code == LE_EXPR && sgn0 == 1)
8310 code = LT_EXPR;
8311 /* -CST < arg1 -> -CST-1 <= arg1. */
8312 else if (code == LT_EXPR && sgn0 == -1)
8313 code = LE_EXPR;
8314 /* CST > arg1 -> CST-1 >= arg1. */
8315 else if (code == GT_EXPR && sgn0 == 1)
8316 code = GE_EXPR;
8317 /* -CST >= arg1 -> -CST-1 > arg1. */
8318 else if (code == GE_EXPR && sgn0 == -1)
8319 code = GT_EXPR;
8320 else
8321 return NULL_TREE;
8322 /* arg1 code' CST' might be more canonical. */
8323 swap = true;
8325 else
8327 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8328 if (code == LT_EXPR
8329 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8330 code = LE_EXPR;
8331 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8332 else if (code == GT_EXPR
8333 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8334 code = GE_EXPR;
8335 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8336 else if (code == LE_EXPR
8337 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8338 code = LT_EXPR;
8339 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8340 else if (code == GE_EXPR
8341 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8342 code = GT_EXPR;
8343 else
8344 return NULL_TREE;
8345 *strict_overflow_p = true;
8348 /* Now build the constant reduced in magnitude. */
8349 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8350 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8351 if (code0 != INTEGER_CST)
8352 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8354 /* If swapping might yield a more canonical form, do so. */
8355 if (swap)
8356 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8357 else
8358 return fold_build2 (code, type, t, arg1);
8361 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8362 overflow further. Try to decrease the magnitude of constants involved
8363 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8364 and put sole constants at the second argument position.
8365 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8367 static tree
8368 maybe_canonicalize_comparison (enum tree_code code, tree type,
8369 tree arg0, tree arg1)
8371 tree t;
8372 bool strict_overflow_p;
8373 const char * const warnmsg = G_("assuming signed overflow does not occur "
8374 "when reducing constant in comparison");
8376 /* In principle pointers also have undefined overflow behavior,
8377 but that causes problems elsewhere. */
8378 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8379 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8380 return NULL_TREE;
8382 /* Try canonicalization by simplifying arg0. */
8383 strict_overflow_p = false;
8384 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8385 &strict_overflow_p);
8386 if (t)
8388 if (strict_overflow_p)
8389 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8390 return t;
8393 /* Try canonicalization by simplifying arg1 using the swapped
8394 comparison. */
8395 code = swap_tree_comparison (code);
8396 strict_overflow_p = false;
8397 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8398 &strict_overflow_p);
8399 if (t && strict_overflow_p)
8400 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8401 return t;
8404 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8405 space. This is used to avoid issuing overflow warnings for
8406 expressions like &p->x which cannot wrap. */
8408 static bool
8409 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8411 unsigned HOST_WIDE_INT offset_low, total_low;
8412 HOST_WIDE_INT size, offset_high, total_high;
8414 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8415 return true;
8417 if (bitpos < 0)
8418 return true;
8420 if (offset == NULL_TREE)
8422 offset_low = 0;
8423 offset_high = 0;
8425 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8426 return true;
8427 else
8429 offset_low = TREE_INT_CST_LOW (offset);
8430 offset_high = TREE_INT_CST_HIGH (offset);
8433 if (add_double_with_sign (offset_low, offset_high,
8434 bitpos / BITS_PER_UNIT, 0,
8435 &total_low, &total_high,
8436 true))
8437 return true;
8439 if (total_high != 0)
8440 return true;
8442 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8443 if (size <= 0)
8444 return true;
8446 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8447 array. */
8448 if (TREE_CODE (base) == ADDR_EXPR)
8450 HOST_WIDE_INT base_size;
8452 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8453 if (base_size > 0 && size < base_size)
8454 size = base_size;
8457 return total_low > (unsigned HOST_WIDE_INT) size;
8460 /* Subroutine of fold_binary. This routine performs all of the
8461 transformations that are common to the equality/inequality
8462 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8463 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8464 fold_binary itself should use fold_binary instead. Fold a comparison with
8465 tree code CODE and type TYPE with operands OP0 and OP1. Return
8466 the folded comparison or NULL_TREE. */
8468 static tree
8469 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8471 tree arg0, arg1, tem;
8473 arg0 = op0;
8474 arg1 = op1;
8476 STRIP_SIGN_NOPS (arg0);
8477 STRIP_SIGN_NOPS (arg1);
8479 tem = fold_relational_const (code, type, arg0, arg1);
8480 if (tem != NULL_TREE)
8481 return tem;
8483 /* If one arg is a real or integer constant, put it last. */
8484 if (tree_swap_operands_p (arg0, arg1, true))
8485 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8487 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
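/* E.g. X + 10 < 20 becomes X < 10 and X - 10 < 20 becomes X < 30,
assuming signed overflow is undefined for the type of X. */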
8488 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8489 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8490 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8491 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8492 && (TREE_CODE (arg1) == INTEGER_CST
8493 && !TREE_OVERFLOW (arg1)))
8495 tree const1 = TREE_OPERAND (arg0, 1);
8496 tree const2 = arg1;
8497 tree variable = TREE_OPERAND (arg0, 0);
8498 tree lhs;
8499 int lhs_add;
8500 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8502 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8503 TREE_TYPE (arg1), const2, const1);
8505 /* If the constant operation overflowed this can be
8506 simplified as a comparison against INT_MAX/INT_MIN. */
8507 if (TREE_CODE (lhs) == INTEGER_CST
8508 && TREE_OVERFLOW (lhs))
8510 int const1_sgn = tree_int_cst_sgn (const1);
8511 enum tree_code code2 = code;
8513 /* Get the sign of the constant on the lhs if the
8514 operation were VARIABLE + CONST1. */
8515 if (TREE_CODE (arg0) == MINUS_EXPR)
8516 const1_sgn = -const1_sgn;
8518 /* The sign of the constant determines if we overflowed
8519 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8520 Canonicalize to the INT_MIN overflow by swapping the comparison
8521 if necessary. */
8522 if (const1_sgn == -1)
8523 code2 = swap_tree_comparison (code);
8525 /* We now can look at the canonicalized case
8526 VARIABLE + 1 CODE2 INT_MIN
8527 and decide on the result. */
8528 if (code2 == LT_EXPR
8529 || code2 == LE_EXPR
8530 || code2 == EQ_EXPR)
8531 return omit_one_operand (type, boolean_false_node, variable);
8532 else if (code2 == NE_EXPR
8533 || code2 == GE_EXPR
8534 || code2 == GT_EXPR)
8535 return omit_one_operand (type, boolean_true_node, variable);
8538 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8539 && (TREE_CODE (lhs) != INTEGER_CST
8540 || !TREE_OVERFLOW (lhs)))
8542 fold_overflow_warning (("assuming signed overflow does not occur "
8543 "when changing X +- C1 cmp C2 to "
8544 "X cmp C1 +- C2"),
8545 WARN_STRICT_OVERFLOW_COMPARISON);
8546 return fold_build2 (code, type, variable, lhs);
8550 /* For comparisons of pointers we can decompose them into a compile-time
8551 comparison of the base objects and the offsets into the object.
8552 This requires at least one operand being an ADDR_EXPR or a
8553 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
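/* E.g. for a struct s with distinct fields a and b, &s.a == &s.b
can be decided at compile time from the two field offsets alone. */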
8554 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8555 && (TREE_CODE (arg0) == ADDR_EXPR
8556 || TREE_CODE (arg1) == ADDR_EXPR
8557 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8558 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8560 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8561 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8562 enum machine_mode mode;
8563 int volatilep, unsignedp;
8564 bool indirect_base0 = false, indirect_base1 = false;
8566 /* Get base and offset for the access. Strip ADDR_EXPR for
8567 get_inner_reference, but put it back by stripping INDIRECT_REF
8568 off the base object if possible. indirect_baseN will be true
8569 if baseN is not an address but refers to the object itself. */
8570 base0 = arg0;
8571 if (TREE_CODE (arg0) == ADDR_EXPR)
8573 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8574 &bitsize, &bitpos0, &offset0, &mode,
8575 &unsignedp, &volatilep, false);
8576 if (TREE_CODE (base0) == INDIRECT_REF)
8577 base0 = TREE_OPERAND (base0, 0);
8578 else
8579 indirect_base0 = true;
8581 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8583 base0 = TREE_OPERAND (arg0, 0);
8584 offset0 = TREE_OPERAND (arg0, 1);
8587 base1 = arg1;
8588 if (TREE_CODE (arg1) == ADDR_EXPR)
8590 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8591 &bitsize, &bitpos1, &offset1, &mode,
8592 &unsignedp, &volatilep, false);
8593 if (TREE_CODE (base1) == INDIRECT_REF)
8594 base1 = TREE_OPERAND (base1, 0);
8595 else
8596 indirect_base1 = true;
8598 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8600 base1 = TREE_OPERAND (arg1, 0);
8601 offset1 = TREE_OPERAND (arg1, 1);
8604 /* If we have equivalent bases we might be able to simplify. */
8605 if (indirect_base0 == indirect_base1
8606 && operand_equal_p (base0, base1, 0))
8608 /* We can fold this expression to a constant if the non-constant
8609 offset parts are equal. */
8610 if ((offset0 == offset1
8611 || (offset0 && offset1
8612 && operand_equal_p (offset0, offset1, 0)))
8613 && (code == EQ_EXPR
8614 || code == NE_EXPR
8615 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8618 if (code != EQ_EXPR
8619 && code != NE_EXPR
8620 && bitpos0 != bitpos1
8621 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8622 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8623 fold_overflow_warning (("assuming pointer wraparound does not "
8624 "occur when comparing P +- C1 with "
8625 "P +- C2"),
8626 WARN_STRICT_OVERFLOW_CONDITIONAL);
8628 switch (code)
8630 case EQ_EXPR:
8631 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8632 case NE_EXPR:
8633 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8634 case LT_EXPR:
8635 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8636 case LE_EXPR:
8637 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8638 case GE_EXPR:
8639 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8640 case GT_EXPR:
8641 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8642 default:;
8645 /* We can simplify the comparison to a comparison of the variable
8646 offset parts if the constant offset parts are equal.
8647 Be careful to use signed size type here because otherwise we
8648 mess with array offsets in the wrong way. This is possible
8649 because pointer arithmetic is restricted to remain within an
8650 object and overflow on pointer differences is undefined as of
8651 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8652 else if (bitpos0 == bitpos1
8653 && ((code == EQ_EXPR || code == NE_EXPR)
8654 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8656 tree signed_size_type_node;
8657 signed_size_type_node = signed_type_for (size_type_node);
8659 /* By converting to the signed size type we cover middle-end pointer
8660 arithmetic, which operates on unsigned pointer types of size-type
8661 width, and ARRAY_REF offsets, which are properly sign- or
8662 zero-extended from their type in case it is narrower than
8663 size type. */
8664 if (offset0 == NULL_TREE)
8665 offset0 = build_int_cst (signed_size_type_node, 0);
8666 else
8667 offset0 = fold_convert (signed_size_type_node, offset0);
8668 if (offset1 == NULL_TREE)
8669 offset1 = build_int_cst (signed_size_type_node, 0);
8670 else
8671 offset1 = fold_convert (signed_size_type_node, offset1);
8673 if (code != EQ_EXPR
8674 && code != NE_EXPR
8675 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8676 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8677 fold_overflow_warning (("assuming pointer wraparound does not "
8678 "occur when comparing P +- C1 with "
8679 "P +- C2"),
8680 WARN_STRICT_OVERFLOW_COMPARISON);
8682 return fold_build2 (code, type, offset0, offset1);
8685 /* For non-equal bases we can simplify if they are addresses
8686 of local binding decls or constants. */
8687 else if (indirect_base0 && indirect_base1
8688 /* We know that !operand_equal_p (base0, base1, 0)
8689 because the if condition was false. But make
8690 sure two decls are not the same. */
8691 && base0 != base1
8692 && TREE_CODE (arg0) == ADDR_EXPR
8693 && TREE_CODE (arg1) == ADDR_EXPR
8694 && (((TREE_CODE (base0) == VAR_DECL
8695 || TREE_CODE (base0) == PARM_DECL)
8696 && (targetm.binds_local_p (base0)
8697 || CONSTANT_CLASS_P (base1)))
8698 || CONSTANT_CLASS_P (base0))
8699 && (((TREE_CODE (base1) == VAR_DECL
8700 || TREE_CODE (base1) == PARM_DECL)
8701 && (targetm.binds_local_p (base1)
8702 || CONSTANT_CLASS_P (base0)))
8703 || CONSTANT_CLASS_P (base1)))
8705 if (code == EQ_EXPR)
8706 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8707 else if (code == NE_EXPR)
8708 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8710 /* For equal offsets we can simplify to a comparison of the
8711 base addresses. */
8712 else if (bitpos0 == bitpos1
8713 && (indirect_base0
8714 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8715 && (indirect_base1
8716 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8717 && ((offset0 == offset1)
8718 || (offset0 && offset1
8719 && operand_equal_p (offset0, offset1, 0))))
8721 if (indirect_base0)
8722 base0 = fold_addr_expr (base0);
8723 if (indirect_base1)
8724 base1 = fold_addr_expr (base1);
8725 return fold_build2 (code, type, base0, base1);
8729 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8730 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8731 the resulting offset is smaller in absolute value than the
8732 original one. */
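/* E.g. X + 5 < Y + 2 becomes X < Y + -3: the combined constant 3
is smaller in magnitude than the original 5. */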
8733 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8734 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8735 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8736 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8737 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8738 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8739 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8741 tree const1 = TREE_OPERAND (arg0, 1);
8742 tree const2 = TREE_OPERAND (arg1, 1);
8743 tree variable1 = TREE_OPERAND (arg0, 0);
8744 tree variable2 = TREE_OPERAND (arg1, 0);
8745 tree cst;
8746 const char * const warnmsg = G_("assuming signed overflow does not "
8747 "occur when combining constants around "
8748 "a comparison");
8750 /* Put the constant on the side where it doesn't overflow and is
8751 of lower absolute value than before. */
8752 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8753 ? MINUS_EXPR : PLUS_EXPR,
8754 const2, const1, 0);
8755 if (!TREE_OVERFLOW (cst)
8756 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8758 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8759 return fold_build2 (code, type,
8760 variable1,
8761 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8762 variable2, cst));
8765 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8766 ? MINUS_EXPR : PLUS_EXPR,
8767 const1, const2, 0);
8768 if (!TREE_OVERFLOW (cst)
8769 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8771 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8772 return fold_build2 (code, type,
8773 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8774 variable1, cst),
8775 variable2);
8779 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8780 signed arithmetic case. That form is created by the compiler
8781 often enough for folding it to be of value. One example is in
8782 computing loop trip counts after Operator Strength Reduction. */
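/* E.g. X * 4 > 0 becomes X > 0 and X * -4 > 0 becomes X < 0; with
undefined signed overflow the product has the sign of X, flipped
when the constant is negative. */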
8783 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8784 && TREE_CODE (arg0) == MULT_EXPR
8785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8786 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8787 && integer_zerop (arg1))
8789 tree const1 = TREE_OPERAND (arg0, 1);
8790 tree const2 = arg1; /* zero */
8791 tree variable1 = TREE_OPERAND (arg0, 0);
8792 enum tree_code cmp_code = code;
8794 gcc_assert (!integer_zerop (const1));
8796 fold_overflow_warning (("assuming signed overflow does not occur when "
8797 "eliminating multiplication in comparison "
8798 "with zero"),
8799 WARN_STRICT_OVERFLOW_COMPARISON);
8801 /* If const1 is negative we swap the sense of the comparison. */
8802 if (tree_int_cst_sgn (const1) < 0)
8803 cmp_code = swap_tree_comparison (cmp_code);
8805 return fold_build2 (cmp_code, type, variable1, const2);
8808 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8809 if (tem)
8810 return tem;
8812 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8814 tree targ0 = strip_float_extensions (arg0);
8815 tree targ1 = strip_float_extensions (arg1);
8816 tree newtype = TREE_TYPE (targ0);
8818 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8819 newtype = TREE_TYPE (targ1);
8821 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8822 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8823 return fold_build2 (code, type, fold_convert (newtype, targ0),
8824 fold_convert (newtype, targ1));
8826 /* (-a) CMP (-b) -> b CMP a */
8827 if (TREE_CODE (arg0) == NEGATE_EXPR
8828 && TREE_CODE (arg1) == NEGATE_EXPR)
8829 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8830 TREE_OPERAND (arg0, 0));
8832 if (TREE_CODE (arg1) == REAL_CST)
8834 REAL_VALUE_TYPE cst;
8835 cst = TREE_REAL_CST (arg1);
8837 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8838 if (TREE_CODE (arg0) == NEGATE_EXPR)
8839 return fold_build2 (swap_tree_comparison (code), type,
8840 TREE_OPERAND (arg0, 0),
8841 build_real (TREE_TYPE (arg1),
8842 REAL_VALUE_NEGATE (cst)));
8844 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8845 /* a CMP (-0) -> a CMP 0 */
8846 if (REAL_VALUE_MINUS_ZERO (cst))
8847 return fold_build2 (code, type, arg0,
8848 build_real (TREE_TYPE (arg1), dconst0));
8850 /* x != NaN is always true, other ops are always false. */
8851 if (REAL_VALUE_ISNAN (cst)
8852 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8854 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8855 return omit_one_operand (type, tem, arg0);
8858 /* Fold comparisons against infinity. */
8859 if (REAL_VALUE_ISINF (cst))
8861 tem = fold_inf_compare (code, type, arg0, arg1);
8862 if (tem != NULL_TREE)
8863 return tem;
8867 /* If this is a comparison of a real constant with a PLUS_EXPR
8868 or a MINUS_EXPR of a real constant, we can convert it into a
8869 comparison with a revised real constant as long as no overflow
8870 occurs when unsafe_math_optimizations are enabled. */
8871 if (flag_unsafe_math_optimizations
8872 && TREE_CODE (arg1) == REAL_CST
8873 && (TREE_CODE (arg0) == PLUS_EXPR
8874 || TREE_CODE (arg0) == MINUS_EXPR)
8875 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8876 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8877 ? MINUS_EXPR : PLUS_EXPR,
8878 arg1, TREE_OPERAND (arg0, 1), 0))
8879 && !TREE_OVERFLOW (tem))
8880 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8882 /* Likewise, we can simplify a comparison of a real constant with
8883 a MINUS_EXPR whose first operand is also a real constant, i.e.
8884 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8885 floating-point types only if -fassociative-math is set. */
8886 if (flag_associative_math
8887 && TREE_CODE (arg1) == REAL_CST
8888 && TREE_CODE (arg0) == MINUS_EXPR
8889 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8890 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8891 arg1, 0))
8892 && !TREE_OVERFLOW (tem))
8893 return fold_build2 (swap_tree_comparison (code), type,
8894 TREE_OPERAND (arg0, 1), tem);
8896 /* Fold comparisons against built-in math functions. */
8897 if (TREE_CODE (arg1) == REAL_CST
8898 && flag_unsafe_math_optimizations
8899 && ! flag_errno_math)
8901 enum built_in_function fcode = builtin_mathfn_code (arg0);
8903 if (fcode != END_BUILTINS)
8905 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8906 if (tem != NULL_TREE)
8907 return tem;
8912 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8913 && CONVERT_EXPR_P (arg0))
8915 /* If we are widening one operand of an integer comparison,
8916 see if the other operand is similarly being widened. Perhaps we
8917 can do the comparison in the narrower type. */
8918 tem = fold_widened_comparison (code, type, arg0, arg1);
8919 if (tem)
8920 return tem;
8922 /* Or if we are changing signedness. */
8923 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8924 if (tem)
8925 return tem;
8928 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8929 constant, we can simplify it. */
8930 if (TREE_CODE (arg1) == INTEGER_CST
8931 && (TREE_CODE (arg0) == MIN_EXPR
8932 || TREE_CODE (arg0) == MAX_EXPR)
8933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8935 tem = optimize_minmax_comparison (code, type, op0, op1);
8936 if (tem)
8937 return tem;
8940 /* Simplify comparison of something with itself. (For IEEE
8941 floating-point, we can only do some of these simplifications.) */
8942 if (operand_equal_p (arg0, arg1, 0))
8944 switch (code)
8946 case EQ_EXPR:
8947 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8948 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8949 return constant_boolean_node (1, type);
8950 break;
8952 case GE_EXPR:
8953 case LE_EXPR:
8954 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8955 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8956 return constant_boolean_node (1, type);
8957 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8959 case NE_EXPR:
8960 /* For NE, we can only do this simplification if the operands are
8961 integral or we don't honor IEEE floating-point NaNs. */
8962 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8963 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8964 break;
8965 /* ... fall through ... */
8966 case GT_EXPR:
8967 case LT_EXPR:
8968 return constant_boolean_node (0, type);
8969 default:
8970 gcc_unreachable ();
8974 /* If we are comparing an expression that just has comparisons
8975 of two integer values, arithmetic expressions of those comparisons,
8976 and constants, we can simplify it. There are only three cases
8977 to check: the two values can either be equal, the first can be
8978 greater, or the second can be greater. Fold the expression for
8979 those three values. Since each value must be 0 or 1, we have
8980 eight possibilities, each of which corresponds to the constant 0
8981 or 1 or one of the six possible comparisons.
8983 This handles common cases like (a > b) == 0 but also handles
8984 expressions like ((x > y) - (y > x)) > 0, which supposedly
8985 occur in macroized code. */
8987 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8989 tree cval1 = 0, cval2 = 0;
8990 int save_p = 0;
8992 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8993 /* Don't handle degenerate cases here; they should already
8994 have been handled anyway. */
8995 && cval1 != 0 && cval2 != 0
8996 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8997 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8998 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8999 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9000 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9001 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9002 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9004 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9005 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9007 /* We can't just pass T to eval_subst in case cval1 or cval2
9008 was the same as ARG1. */
9010 tree high_result
9011 = fold_build2 (code, type,
9012 eval_subst (arg0, cval1, maxval,
9013 cval2, minval),
9014 arg1);
9015 tree equal_result
9016 = fold_build2 (code, type,
9017 eval_subst (arg0, cval1, maxval,
9018 cval2, maxval),
9019 arg1);
9020 tree low_result
9021 = fold_build2 (code, type,
9022 eval_subst (arg0, cval1, minval,
9023 cval2, maxval),
9024 arg1);
9026 /* All three of these results should be 0 or 1. Confirm they are.
9027 Then use those values to select the proper code to use. */
9029 if (TREE_CODE (high_result) == INTEGER_CST
9030 && TREE_CODE (equal_result) == INTEGER_CST
9031 && TREE_CODE (low_result) == INTEGER_CST)
9033 /* Make a 3-bit mask with the high-order bit being the
9034 value for `>', the next for '=', and the low for '<'. */
9035 switch ((integer_onep (high_result) * 4)
9036 + (integer_onep (equal_result) * 2)
9037 + integer_onep (low_result))
9039 case 0:
9040 /* Always false. */
9041 return omit_one_operand (type, integer_zero_node, arg0);
9042 case 1:
9043 code = LT_EXPR;
9044 break;
9045 case 2:
9046 code = EQ_EXPR;
9047 break;
9048 case 3:
9049 code = LE_EXPR;
9050 break;
9051 case 4:
9052 code = GT_EXPR;
9053 break;
9054 case 5:
9055 code = NE_EXPR;
9056 break;
9057 case 6:
9058 code = GE_EXPR;
9059 break;
9060 case 7:
9061 /* Always true. */
9062 return omit_one_operand (type, integer_one_node, arg0);
9065 if (save_p)
9066 return save_expr (build2 (code, type, cval1, cval2));
9067 return fold_build2 (code, type, cval1, cval2);
9072 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9073 into a single range test. */
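/* E.g. for signed X, X / 4 == 2 is equivalent to the range test
8 <= X && X <= 11, since truncating division maps exactly that
interval to 2. */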
9074 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9075 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9076 && TREE_CODE (arg1) == INTEGER_CST
9077 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9078 && !integer_zerop (TREE_OPERAND (arg0, 1))
9079 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9080 && !TREE_OVERFLOW (arg1))
9082 tem = fold_div_compare (code, type, arg0, arg1);
9083 if (tem != NULL_TREE)
9084 return tem;
9087 /* Fold ~X op ~Y as Y op X. */
9088 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9089 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9091 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9092 return fold_build2 (code, type,
9093 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9094 TREE_OPERAND (arg0, 0));
9097 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9098 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9099 && TREE_CODE (arg1) == INTEGER_CST)
9101 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9102 return fold_build2 (swap_tree_comparison (code), type,
9103 TREE_OPERAND (arg0, 0),
9104 fold_build1 (BIT_NOT_EXPR, cmp_type,
9105 fold_convert (cmp_type, arg1)));
9108 return NULL_TREE;
9112 /* Subroutine of fold_binary. Optimize complex multiplications of the
9113 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9114 argument EXPR represents the expression "z" of type TYPE. */
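/* For z = a + b*i this computes (a + b*i) * (a - b*i) = a*a + b*b,
i.e. a complex value with zero imaginary part. */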
9116 static tree
9117 fold_mult_zconjz (tree type, tree expr)
9119 tree itype = TREE_TYPE (type);
9120 tree rpart, ipart, tem;
9122 if (TREE_CODE (expr) == COMPLEX_EXPR)
9124 rpart = TREE_OPERAND (expr, 0);
9125 ipart = TREE_OPERAND (expr, 1);
9127 else if (TREE_CODE (expr) == COMPLEX_CST)
9129 rpart = TREE_REALPART (expr);
9130 ipart = TREE_IMAGPART (expr);
9132 else
9134 expr = save_expr (expr);
9135 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9136 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9139 rpart = save_expr (rpart);
9140 ipart = save_expr (ipart);
9141 tem = fold_build2 (PLUS_EXPR, itype,
9142 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9143 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9144 return fold_build2 (COMPLEX_EXPR, type, tem,
9145 fold_convert (itype, integer_zero_node));
9149 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9150 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9151 guarantees that P and N have the same least significant log2(M) bits.
9152 N is not otherwise constrained. In particular, N is not normalized to
9153 0 <= N < M as is common. In general, the precise value of P is unknown.
9154 M is chosen as large as possible such that constant N can be determined.
9156 Returns M and sets *RESIDUE to N. */
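/* Illustrative example: for &a[1], where a is an array of 4-byte
   ints that is 16-byte aligned, this returns M == 16 with
   *RESIDUE == 4, i.e. the address is congruent to 4 modulo 16.  */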
9158 static unsigned HOST_WIDE_INT
9159 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9161 enum tree_code code;
9163 *residue = 0;
9165 code = TREE_CODE (expr);
9166 if (code == ADDR_EXPR)
9168 expr = TREE_OPERAND (expr, 0);
9169 if (handled_component_p (expr))
9171 HOST_WIDE_INT bitsize, bitpos;
9172 tree offset;
9173 enum machine_mode mode;
9174 int unsignedp, volatilep;
9176 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9177 &mode, &unsignedp, &volatilep, false);
9178 *residue = bitpos / BITS_PER_UNIT;
9179 if (offset)
9181 if (TREE_CODE (offset) == INTEGER_CST)
9182 *residue += TREE_INT_CST_LOW (offset);
9183 else
9184 /* We don't handle more complicated offset expressions. */
9185 return 1;
9189 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9190 return DECL_ALIGN_UNIT (expr);
9192 else if (code == POINTER_PLUS_EXPR)
9194 tree op0, op1;
9195 unsigned HOST_WIDE_INT modulus;
9196 enum tree_code inner_code;
9198 op0 = TREE_OPERAND (expr, 0);
9199 STRIP_NOPS (op0);
9200 modulus = get_pointer_modulus_and_residue (op0, residue);
9202 op1 = TREE_OPERAND (expr, 1);
9203 STRIP_NOPS (op1);
9204 inner_code = TREE_CODE (op1);
9205 if (inner_code == INTEGER_CST)
9207 *residue += TREE_INT_CST_LOW (op1);
9208 return modulus;
9210 else if (inner_code == MULT_EXPR)
9212 op1 = TREE_OPERAND (op1, 1);
9213 if (TREE_CODE (op1) == INTEGER_CST)
9215 unsigned HOST_WIDE_INT align;
9217 /* Compute the greatest power-of-2 divisor of op1. */
9218 align = TREE_INT_CST_LOW (op1);
9219 align &= -align;
9221 /* If align is non-zero and less than modulus, replace
9222 modulus with align. If align is 0, then either op1 is 0
9223 or the greatest power-of-2 divisor of op1 doesn't fit in an
9224 unsigned HOST_WIDE_INT. In either case, no additional
9225 constraint is imposed. */
9226 if (align)
9227 modulus = MIN (modulus, align);
9229 return modulus;
9234 /* If we get here, we were unable to determine anything useful about the
9235 expression. */
9236 return 1;
9240 /* Fold a binary expression of code CODE and type TYPE with operands
9241 OP0 and OP1. Return the folded expression if folding is
9242 successful. Otherwise, return NULL_TREE. */
9244 tree
9245 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9247 enum tree_code_class kind = TREE_CODE_CLASS (code);
9248 tree arg0, arg1, tem;
9249 tree t1 = NULL_TREE;
9250 bool strict_overflow_p;
9252 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9253 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9254 && TREE_CODE_LENGTH (code) == 2
9255 && op0 != NULL_TREE
9256 && op1 != NULL_TREE);
9258 arg0 = op0;
9259 arg1 = op1;
9261 /* Strip any conversions that don't change the mode. This is
9262 safe for every expression, except for a comparison expression
9263 because its signedness is derived from its operands. So, in
9264 the latter case, only strip conversions that don't change the
9265 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9266 preserved.
9268 Note that this is done as an internal manipulation within the
9269 constant folder, in order to find the simplest representation
9270 of the arguments so that their form can be studied. In any
9271 cases, the appropriate type conversions should be put back in
9272 the tree that will get out of the constant folder. */
9274 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9276 STRIP_SIGN_NOPS (arg0);
9277 STRIP_SIGN_NOPS (arg1);
9279 else
9281 STRIP_NOPS (arg0);
9282 STRIP_NOPS (arg1);
9285 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9286 constant but we can't do arithmetic on them. */
9287 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9288 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9289 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9290 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9291 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9292 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9294 if (kind == tcc_binary)
9296 /* Make sure type and arg0 have the same saturating flag. */
9297 gcc_assert (TYPE_SATURATING (type)
9298 == TYPE_SATURATING (TREE_TYPE (arg0)));
9299 tem = const_binop (code, arg0, arg1, 0);
9301 else if (kind == tcc_comparison)
9302 tem = fold_relational_const (code, type, arg0, arg1);
9303 else
9304 tem = NULL_TREE;
9306 if (tem != NULL_TREE)
9308 if (TREE_TYPE (tem) != type)
9309 tem = fold_convert (type, tem);
9310 return tem;
9314 /* If this is a commutative operation, and ARG0 is a constant, move it
9315 to ARG1 to reduce the number of tests below. */
9316 if (commutative_tree_code (code)
9317 && tree_swap_operands_p (arg0, arg1, true))
9318 return fold_build2 (code, type, op1, op0);
9320 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9322 First check for cases where an arithmetic operation is applied to a
9323 compound, conditional, or comparison operation. Push the arithmetic
9324 operation inside the compound or conditional to see if any folding
9325 can then be done. Convert comparison to conditional for this purpose.
9326 This also optimizes non-constant cases that used to be done in
9327 expand_expr.
9329 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9330 one of the operands is a comparison and the other is a comparison, a
9331 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9332 code below would make the expression more complex. Change it to a
9333 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9334 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9336 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9337 || code == EQ_EXPR || code == NE_EXPR)
9338 && ((truth_value_p (TREE_CODE (arg0))
9339 && (truth_value_p (TREE_CODE (arg1))
9340 || (TREE_CODE (arg1) == BIT_AND_EXPR
9341 && integer_onep (TREE_OPERAND (arg1, 1)))))
9342 || (truth_value_p (TREE_CODE (arg1))
9343 && (truth_value_p (TREE_CODE (arg0))
9344 || (TREE_CODE (arg0) == BIT_AND_EXPR
9345 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9347 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9348 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9349 : TRUTH_XOR_EXPR,
9350 boolean_type_node,
9351 fold_convert (boolean_type_node, arg0),
9352 fold_convert (boolean_type_node, arg1));
9354 if (code == EQ_EXPR)
9355 tem = invert_truthvalue (tem);
9357 return fold_convert (type, tem);
9360 if (TREE_CODE_CLASS (code) == tcc_binary
9361 || TREE_CODE_CLASS (code) == tcc_comparison)
9363 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9364 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9365 fold_build2 (code, type,
9366 fold_convert (TREE_TYPE (op0),
9367 TREE_OPERAND (arg0, 1)),
9368 op1));
9369 if (TREE_CODE (arg1) == COMPOUND_EXPR
9370 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9371 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9372 fold_build2 (code, type, op0,
9373 fold_convert (TREE_TYPE (op1),
9374 TREE_OPERAND (arg1, 1))));
9376 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9378 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9379 arg0, arg1,
9380 /*cond_first_p=*/1);
9381 if (tem != NULL_TREE)
9382 return tem;
9385 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9387 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9388 arg1, arg0,
9389 /*cond_first_p=*/0);
9390 if (tem != NULL_TREE)
9391 return tem;
9395 switch (code)
9397 case POINTER_PLUS_EXPR:
9398 /* 0 +p index -> (type)index */
9399 if (integer_zerop (arg0))
9400 return non_lvalue (fold_convert (type, arg1));
9402 /* PTR +p 0 -> PTR */
9403 if (integer_zerop (arg1))
9404 return non_lvalue (fold_convert (type, arg0));
9406 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9407 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9408 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9409 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9410 fold_convert (sizetype, arg1),
9411 fold_convert (sizetype, arg0)));
9413 /* index +p PTR -> PTR +p index */
9414 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9415 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9416 return fold_build2 (POINTER_PLUS_EXPR, type,
9417 fold_convert (type, arg1),
9418 fold_convert (sizetype, arg0));
9420 /* (PTR +p B) +p A -> PTR +p (B + A) */
9421 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9423 tree inner;
9424 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9425 tree arg00 = TREE_OPERAND (arg0, 0);
9426 inner = fold_build2 (PLUS_EXPR, sizetype,
9427 arg01, fold_convert (sizetype, arg1));
9428 return fold_convert (type,
9429 fold_build2 (POINTER_PLUS_EXPR,
9430 TREE_TYPE (arg00), arg00, inner));
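/* E.g. (p p+ 4) p+ 8 becomes p p+ 12 once the two offsets are
   summed in sizetype.  */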
9433 /* PTR_CST +p CST -> CST1 */
9434 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9435 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9437 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9438 of the array. The loop optimizer sometimes produces this type of
9439 expression. */
9440 if (TREE_CODE (arg0) == ADDR_EXPR)
9442 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9443 if (tem)
9444 return fold_convert (type, tem);
9447 return NULL_TREE;
9449 case PLUS_EXPR:
9450 /* PTR + INT -> (INT)(PTR p+ INT) */
9451 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9452 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9453 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9454 TREE_TYPE (arg0),
9455 arg0,
9456 fold_convert (sizetype, arg1)));
9457 /* INT + PTR -> (INT)(PTR p+ INT) */
9458 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9459 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9460 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9461 TREE_TYPE (arg1),
9462 arg1,
9463 fold_convert (sizetype, arg0)));
9464 /* A + (-B) -> A - B */
9465 if (TREE_CODE (arg1) == NEGATE_EXPR)
9466 return fold_build2 (MINUS_EXPR, type,
9467 fold_convert (type, arg0),
9468 fold_convert (type, TREE_OPERAND (arg1, 0)));
9469 /* (-A) + B -> B - A */
9470 if (TREE_CODE (arg0) == NEGATE_EXPR
9471 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9472 return fold_build2 (MINUS_EXPR, type,
9473 fold_convert (type, arg1),
9474 fold_convert (type, TREE_OPERAND (arg0, 0)));
9476 if (INTEGRAL_TYPE_P (type))
9478 /* Convert ~A + 1 to -A. */
9479 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9480 && integer_onep (arg1))
9481 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9483 /* ~X + X is -1. */
9484 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9485 && !TYPE_OVERFLOW_TRAPS (type))
9487 tree tem = TREE_OPERAND (arg0, 0);
9489 STRIP_NOPS (tem);
9490 if (operand_equal_p (tem, arg1, 0))
9492 t1 = build_int_cst_type (type, -1);
9493 return omit_one_operand (type, t1, arg1);
9497 /* X + ~X is -1. */
9498 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9499 && !TYPE_OVERFLOW_TRAPS (type))
9501 tree tem = TREE_OPERAND (arg1, 0);
9503 STRIP_NOPS (tem);
9504 if (operand_equal_p (arg0, tem, 0))
9506 t1 = build_int_cst_type (type, -1);
9507 return omit_one_operand (type, t1, arg0);
9511 /* X + (X / CST) * -CST is X % CST. */
9512 if (TREE_CODE (arg1) == MULT_EXPR
9513 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9514 && operand_equal_p (arg0,
9515 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9517 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9518 tree cst1 = TREE_OPERAND (arg1, 1);
9519 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9520 if (sum && integer_zerop (sum))
9521 return fold_convert (type,
9522 fold_build2 (TRUNC_MOD_EXPR,
9523 TREE_TYPE (arg0), arg0, cst0));
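/* E.g. X + (X/8)*-8 folds to X % 8, since -8 + 8 == 0.  */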
9527 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9528 same or one. Make sure type is not saturating.
9529 fold_plusminus_mult_expr will re-associate. */
9530 if ((TREE_CODE (arg0) == MULT_EXPR
9531 || TREE_CODE (arg1) == MULT_EXPR)
9532 && !TYPE_SATURATING (type)
9533 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9535 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9536 if (tem)
9537 return tem;
9540 if (! FLOAT_TYPE_P (type))
9542 if (integer_zerop (arg1))
9543 return non_lvalue (fold_convert (type, arg0));
9545 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9546 with a constant, and the two constants have no bits in common,
9547 we should treat this as a BIT_IOR_EXPR since this may produce more
9548 simplifications. */
9549 if (TREE_CODE (arg0) == BIT_AND_EXPR
9550 && TREE_CODE (arg1) == BIT_AND_EXPR
9551 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9552 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9553 && integer_zerop (const_binop (BIT_AND_EXPR,
9554 TREE_OPERAND (arg0, 1),
9555 TREE_OPERAND (arg1, 1), 0)))
9557 code = BIT_IOR_EXPR;
9558 goto bit_ior;
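/* E.g. (X & 0xF0) + (X & 0x0F): the masks share no bits, so the
   addition can never carry and is equivalent to
   (X & 0xF0) | (X & 0x0F).  */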
9561 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9562 (plus (plus (mult) (mult)) (foo)) so that we can
9563 take advantage of the factoring cases below. */
9564 if (((TREE_CODE (arg0) == PLUS_EXPR
9565 || TREE_CODE (arg0) == MINUS_EXPR)
9566 && TREE_CODE (arg1) == MULT_EXPR)
9567 || ((TREE_CODE (arg1) == PLUS_EXPR
9568 || TREE_CODE (arg1) == MINUS_EXPR)
9569 && TREE_CODE (arg0) == MULT_EXPR))
9571 tree parg0, parg1, parg, marg;
9572 enum tree_code pcode;
9574 if (TREE_CODE (arg1) == MULT_EXPR)
9575 parg = arg0, marg = arg1;
9576 else
9577 parg = arg1, marg = arg0;
9578 pcode = TREE_CODE (parg);
9579 parg0 = TREE_OPERAND (parg, 0);
9580 parg1 = TREE_OPERAND (parg, 1);
9581 STRIP_NOPS (parg0);
9582 STRIP_NOPS (parg1);
9584 if (TREE_CODE (parg0) == MULT_EXPR
9585 && TREE_CODE (parg1) != MULT_EXPR)
9586 return fold_build2 (pcode, type,
9587 fold_build2 (PLUS_EXPR, type,
9588 fold_convert (type, parg0),
9589 fold_convert (type, marg)),
9590 fold_convert (type, parg1));
9591 if (TREE_CODE (parg0) != MULT_EXPR
9592 && TREE_CODE (parg1) == MULT_EXPR)
9593 return fold_build2 (PLUS_EXPR, type,
9594 fold_convert (type, parg0),
9595 fold_build2 (pcode, type,
9596 fold_convert (type, marg),
9597 fold_convert (type,
9598 parg1)));
9601 else
9603 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9604 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9605 return non_lvalue (fold_convert (type, arg0));
9607 /* Likewise if the operands are reversed. */
9608 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9609 return non_lvalue (fold_convert (type, arg1));
9611 /* Convert X + -C into X - C. */
9612 if (TREE_CODE (arg1) == REAL_CST
9613 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9615 tem = fold_negate_const (arg1, type);
9616 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9617 return fold_build2 (MINUS_EXPR, type,
9618 fold_convert (type, arg0),
9619 fold_convert (type, tem));
9622 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9623 to __complex__ ( x, y ). This is not the same for SNaNs or
9624 if signed zeros are involved. */
9625 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9627 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9629 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9630 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9631 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9632 bool arg0rz = false, arg0iz = false;
9633 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9634 || (arg0i && (arg0iz = real_zerop (arg0i))))
9636 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9637 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9638 if (arg0rz && arg1i && real_zerop (arg1i))
9640 tree rp = arg1r ? arg1r
9641 : build1 (REALPART_EXPR, rtype, arg1);
9642 tree ip = arg0i ? arg0i
9643 : build1 (IMAGPART_EXPR, rtype, arg0);
9644 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9646 else if (arg0iz && arg1r && real_zerop (arg1r))
9648 tree rp = arg0r ? arg0r
9649 : build1 (REALPART_EXPR, rtype, arg0);
9650 tree ip = arg1i ? arg1i
9651 : build1 (IMAGPART_EXPR, rtype, arg1);
9652 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9657 if (flag_unsafe_math_optimizations
9658 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9659 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9660 && (tem = distribute_real_division (code, type, arg0, arg1)))
9661 return tem;
9663 /* Convert x+x into x*2.0. */
9664 if (operand_equal_p (arg0, arg1, 0)
9665 && SCALAR_FLOAT_TYPE_P (type))
9666 return fold_build2 (MULT_EXPR, type, arg0,
9667 build_real (type, dconst2));
9669 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9670 We associate floats only if the user has specified
9671 -fassociative-math. */
9672 if (flag_associative_math
9673 && TREE_CODE (arg1) == PLUS_EXPR
9674 && TREE_CODE (arg0) != MULT_EXPR)
9676 tree tree10 = TREE_OPERAND (arg1, 0);
9677 tree tree11 = TREE_OPERAND (arg1, 1);
9678 if (TREE_CODE (tree11) == MULT_EXPR
9679 && TREE_CODE (tree10) == MULT_EXPR)
9681 tree tree0;
9682 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9683 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9686 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9687 We associate floats only if the user has specified
9688 -fassociative-math. */
9689 if (flag_associative_math
9690 && TREE_CODE (arg0) == PLUS_EXPR
9691 && TREE_CODE (arg1) != MULT_EXPR)
9693 tree tree00 = TREE_OPERAND (arg0, 0);
9694 tree tree01 = TREE_OPERAND (arg0, 1);
9695 if (TREE_CODE (tree01) == MULT_EXPR
9696 && TREE_CODE (tree00) == MULT_EXPR)
9698 tree tree0;
9699 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9700 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9705 bit_rotate:
9706 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9707 is a rotate of A by C1 bits. */
9708 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9709 is a rotate of A by B bits. */
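/* E.g. for unsigned 32-bit A, (A << 3) + (A >> 29) matches the
   first form (3 + 29 == 32) and becomes A rotated left by 3.  */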
9711 enum tree_code code0, code1;
9712 tree rtype;
9713 code0 = TREE_CODE (arg0);
9714 code1 = TREE_CODE (arg1);
9715 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9716 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9717 && operand_equal_p (TREE_OPERAND (arg0, 0),
9718 TREE_OPERAND (arg1, 0), 0)
9719 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9720 TYPE_UNSIGNED (rtype))
9721 /* Only create rotates in complete modes. Other cases are not
9722 expanded properly. */
9723 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9725 tree tree01, tree11;
9726 enum tree_code code01, code11;
9728 tree01 = TREE_OPERAND (arg0, 1);
9729 tree11 = TREE_OPERAND (arg1, 1);
9730 STRIP_NOPS (tree01);
9731 STRIP_NOPS (tree11);
9732 code01 = TREE_CODE (tree01);
9733 code11 = TREE_CODE (tree11);
9734 if (code01 == INTEGER_CST
9735 && code11 == INTEGER_CST
9736 && TREE_INT_CST_HIGH (tree01) == 0
9737 && TREE_INT_CST_HIGH (tree11) == 0
9738 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9739 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9740 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9741 code0 == LSHIFT_EXPR ? tree01 : tree11);
9742 else if (code11 == MINUS_EXPR)
9744 tree tree110, tree111;
9745 tree110 = TREE_OPERAND (tree11, 0);
9746 tree111 = TREE_OPERAND (tree11, 1);
9747 STRIP_NOPS (tree110);
9748 STRIP_NOPS (tree111);
9749 if (TREE_CODE (tree110) == INTEGER_CST
9750 && 0 == compare_tree_int (tree110,
9751 TYPE_PRECISION
9752 (TREE_TYPE (TREE_OPERAND
9753 (arg0, 0))))
9754 && operand_equal_p (tree01, tree111, 0))
9755 return build2 ((code0 == LSHIFT_EXPR
9756 ? LROTATE_EXPR
9757 : RROTATE_EXPR),
9758 type, TREE_OPERAND (arg0, 0), tree01);
9760 else if (code01 == MINUS_EXPR)
9762 tree tree010, tree011;
9763 tree010 = TREE_OPERAND (tree01, 0);
9764 tree011 = TREE_OPERAND (tree01, 1);
9765 STRIP_NOPS (tree010);
9766 STRIP_NOPS (tree011);
9767 if (TREE_CODE (tree010) == INTEGER_CST
9768 && 0 == compare_tree_int (tree010,
9769 TYPE_PRECISION
9770 (TREE_TYPE (TREE_OPERAND
9771 (arg0, 0))))
9772 && operand_equal_p (tree11, tree011, 0))
9773 return build2 ((code0 != LSHIFT_EXPR
9774 ? LROTATE_EXPR
9775 : RROTATE_EXPR),
9776 type, TREE_OPERAND (arg0, 0), tree11);
9781 associate:
9782 /* In most languages, we cannot associate operations on floats through
9783 parentheses. Rather than remember where the parentheses were, we
9784 don't associate floats at all, unless the user has specified
9785 -fassociative-math.
9786 And, we need to make sure type is not saturating. */
9788 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9789 && !TYPE_SATURATING (type))
9791 tree var0, con0, lit0, minus_lit0;
9792 tree var1, con1, lit1, minus_lit1;
9793 bool ok = true;
9795 /* Split both trees into variables, constants, and literals. Then
9796 associate each group together, the constants with literals,
9797 then the result with variables. This increases the chances of
9798 literals being recombined later and of generating relocatable
9799 expressions for the sum of a constant and literal. */
9800 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9801 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9802 code == MINUS_EXPR);
9804 /* With undefined overflow we can only associate constants
9805 with one variable. */
9806 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9807 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9808 && var0 && var1)
9810 tree tmp0 = var0;
9811 tree tmp1 = var1;
9813 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9814 tmp0 = TREE_OPERAND (tmp0, 0);
9815 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9816 tmp1 = TREE_OPERAND (tmp1, 0);
9817 /* The only case we can still associate with two variables
9818 is if they are the same, modulo negation. */
9819 if (!operand_equal_p (tmp0, tmp1, 0))
9820 ok = false;
9823 /* Only do something if we found more than two objects. Otherwise,
9824 nothing has changed and we risk infinite recursion. */
9825 if (ok
9826 && (2 < ((var0 != 0) + (var1 != 0)
9827 + (con0 != 0) + (con1 != 0)
9828 + (lit0 != 0) + (lit1 != 0)
9829 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9831 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9832 if (code == MINUS_EXPR)
9833 code = PLUS_EXPR;
9835 var0 = associate_trees (var0, var1, code, type);
9836 con0 = associate_trees (con0, con1, code, type);
9837 lit0 = associate_trees (lit0, lit1, code, type);
9838 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9840 /* Preserve the MINUS_EXPR if the negative part of the literal is
9841 greater than the positive part. Otherwise, the multiplicative
9842 folding code (i.e. extract_muldiv) may be fooled when
9843 unsigned constants are subtracted, as in the following
9844 example: ((X*2 + 4) - 8U)/2. */
9845 if (minus_lit0 && lit0)
9847 if (TREE_CODE (lit0) == INTEGER_CST
9848 && TREE_CODE (minus_lit0) == INTEGER_CST
9849 && tree_int_cst_lt (lit0, minus_lit0))
9851 minus_lit0 = associate_trees (minus_lit0, lit0,
9852 MINUS_EXPR, type);
9853 lit0 = 0;
9855 else
9857 lit0 = associate_trees (lit0, minus_lit0,
9858 MINUS_EXPR, type);
9859 minus_lit0 = 0;
9862 if (minus_lit0)
9864 if (con0 == 0)
9865 return fold_convert (type,
9866 associate_trees (var0, minus_lit0,
9867 MINUS_EXPR, type));
9868 else
9870 con0 = associate_trees (con0, minus_lit0,
9871 MINUS_EXPR, type);
9872 return fold_convert (type,
9873 associate_trees (var0, con0,
9874 PLUS_EXPR, type));
9878 con0 = associate_trees (con0, lit0, code, type);
9879 return fold_convert (type, associate_trees (var0, con0,
9880 code, type));
9884 return NULL_TREE;
9886 case MINUS_EXPR:
9887 /* Pointer simplifications for subtraction, simple reassociations. */
9888 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9890 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9891 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9892 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9894 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9895 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9896 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9897 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9898 return fold_build2 (PLUS_EXPR, type,
9899 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9900 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9902 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9903 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9905 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9906 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9907 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9908 if (tmp)
9909 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9912 /* A - (-B) -> A + B */
9913 if (TREE_CODE (arg1) == NEGATE_EXPR)
9914 return fold_build2 (PLUS_EXPR, type, op0,
9915 fold_convert (type, TREE_OPERAND (arg1, 0)));
9916 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9917 if (TREE_CODE (arg0) == NEGATE_EXPR
9918 && (FLOAT_TYPE_P (type)
9919 || INTEGRAL_TYPE_P (type))
9920 && negate_expr_p (arg1)
9921 && reorder_operands_p (arg0, arg1))
9922 return fold_build2 (MINUS_EXPR, type,
9923 fold_convert (type, negate_expr (arg1)),
9924 fold_convert (type, TREE_OPERAND (arg0, 0)));
9925 /* Convert -A - 1 to ~A. */
9926 if (INTEGRAL_TYPE_P (type)
9927 && TREE_CODE (arg0) == NEGATE_EXPR
9928 && integer_onep (arg1)
9929 && !TYPE_OVERFLOW_TRAPS (type))
9930 return fold_build1 (BIT_NOT_EXPR, type,
9931 fold_convert (type, TREE_OPERAND (arg0, 0)));
9933 /* Convert -1 - A to ~A. */
9934 if (INTEGRAL_TYPE_P (type)
9935 && integer_all_onesp (arg0))
9936 return fold_build1 (BIT_NOT_EXPR, type, op1);
9939 /* X - (X / CST) * CST is X % CST. */
9940 if (INTEGRAL_TYPE_P (type)
9941 && TREE_CODE (arg1) == MULT_EXPR
9942 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9943 && operand_equal_p (arg0,
9944 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9945 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9946 TREE_OPERAND (arg1, 1), 0))
9947 return fold_convert (type,
9948 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9949 arg0, TREE_OPERAND (arg1, 1)));
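/* E.g. X - (X/16)*16 folds to X % 16.  */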
9951 if (! FLOAT_TYPE_P (type))
9953 if (integer_zerop (arg0))
9954 return negate_expr (fold_convert (type, arg1));
9955 if (integer_zerop (arg1))
9956 return non_lvalue (fold_convert (type, arg0));
9958 /* Fold A - (A & B) into ~B & A. */
9959 if (!TREE_SIDE_EFFECTS (arg0)
9960 && TREE_CODE (arg1) == BIT_AND_EXPR)
9962 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9964 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9965 return fold_build2 (BIT_AND_EXPR, type,
9966 fold_build1 (BIT_NOT_EXPR, type, arg10),
9967 fold_convert (type, arg0));
9969 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9971 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9972 return fold_build2 (BIT_AND_EXPR, type,
9973 fold_build1 (BIT_NOT_EXPR, type, arg11),
9974 fold_convert (type, arg0));
9978 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9979 any power of 2 minus 1. */
9980 if (TREE_CODE (arg0) == BIT_AND_EXPR
9981 && TREE_CODE (arg1) == BIT_AND_EXPR
9982 && operand_equal_p (TREE_OPERAND (arg0, 0),
9983 TREE_OPERAND (arg1, 0), 0))
9985 tree mask0 = TREE_OPERAND (arg0, 1);
9986 tree mask1 = TREE_OPERAND (arg1, 1);
9987 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9989 if (operand_equal_p (tem, mask1, 0))
9991 tem = fold_build2 (BIT_XOR_EXPR, type,
9992 TREE_OPERAND (arg0, 0), mask1);
9993 return fold_build2 (MINUS_EXPR, type, tem, mask1);
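/* E.g. with B == 7, a power of 2 minus 1:
   (A & ~7) - (A & 7) becomes (A ^ 7) - 7.  */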
9998 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9999 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10000 return non_lvalue (fold_convert (type, arg0));
10002 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10003 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10004 (-ARG1 + ARG0) reduces to -ARG1. */
10005 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10006 return negate_expr (fold_convert (type, arg1));
10008 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10009 __complex__ ( x, -y ). This is not the same for SNaNs or if
10010 signed zeros are involved. */
10011 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10012 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10013 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10015 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10016 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10017 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10018 bool arg0rz = false, arg0iz = false;
10019 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10020 || (arg0i && (arg0iz = real_zerop (arg0i))))
10022 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10023 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10024 if (arg0rz && arg1i && real_zerop (arg1i))
10026 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10027 arg1r ? arg1r
10028 : build1 (REALPART_EXPR, rtype, arg1));
10029 tree ip = arg0i ? arg0i
10030 : build1 (IMAGPART_EXPR, rtype, arg0);
10031 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10033 else if (arg0iz && arg1r && real_zerop (arg1r))
10035 tree rp = arg0r ? arg0r
10036 : build1 (REALPART_EXPR, rtype, arg0);
10037 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10038 arg1i ? arg1i
10039 : build1 (IMAGPART_EXPR, rtype, arg1));
10040 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10045 /* Fold &x - &x. This can happen from &x.foo - &x.
10046 This is unsafe for certain floats even in non-IEEE formats.
10047 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10048 Also note that operand_equal_p is always false if an operand
10049 is volatile. */
10051 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10052 && operand_equal_p (arg0, arg1, 0))
10053 return fold_convert (type, integer_zero_node);
10055 /* A - B -> A + (-B) if B is easily negatable. */
10056 if (negate_expr_p (arg1)
10057 && ((FLOAT_TYPE_P (type)
10058 /* Avoid this transformation if B is a positive REAL_CST. */
10059 && (TREE_CODE (arg1) != REAL_CST
10060 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10061 || INTEGRAL_TYPE_P (type)))
10062 return fold_build2 (PLUS_EXPR, type,
10063 fold_convert (type, arg0),
10064 fold_convert (type, negate_expr (arg1)));
10066 /* Try folding difference of addresses. */
10068 HOST_WIDE_INT diff;
10070 if ((TREE_CODE (arg0) == ADDR_EXPR
10071 || TREE_CODE (arg1) == ADDR_EXPR)
10072 && ptr_difference_const (arg0, arg1, &diff))
10073 return build_int_cst_type (type, diff);
10076 /* Fold &a[i] - &a[j] to i-j. */
10077 if (TREE_CODE (arg0) == ADDR_EXPR
10078 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10079 && TREE_CODE (arg1) == ADDR_EXPR
10080 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10082 tree aref0 = TREE_OPERAND (arg0, 0);
10083 tree aref1 = TREE_OPERAND (arg1, 0);
10084 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10085 TREE_OPERAND (aref1, 0), 0))
10087 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10088 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10089 tree esz = array_ref_element_size (aref0);
10090 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10091 return fold_build2 (MULT_EXPR, type, diff,
10092 fold_convert (type, esz));
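/* E.g. &a[5] - &a[2] folds to (5 - 2) * sizeof (a[0]).  */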
10097 if (flag_unsafe_math_optimizations
10098 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10099 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10100 && (tem = distribute_real_division (code, type, arg0, arg1)))
10101 return tem;
10103 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10104 same or one. Make sure type is not saturating.
10105 fold_plusminus_mult_expr will re-associate. */
10106 if ((TREE_CODE (arg0) == MULT_EXPR
10107 || TREE_CODE (arg1) == MULT_EXPR)
10108 && !TYPE_SATURATING (type)
10109 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10111 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10112 if (tem)
10113 return tem;
10116 goto associate;
10118 case MULT_EXPR:
10119 /* (-A) * (-B) -> A * B */
10120 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10121 return fold_build2 (MULT_EXPR, type,
10122 fold_convert (type, TREE_OPERAND (arg0, 0)),
10123 fold_convert (type, negate_expr (arg1)));
10124 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10125 return fold_build2 (MULT_EXPR, type,
10126 fold_convert (type, negate_expr (arg0)),
10127 fold_convert (type, TREE_OPERAND (arg1, 0)));
10129 if (! FLOAT_TYPE_P (type))
10131 if (integer_zerop (arg1))
10132 return omit_one_operand (type, arg1, arg0);
10133 if (integer_onep (arg1))
10134 return non_lvalue (fold_convert (type, arg0));
10135 /* Transform x * -1 into -x. Make sure to do the negation
10136 on the original operand with conversions not stripped
10137 because we can only strip non-sign-changing conversions. */
10138 if (integer_all_onesp (arg1))
10139 return fold_convert (type, negate_expr (op0));
10140 /* Transform x * -C into -x * C if x is easily negatable. */
10141 if (TREE_CODE (arg1) == INTEGER_CST
10142 && tree_int_cst_sgn (arg1) == -1
10143 && negate_expr_p (arg0)
10144 && (tem = negate_expr (arg1)) != arg1
10145 && !TREE_OVERFLOW (tem))
10146 return fold_build2 (MULT_EXPR, type,
10147 fold_convert (type, negate_expr (arg0)), tem);
10149 /* (a * (1 << b)) is (a << b) */
10150 if (TREE_CODE (arg1) == LSHIFT_EXPR
10151 && integer_onep (TREE_OPERAND (arg1, 0)))
10152 return fold_build2 (LSHIFT_EXPR, type, op0,
10153 TREE_OPERAND (arg1, 1));
10154 if (TREE_CODE (arg0) == LSHIFT_EXPR
10155 && integer_onep (TREE_OPERAND (arg0, 0)))
10156 return fold_build2 (LSHIFT_EXPR, type, op1,
10157 TREE_OPERAND (arg0, 1));
10159 /* (A + A) * C -> A * 2 * C */
10160 if (TREE_CODE (arg0) == PLUS_EXPR
10161 && TREE_CODE (arg1) == INTEGER_CST
10162 && operand_equal_p (TREE_OPERAND (arg0, 0),
10163 TREE_OPERAND (arg0, 1), 0))
10164 return fold_build2 (MULT_EXPR, type,
10165 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10166 TREE_OPERAND (arg0, 1)),
10167 fold_build2 (MULT_EXPR, type,
10168 build_int_cst (type, 2) , arg1));
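/* E.g. (A + A) * 3 becomes A * 2 * 3, and the inner fold combines
   the constants into A * 6.  */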
10170 strict_overflow_p = false;
10171 if (TREE_CODE (arg1) == INTEGER_CST
10172 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10173 &strict_overflow_p)))
10175 if (strict_overflow_p)
10176 fold_overflow_warning (("assuming signed overflow does not "
10177 "occur when simplifying "
10178 "multiplication"),
10179 WARN_STRICT_OVERFLOW_MISC);
10180 return fold_convert (type, tem);
10183 /* Optimize z * conj(z) for integer complex numbers. */
10184 if (TREE_CODE (arg0) == CONJ_EXPR
10185 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10186 return fold_mult_zconjz (type, arg1);
10187 if (TREE_CODE (arg1) == CONJ_EXPR
10188 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10189 return fold_mult_zconjz (type, arg0);
10191 else
10193 /* Maybe fold x * 0 to 0. The expressions aren't the same
10194 when x is NaN, since x * 0 is also NaN. Nor are they the
10195 same in modes with signed zeros, since multiplying a
10196 negative value by 0 gives -0, not +0. */
10197 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10198 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10199 && real_zerop (arg1))
10200 return omit_one_operand (type, arg1, arg0);
10201 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10202 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10203 && real_onep (arg1))
10204 return non_lvalue (fold_convert (type, arg0));
10206 /* Transform x * -1.0 into -x. */
10207 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10208 && real_minus_onep (arg1))
10209 return fold_convert (type, negate_expr (arg0));
10211 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10212 the result for floating point types due to rounding so it is applied
10213 only if -fassociative-math was specified. */
10214 if (flag_associative_math
10215 && TREE_CODE (arg0) == RDIV_EXPR
10216 && TREE_CODE (arg1) == REAL_CST
10217 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10219 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10220 arg1, 0);
10221 if (tem)
10222 return fold_build2 (RDIV_EXPR, type, tem,
10223 TREE_OPERAND (arg0, 1));
10226 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10227 if (operand_equal_p (arg0, arg1, 0))
10229 tree tem = fold_strip_sign_ops (arg0);
10230 if (tem != NULL_TREE)
10232 tem = fold_convert (type, tem);
10233 return fold_build2 (MULT_EXPR, type, tem, tem);
10237 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10238 This is not the same for NaNs or if signed zeros are
10239 involved. */
10240 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10241 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10242 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10243 && TREE_CODE (arg1) == COMPLEX_CST
10244 && real_zerop (TREE_REALPART (arg1)))
10246 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10247 if (real_onep (TREE_IMAGPART (arg1)))
10248 return fold_build2 (COMPLEX_EXPR, type,
10249 negate_expr (fold_build1 (IMAGPART_EXPR,
10250 rtype, arg0)),
10251 fold_build1 (REALPART_EXPR, rtype, arg0));
10252 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10253 return fold_build2 (COMPLEX_EXPR, type,
10254 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10255 negate_expr (fold_build1 (REALPART_EXPR,
10256 rtype, arg0)));
10259 /* Optimize z * conj(z) for floating point complex numbers.
10260 Guarded by flag_unsafe_math_optimizations as non-finite
10261 imaginary components don't produce scalar results. */
10262 if (flag_unsafe_math_optimizations
10263 && TREE_CODE (arg0) == CONJ_EXPR
10264 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10265 return fold_mult_zconjz (type, arg1);
10266 if (flag_unsafe_math_optimizations
10267 && TREE_CODE (arg1) == CONJ_EXPR
10268 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10269 return fold_mult_zconjz (type, arg0);
10271 if (flag_unsafe_math_optimizations)
10273 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10274 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10276 /* Optimizations of root(...)*root(...). */
10277 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10279 tree rootfn, arg;
10280 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10281 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10283 /* Optimize sqrt(x)*sqrt(x) as x. */
10284 if (BUILTIN_SQRT_P (fcode0)
10285 && operand_equal_p (arg00, arg10, 0)
10286 && ! HONOR_SNANS (TYPE_MODE (type)))
10287 return arg00;
10289 /* Optimize root(x)*root(y) as root(x*y). */
10290 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10291 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10292 return build_call_expr (rootfn, 1, arg);
10295 /* Optimize expN(x)*expN(y) as expN(x+y). */
10296 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10298 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10299 tree arg = fold_build2 (PLUS_EXPR, type,
10300 CALL_EXPR_ARG (arg0, 0),
10301 CALL_EXPR_ARG (arg1, 0));
10302 return build_call_expr (expfn, 1, arg);
10305 /* Optimizations of pow(...)*pow(...). */
10306 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10307 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10308 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10310 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10311 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10312 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10313 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10315 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10316 if (operand_equal_p (arg01, arg11, 0))
10318 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10319 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10320 return build_call_expr (powfn, 2, arg, arg01);
10323 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10324 if (operand_equal_p (arg00, arg10, 0))
10326 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10327 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10328 return build_call_expr (powfn, 2, arg00, arg);
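/* E.g. pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0) under
   -funsafe-math-optimizations.  */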
10332 /* Optimize tan(x)*cos(x) as sin(x). */
10333 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10334 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10335 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10336 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10337 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10338 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10339 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10340 CALL_EXPR_ARG (arg1, 0), 0))
10342 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10344 if (sinfn != NULL_TREE)
10345 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10348 /* Optimize x*pow(x,c) as pow(x,c+1). */
10349 if (fcode1 == BUILT_IN_POW
10350 || fcode1 == BUILT_IN_POWF
10351 || fcode1 == BUILT_IN_POWL)
10353 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10354 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10355 if (TREE_CODE (arg11) == REAL_CST
10356 && !TREE_OVERFLOW (arg11)
10357 && operand_equal_p (arg0, arg10, 0))
10359 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10360 REAL_VALUE_TYPE c;
10361 tree arg;
10363 c = TREE_REAL_CST (arg11);
10364 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10365 arg = build_real (type, c);
10366 return build_call_expr (powfn, 2, arg0, arg);
10370 /* Optimize pow(x,c)*x as pow(x,c+1). */
10371 if (fcode0 == BUILT_IN_POW
10372 || fcode0 == BUILT_IN_POWF
10373 || fcode0 == BUILT_IN_POWL)
10375 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10376 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10377 if (TREE_CODE (arg01) == REAL_CST
10378 && !TREE_OVERFLOW (arg01)
10379 && operand_equal_p (arg1, arg00, 0))
10381 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10382 REAL_VALUE_TYPE c;
10383 tree arg;
10385 c = TREE_REAL_CST (arg01);
10386 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10387 arg = build_real (type, c);
10388 return build_call_expr (powfn, 2, arg1, arg);
10392 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10393 if (! optimize_size
10394 && operand_equal_p (arg0, arg1, 0))
10396 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10398 if (powfn)
10400 tree arg = build_real (type, dconst2);
10401 return build_call_expr (powfn, 2, arg0, arg);
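/* Using pow (x, 2.0) keeps the expression in a form the pow folds
   above can combine further; as the comment notes, the expander
   turns it back into x*x, so no library call is emitted.  */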
10406 goto associate;
10408 case BIT_IOR_EXPR:
10409 bit_ior:
10410 if (integer_all_onesp (arg1))
10411 return omit_one_operand (type, arg1, arg0);
10412 if (integer_zerop (arg1))
10413 return non_lvalue (fold_convert (type, arg0));
10414 if (operand_equal_p (arg0, arg1, 0))
10415 return non_lvalue (fold_convert (type, arg0));
10417 /* ~X | X is -1. */
10418 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10421 t1 = fold_convert (type, integer_zero_node);
10422 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10423 return omit_one_operand (type, t1, arg1);
10426 /* X | ~X is -1. */
10427 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10428 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10430 t1 = fold_convert (type, integer_zero_node);
10431 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10432 return omit_one_operand (type, t1, arg0);
10435 /* Canonicalize (X & C1) | C2. */
10436 if (TREE_CODE (arg0) == BIT_AND_EXPR
10437 && TREE_CODE (arg1) == INTEGER_CST
10438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10440 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10441 int width = TYPE_PRECISION (type), w;
10442 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10443 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10444 hi2 = TREE_INT_CST_HIGH (arg1);
10445 lo2 = TREE_INT_CST_LOW (arg1);
10447 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10448 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10449 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10451 if (width > HOST_BITS_PER_WIDE_INT)
10453 mhi = (unsigned HOST_WIDE_INT) -1
10454 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10455 mlo = -1;
10457 else
10459 mhi = 0;
10460 mlo = (unsigned HOST_WIDE_INT) -1
10461 >> (HOST_BITS_PER_WIDE_INT - width);
10464 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10465 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10466 return fold_build2 (BIT_IOR_EXPR, type,
10467 TREE_OPERAND (arg0, 0), arg1);
10469 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10470 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10471 mode which allows further optimizations. */
10472 hi1 &= mhi;
10473 lo1 &= mlo;
10474 hi2 &= mhi;
10475 lo2 &= mlo;
10476 hi3 = hi1 & ~hi2;
10477 lo3 = lo1 & ~lo2;
10478 for (w = BITS_PER_UNIT;
10479 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10480 w <<= 1)
10482 unsigned HOST_WIDE_INT mask
10483 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10484 if (((lo1 | lo2) & mask) == mask
10485 && (lo1 & ~mask) == 0 && hi1 == 0)
10487 hi3 = 0;
10488 lo3 = mask;
10489 break;
10492 if (hi3 != hi1 || lo3 != lo1)
10493 return fold_build2 (BIT_IOR_EXPR, type,
10494 fold_build2 (BIT_AND_EXPR, type,
10495 TREE_OPERAND (arg0, 0),
10496 build_int_cst_wide (type,
10497 lo3, hi3)),
10498 arg1);
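/* E.g. (X & 0x3C) | 0x0C: C1 & ~C2 == 0x30, so this canonicalizes
   to (X & 0x30) | 0x0C.  */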
10501 /* (X & Y) | Y is (X, Y). */
10502 if (TREE_CODE (arg0) == BIT_AND_EXPR
10503 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10504 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10505 /* (X & Y) | X is (Y, X). */
10506 if (TREE_CODE (arg0) == BIT_AND_EXPR
10507 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10508 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10509 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10510 /* X | (X & Y) is (Y, X). */
10511 if (TREE_CODE (arg1) == BIT_AND_EXPR
10512 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10513 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10514 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10515 /* X | (Y & X) is (Y, X). */
10516 if (TREE_CODE (arg1) == BIT_AND_EXPR
10517 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10518 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10519 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10521 t1 = distribute_bit_expr (code, type, arg0, arg1);
10522 if (t1 != NULL_TREE)
10523 return t1;
10525 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10527 This results in more efficient code for machines without a NAND
10528 instruction. Combine will canonicalize to the first form
10529 which will allow use of NAND instructions provided by the
10530 backend if they exist. */
10531 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10532 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10534 return fold_build1 (BIT_NOT_EXPR, type,
10535 build2 (BIT_AND_EXPR, type,
10536 fold_convert (type,
10537 TREE_OPERAND (arg0, 0)),
10538 fold_convert (type,
10539 TREE_OPERAND (arg1, 0))));
10542 /* See if this can be simplified into a rotate first. If that
10543 is unsuccessful, continue in the association code. */
10544 goto bit_rotate;
10546 case BIT_XOR_EXPR:
10547 if (integer_zerop (arg1))
10548 return non_lvalue (fold_convert (type, arg0));
10549 if (integer_all_onesp (arg1))
10550 return fold_build1 (BIT_NOT_EXPR, type, op0);
10551 if (operand_equal_p (arg0, arg1, 0))
10552 return omit_one_operand (type, integer_zero_node, arg0);
10554 /* ~X ^ X is -1. */
10555 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10556 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10558 t1 = fold_convert (type, integer_zero_node);
10559 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10560 return omit_one_operand (type, t1, arg1);
10563 /* X ^ ~X is -1. */
10564 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10565 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10567 t1 = fold_convert (type, integer_zero_node);
10568 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10569 return omit_one_operand (type, t1, arg0);
10572 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10573 with a constant, and the two constants have no bits in common,
10574 we should treat this as a BIT_IOR_EXPR since this may produce more
10575 simplifications. */
10576 if (TREE_CODE (arg0) == BIT_AND_EXPR
10577 && TREE_CODE (arg1) == BIT_AND_EXPR
10578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10579 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10580 && integer_zerop (const_binop (BIT_AND_EXPR,
10581 TREE_OPERAND (arg0, 1),
10582 TREE_OPERAND (arg1, 1), 0)))
10584 code = BIT_IOR_EXPR;
10585 goto bit_ior;
10588 /* (X | Y) ^ X -> Y & ~X. */
10589 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10590 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10592 tree t2 = TREE_OPERAND (arg0, 1);
10593 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10594 arg1);
10595 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10596 fold_convert (type, t1));
10597 return t1;
10600 /* (Y | X) ^ X -> Y & ~X. */
10601 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10602 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10604 tree t2 = TREE_OPERAND (arg0, 0);
10605 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10606 arg1);
10607 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10608 fold_convert (type, t1));
10609 return t1;
10612 /* X ^ (X | Y) -> Y & ~X. */
10613 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10614 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10616 tree t2 = TREE_OPERAND (arg1, 1);
10617 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10618 arg0);
10619 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10620 fold_convert (type, t1));
10621 return t1;
10624 /* X ^ (Y | X) -> Y & ~X. */
10625 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10626 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10628 tree t2 = TREE_OPERAND (arg1, 0);
10629 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10630 arg0);
10631 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10632 fold_convert (type, t1));
10633 return t1;
10636 /* Convert ~X ^ ~Y to X ^ Y. */
10637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10638 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10639 return fold_build2 (code, type,
10640 fold_convert (type, TREE_OPERAND (arg0, 0)),
10641 fold_convert (type, TREE_OPERAND (arg1, 0)));
10643 /* Convert ~X ^ C to X ^ ~C. */
10644 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10645 && TREE_CODE (arg1) == INTEGER_CST)
10646 return fold_build2 (code, type,
10647 fold_convert (type, TREE_OPERAND (arg0, 0)),
10648 fold_build1 (BIT_NOT_EXPR, type, arg1));
10650 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10651 if (TREE_CODE (arg0) == BIT_AND_EXPR
10652 && integer_onep (TREE_OPERAND (arg0, 1))
10653 && integer_onep (arg1))
10654 return fold_build2 (EQ_EXPR, type, arg0,
10655 build_int_cst (TREE_TYPE (arg0), 0));
10657 /* Fold (X & Y) ^ Y as ~X & Y. */
10658 if (TREE_CODE (arg0) == BIT_AND_EXPR
10659 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10661 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10662 return fold_build2 (BIT_AND_EXPR, type,
10663 fold_build1 (BIT_NOT_EXPR, type, tem),
10664 fold_convert (type, arg1));
10666 /* Fold (X & Y) ^ X as ~Y & X. */
10667 if (TREE_CODE (arg0) == BIT_AND_EXPR
10668 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10669 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10671 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10672 return fold_build2 (BIT_AND_EXPR, type,
10673 fold_build1 (BIT_NOT_EXPR, type, tem),
10674 fold_convert (type, arg1));
10676 /* Fold X ^ (X & Y) as X & ~Y. */
10677 if (TREE_CODE (arg1) == BIT_AND_EXPR
10678 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10680 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10681 return fold_build2 (BIT_AND_EXPR, type,
10682 fold_convert (type, arg0),
10683 fold_build1 (BIT_NOT_EXPR, type, tem));
10685 /* Fold X ^ (Y & X) as ~Y & X. */
10686 if (TREE_CODE (arg1) == BIT_AND_EXPR
10687 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10688 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10690 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10691 return fold_build2 (BIT_AND_EXPR, type,
10692 fold_build1 (BIT_NOT_EXPR, type, tem),
10693 fold_convert (type, arg0));
10696 /* See if this can be simplified into a rotate first. If that
10697 is unsuccessful, continue in the association code. */
10698 goto bit_rotate;
10700 case BIT_AND_EXPR:
10701 if (integer_all_onesp (arg1))
10702 return non_lvalue (fold_convert (type, arg0));
10703 if (integer_zerop (arg1))
10704 return omit_one_operand (type, arg1, arg0);
10705 if (operand_equal_p (arg0, arg1, 0))
10706 return non_lvalue (fold_convert (type, arg0));
10708 /* ~X & X is always zero. */
10709 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10710 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10711 return omit_one_operand (type, integer_zero_node, arg1);
10713 /* X & ~X is always zero. */
10714 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10716 return omit_one_operand (type, integer_zero_node, arg0);
10718 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10719 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10720 && TREE_CODE (arg1) == INTEGER_CST
10721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10723 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10724 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10725 TREE_OPERAND (arg0, 0), tmp1);
10726 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10727 TREE_OPERAND (arg0, 1), tmp1);
10728 return fold_convert (type,
10729 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10730 tmp2, tmp3));
10733 /* (X | Y) & Y is (X, Y). */
10734 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10735 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10736 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10737 /* (X | Y) & X is (Y, X). */
10738 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10739 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10740 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10741 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10742 /* X & (X | Y) is (Y, X). */
10743 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10745 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10746 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10747 /* X & (Y | X) is (Y, X). */
10748 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10749 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10750 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10751 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10753 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10754 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10755 && integer_onep (TREE_OPERAND (arg0, 1))
10756 && integer_onep (arg1))
10758 tem = TREE_OPERAND (arg0, 0);
10759 return fold_build2 (EQ_EXPR, type,
10760 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10761 build_int_cst (TREE_TYPE (tem), 1)),
10762 build_int_cst (TREE_TYPE (tem), 0));
10764 /* Fold ~X & 1 as (X & 1) == 0. */
10765 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10766 && integer_onep (arg1))
10768 tem = TREE_OPERAND (arg0, 0);
10769 return fold_build2 (EQ_EXPR, type,
10770 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10771 build_int_cst (TREE_TYPE (tem), 1)),
10772 build_int_cst (TREE_TYPE (tem), 0));
10775 /* Fold (X ^ Y) & Y as ~X & Y. */
10776 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10777 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10779 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10780 return fold_build2 (BIT_AND_EXPR, type,
10781 fold_build1 (BIT_NOT_EXPR, type, tem),
10782 fold_convert (type, arg1));
10784 /* Fold (X ^ Y) & X as ~Y & X. */
10785 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10786 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10787 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10789 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10790 return fold_build2 (BIT_AND_EXPR, type,
10791 fold_build1 (BIT_NOT_EXPR, type, tem),
10792 fold_convert (type, arg1));
10794 /* Fold X & (X ^ Y) as X & ~Y. */
10795 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10796 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10798 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10799 return fold_build2 (BIT_AND_EXPR, type,
10800 fold_convert (type, arg0),
10801 fold_build1 (BIT_NOT_EXPR, type, tem));
10803 /* Fold X & (Y ^ X) as ~Y & X. */
10804 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10805 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10806 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10808 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10809 return fold_build2 (BIT_AND_EXPR, type,
10810 fold_build1 (BIT_NOT_EXPR, type, tem),
10811 fold_convert (type, arg0));
10814 t1 = distribute_bit_expr (code, type, arg0, arg1);
10815 if (t1 != NULL_TREE)
10816 return t1;
10817 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10818 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10819 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10821 unsigned int prec
10822 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10824 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10825 && (~TREE_INT_CST_LOW (arg1)
10826 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10827 return fold_convert (type, TREE_OPERAND (arg0, 0));
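/* For instance, if c has type unsigned char (precision 8), then
   (int) c & 0377 keeps every bit of c and folds to plain (int) c.  */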
10830 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10832 This results in more efficient code for machines without a NOR
10833 instruction. Combine will canonicalize to the first form,
10834 which will allow use of NOR instructions provided by the
10835 backend if they exist. */
10836 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10837 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10839 return fold_build1 (BIT_NOT_EXPR, type,
10840 build2 (BIT_IOR_EXPR, type,
10841 fold_convert (type,
10842 TREE_OPERAND (arg0, 0)),
10843 fold_convert (type,
10844 TREE_OPERAND (arg1, 0))));
10847 /* If arg0 is derived from the address of an object or function, we may
10848 be able to fold this expression using the object or function's
10849 alignment. */
10850 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10852 unsigned HOST_WIDE_INT modulus, residue;
10853 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10855 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10857 /* This works because modulus is a power of 2. If this weren't the
10858 case, we'd have to replace it by its greatest power-of-2
10859 divisor: modulus & -modulus. */
10860 if (low < modulus)
10861 return build_int_cst (type, residue & low);
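/* Illustration: if arg0 is the address of an object known to be
   8-byte aligned (modulus == 8, residue == 0), then arg0 & 7 folds
   to the constant 0, since 7 < 8.  */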
10864 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10865 and (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10866 if the new mask might be further optimized. */
10867 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10868 || TREE_CODE (arg0) == RSHIFT_EXPR)
10869 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10870 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10871 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10872 < TYPE_PRECISION (TREE_TYPE (arg0))
10873 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10874 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10876 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10877 unsigned HOST_WIDE_INT mask
10878 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10879 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10880 tree shift_type = TREE_TYPE (arg0);
10882 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10883 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10884 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10885 && TYPE_PRECISION (TREE_TYPE (arg0))
10886 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10888 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10889 tree arg00 = TREE_OPERAND (arg0, 0);
10890 /* See if more bits can be proven as zero because of
10891 zero extension. */
10892 if (TREE_CODE (arg00) == NOP_EXPR
10893 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10895 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10896 if (TYPE_PRECISION (inner_type)
10897 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10898 && TYPE_PRECISION (inner_type) < prec)
10900 prec = TYPE_PRECISION (inner_type);
10901 /* See if we can shorten the right shift. */
10902 if (shiftc < prec)
10903 shift_type = inner_type;
10906 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10907 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10908 zerobits <<= prec - shiftc;
10909 /* For an arithmetic shift, if the sign bit could be set, zerobits
10910 can actually contain sign bits, so no transformation is
10911 possible, unless MASK masks them all away. In that
10912 case the shift needs to be converted into a logical shift. */
10913 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10914 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10916 if ((mask & zerobits) == 0)
10917 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10918 else
10919 zerobits = 0;
10923 /* ((X << 16) & 0xff00) is (X, 0). */
10924 if ((mask & zerobits) == mask)
10925 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10927 newmask = mask | zerobits;
10928 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10930 unsigned int prec;
10932 /* Only do the transformation if NEWMASK is some integer
10933 mode's mask. */
10934 for (prec = BITS_PER_UNIT;
10935 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10936 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10937 break;
10938 if (prec < HOST_BITS_PER_WIDE_INT
10939 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10941 if (shift_type != TREE_TYPE (arg0))
10943 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10944 fold_convert (shift_type,
10945 TREE_OPERAND (arg0, 0)),
10946 TREE_OPERAND (arg0, 1));
10947 tem = fold_convert (type, tem);
10949 else
10950 tem = op0;
10951 return fold_build2 (BIT_AND_EXPR, type, tem,
10952 build_int_cst_type (TREE_TYPE (op1),
10953 newmask));
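/* Worked example, assuming a 32-bit unsigned X: in
   (X >> 8) & 0x00ffffff the shift already clears the top eight
   bits, so zerobits == 0xff000000 and newmask == 0xffffffff; the
   rebuilt all-ones mask then folds away entirely.  */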
10958 goto associate;
10960 case RDIV_EXPR:
10961 /* Don't touch a floating-point divide by zero unless the mode
10962 of the constant can represent infinity. */
10963 if (TREE_CODE (arg1) == REAL_CST
10964 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10965 && real_zerop (arg1))
10966 return NULL_TREE;
10968 /* Optimize A / A to 1.0 if we don't care about
10969 NaNs or Infinities. Skip the transformation
10970 for non-real operands. */
10971 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10972 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10973 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10974 && operand_equal_p (arg0, arg1, 0))
10976 tree r = build_real (TREE_TYPE (arg0), dconst1);
10978 return omit_two_operands (type, r, arg0, arg1);
10981 /* The complex version of the above A / A optimization. */
10982 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10983 && operand_equal_p (arg0, arg1, 0))
10985 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10986 if (! HONOR_NANS (TYPE_MODE (elem_type))
10987 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10989 tree r = build_real (elem_type, dconst1);
10990 /* omit_two_operands will call fold_convert for us. */
10991 return omit_two_operands (type, r, arg0, arg1);
10995 /* (-A) / (-B) -> A / B */
10996 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10997 return fold_build2 (RDIV_EXPR, type,
10998 TREE_OPERAND (arg0, 0),
10999 negate_expr (arg1));
11000 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11001 return fold_build2 (RDIV_EXPR, type,
11002 negate_expr (arg0),
11003 TREE_OPERAND (arg1, 0));
11005 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11006 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11007 && real_onep (arg1))
11008 return non_lvalue (fold_convert (type, arg0));
11010 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11011 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11012 && real_minus_onep (arg1))
11013 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11015 /* If ARG1 is a constant, we can convert this to a multiply by the
11016 reciprocal. This does not have the same rounding properties,
11017 so only do this if -freciprocal-math. We can actually
11018 always safely do it if ARG1 is a power of two, but it's hard to
11019 tell if it is or not in a portable manner. */
11020 if (TREE_CODE (arg1) == REAL_CST)
11022 if (flag_reciprocal_math
11023 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11024 arg1, 0)))
11025 return fold_build2 (MULT_EXPR, type, arg0, tem);
11026 /* Find the reciprocal if optimizing and the result is exact. */
11027 if (optimize)
11029 REAL_VALUE_TYPE r;
11030 r = TREE_REAL_CST (arg1);
11031 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11033 tem = build_real (type, r);
11034 return fold_build2 (MULT_EXPR, type,
11035 fold_convert (type, arg0), tem);
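/* For example, x / 2.0 becomes x * 0.5 whenever we are optimizing,
   since 0.5 is the exact inverse of 2.0, whereas x / 3.0 becomes
   x * (1.0/3.0) only under -freciprocal-math, because that
   reciprocal is inexact.  */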
11039 /* Convert A/B/C to A/(B*C). */
11040 if (flag_reciprocal_math
11041 && TREE_CODE (arg0) == RDIV_EXPR)
11042 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11043 fold_build2 (MULT_EXPR, type,
11044 TREE_OPERAND (arg0, 1), arg1));
11046 /* Convert A/(B/C) to (A/B)*C. */
11047 if (flag_reciprocal_math
11048 && TREE_CODE (arg1) == RDIV_EXPR)
11049 return fold_build2 (MULT_EXPR, type,
11050 fold_build2 (RDIV_EXPR, type, arg0,
11051 TREE_OPERAND (arg1, 0)),
11052 TREE_OPERAND (arg1, 1));
11054 /* Convert C1/(X*C2) into (C1/C2)/X. */
11055 if (flag_reciprocal_math
11056 && TREE_CODE (arg1) == MULT_EXPR
11057 && TREE_CODE (arg0) == REAL_CST
11058 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11060 tree tem = const_binop (RDIV_EXPR, arg0,
11061 TREE_OPERAND (arg1, 1), 0);
11062 if (tem)
11063 return fold_build2 (RDIV_EXPR, type, tem,
11064 TREE_OPERAND (arg1, 0));
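/* These reassociations save divisions, e.g. a/b/c becomes a/(b*c)
   (one division instead of two) and 6.0/(x*2.0) becomes 3.0/x.  */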
11067 if (flag_unsafe_math_optimizations)
11069 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11070 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11072 /* Optimize sin(x)/cos(x) as tan(x). */
11073 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11074 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11075 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11076 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11077 CALL_EXPR_ARG (arg1, 0), 0))
11079 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11081 if (tanfn != NULL_TREE)
11082 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11085 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11086 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11087 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11088 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11089 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11090 CALL_EXPR_ARG (arg1, 0), 0))
11092 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11094 if (tanfn != NULL_TREE)
11096 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11097 return fold_build2 (RDIV_EXPR, type,
11098 build_real (type, dconst1), tmp);
11102 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11103 NaNs or Infinities. */
11104 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11105 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11106 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11108 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11109 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11111 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11112 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11113 && operand_equal_p (arg00, arg01, 0))
11115 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11117 if (cosfn != NULL_TREE)
11118 return build_call_expr (cosfn, 1, arg00);
11122 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11123 NaNs or Infinities. */
11124 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11125 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11126 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11128 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11129 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11131 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11132 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11133 && operand_equal_p (arg00, arg01, 0))
11135 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11137 if (cosfn != NULL_TREE)
11139 tree tmp = build_call_expr (cosfn, 1, arg00);
11140 return fold_build2 (RDIV_EXPR, type,
11141 build_real (type, dconst1),
11142 tmp);
11147 /* Optimize pow(x,c)/x as pow(x,c-1). */
11148 if (fcode0 == BUILT_IN_POW
11149 || fcode0 == BUILT_IN_POWF
11150 || fcode0 == BUILT_IN_POWL)
11152 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11153 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11154 if (TREE_CODE (arg01) == REAL_CST
11155 && !TREE_OVERFLOW (arg01)
11156 && operand_equal_p (arg1, arg00, 0))
11158 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11159 REAL_VALUE_TYPE c;
11160 tree arg;
11162 c = TREE_REAL_CST (arg01);
11163 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11164 arg = build_real (type, c);
11165 return build_call_expr (powfn, 2, arg1, arg);
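/* E.g. pow (x, 3.5) / x becomes pow (x, 2.5).  */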
11169 /* Optimize a/root(b/c) into a*root(c/b). */
11170 if (BUILTIN_ROOT_P (fcode1))
11172 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11174 if (TREE_CODE (rootarg) == RDIV_EXPR)
11176 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11177 tree b = TREE_OPERAND (rootarg, 0);
11178 tree c = TREE_OPERAND (rootarg, 1);
11180 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11182 tmp = build_call_expr (rootfn, 1, tmp);
11183 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11187 /* Optimize x/expN(y) into x*expN(-y). */
11188 if (BUILTIN_EXPONENT_P (fcode1))
11190 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11191 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11192 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11193 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11196 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11197 if (fcode1 == BUILT_IN_POW
11198 || fcode1 == BUILT_IN_POWF
11199 || fcode1 == BUILT_IN_POWL)
11201 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11202 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11203 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11204 tree neg11 = fold_convert (type, negate_expr (arg11));
11205 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11206 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11209 return NULL_TREE;
11211 case TRUNC_DIV_EXPR:
11212 case FLOOR_DIV_EXPR:
11213 /* Simplify A / (B << N) where A and B are positive and B is
11214 a power of 2, to A >> (N + log2(B)). */
11215 strict_overflow_p = false;
11216 if (TREE_CODE (arg1) == LSHIFT_EXPR
11217 && (TYPE_UNSIGNED (type)
11218 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11220 tree sval = TREE_OPERAND (arg1, 0);
11221 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11223 tree sh_cnt = TREE_OPERAND (arg1, 1);
11224 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11226 if (strict_overflow_p)
11227 fold_overflow_warning (("assuming signed overflow does not "
11228 "occur when simplifying A / (B << N)"),
11229 WARN_STRICT_OVERFLOW_MISC);
11231 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11232 sh_cnt, build_int_cst (NULL_TREE, pow2));
11233 return fold_build2 (RSHIFT_EXPR, type,
11234 fold_convert (type, arg0), sh_cnt);
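/* E.g. for unsigned a, a / (4 << n) becomes a >> (n + 2),
   since log2 (4) == 2.  */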
11238 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11239 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11240 if (INTEGRAL_TYPE_P (type)
11241 && TYPE_UNSIGNED (type)
11242 && code == FLOOR_DIV_EXPR)
11243 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11245 /* Fall through. */
11247 case ROUND_DIV_EXPR:
11248 case CEIL_DIV_EXPR:
11249 case EXACT_DIV_EXPR:
11250 if (integer_onep (arg1))
11251 return non_lvalue (fold_convert (type, arg0));
11252 if (integer_zerop (arg1))
11253 return NULL_TREE;
11254 /* X / -1 is -X. */
11255 if (!TYPE_UNSIGNED (type)
11256 && TREE_CODE (arg1) == INTEGER_CST
11257 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11258 && TREE_INT_CST_HIGH (arg1) == -1)
11259 return fold_convert (type, negate_expr (arg0));
11261 /* Convert -A / -B to A / B when the type is signed and overflow is
11262 undefined. */
11263 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11264 && TREE_CODE (arg0) == NEGATE_EXPR
11265 && negate_expr_p (arg1))
11267 if (INTEGRAL_TYPE_P (type))
11268 fold_overflow_warning (("assuming signed overflow does not occur "
11269 "when distributing negation across "
11270 "division"),
11271 WARN_STRICT_OVERFLOW_MISC);
11272 return fold_build2 (code, type,
11273 fold_convert (type, TREE_OPERAND (arg0, 0)),
11274 negate_expr (arg1));
11276 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11277 && TREE_CODE (arg1) == NEGATE_EXPR
11278 && negate_expr_p (arg0))
11280 if (INTEGRAL_TYPE_P (type))
11281 fold_overflow_warning (("assuming signed overflow does not occur "
11282 "when distributing negation across "
11283 "division"),
11284 WARN_STRICT_OVERFLOW_MISC);
11285 return fold_build2 (code, type, negate_expr (arg0),
11286 TREE_OPERAND (arg1, 0));
11289 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11290 operation, EXACT_DIV_EXPR.
11292 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11293 At one time others generated faster code; it's not clear if they do
11294 after the last round of changes to the DIV code in expmed.c. */
11295 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11296 && multiple_of_p (type, arg0, arg1))
11297 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11299 strict_overflow_p = false;
11300 if (TREE_CODE (arg1) == INTEGER_CST
11301 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11302 &strict_overflow_p)))
11304 if (strict_overflow_p)
11305 fold_overflow_warning (("assuming signed overflow does not occur "
11306 "when simplifying division"),
11307 WARN_STRICT_OVERFLOW_MISC);
11308 return fold_convert (type, tem);
11311 return NULL_TREE;
11313 case CEIL_MOD_EXPR:
11314 case FLOOR_MOD_EXPR:
11315 case ROUND_MOD_EXPR:
11316 case TRUNC_MOD_EXPR:
11317 /* X % 1 is always zero, but be sure to preserve any side
11318 effects in X. */
11319 if (integer_onep (arg1))
11320 return omit_one_operand (type, integer_zero_node, arg0);
11322 /* X % 0: return X % 0 unchanged so that we can get the
11323 proper warnings and errors. */
11324 if (integer_zerop (arg1))
11325 return NULL_TREE;
11327 /* 0 % X is always zero, but be sure to preserve any side
11328 effects in X. Place this after checking for X == 0. */
11329 if (integer_zerop (arg0))
11330 return omit_one_operand (type, integer_zero_node, arg1);
11332 /* X % -1 is zero. */
11333 if (!TYPE_UNSIGNED (type)
11334 && TREE_CODE (arg1) == INTEGER_CST
11335 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11336 && TREE_INT_CST_HIGH (arg1) == -1)
11337 return omit_one_operand (type, integer_zero_node, arg0);
11339 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11340 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11341 strict_overflow_p = false;
11342 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11343 && (TYPE_UNSIGNED (type)
11344 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11346 tree c = arg1;
11347 /* Also optimize A % (C << N) where C is a power of 2,
11348 to A & ((C << N) - 1). */
11349 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11350 c = TREE_OPERAND (arg1, 0);
11352 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11354 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11355 build_int_cst (TREE_TYPE (arg1), 1));
11356 if (strict_overflow_p)
11357 fold_overflow_warning (("assuming signed overflow does not "
11358 "occur when simplifying "
11359 "X % (power of two)"),
11360 WARN_STRICT_OVERFLOW_MISC);
11361 return fold_build2 (BIT_AND_EXPR, type,
11362 fold_convert (type, arg0),
11363 fold_convert (type, mask));
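/* E.g. for unsigned x, x % 8 becomes x & 7, and
   x % (2 << n) becomes x & ((2 << n) - 1).  */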
11367 /* X % -C is the same as X % C. */
11368 if (code == TRUNC_MOD_EXPR
11369 && !TYPE_UNSIGNED (type)
11370 && TREE_CODE (arg1) == INTEGER_CST
11371 && !TREE_OVERFLOW (arg1)
11372 && TREE_INT_CST_HIGH (arg1) < 0
11373 && !TYPE_OVERFLOW_TRAPS (type)
11374 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11375 && !sign_bit_p (arg1, arg1))
11376 return fold_build2 (code, type, fold_convert (type, arg0),
11377 fold_convert (type, negate_expr (arg1)));
11379 /* X % -Y is the same as X % Y. */
11380 if (code == TRUNC_MOD_EXPR
11381 && !TYPE_UNSIGNED (type)
11382 && TREE_CODE (arg1) == NEGATE_EXPR
11383 && !TYPE_OVERFLOW_TRAPS (type))
11384 return fold_build2 (code, type, fold_convert (type, arg0),
11385 fold_convert (type, TREE_OPERAND (arg1, 0)));
11387 if (TREE_CODE (arg1) == INTEGER_CST
11388 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11389 &strict_overflow_p)))
11391 if (strict_overflow_p)
11392 fold_overflow_warning (("assuming signed overflow does not occur "
11393 "when simplifying modulus"),
11394 WARN_STRICT_OVERFLOW_MISC);
11395 return fold_convert (type, tem);
11398 return NULL_TREE;
11400 case LROTATE_EXPR:
11401 case RROTATE_EXPR:
11402 if (integer_all_onesp (arg0))
11403 return omit_one_operand (type, arg0, arg1);
11404 goto shift;
11406 case RSHIFT_EXPR:
11407 /* Optimize -1 >> x for arithmetic right shifts. */
11408 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11409 return omit_one_operand (type, arg0, arg1);
11410 /* ... fall through ... */
11412 case LSHIFT_EXPR:
11413 shift:
11414 if (integer_zerop (arg1))
11415 return non_lvalue (fold_convert (type, arg0));
11416 if (integer_zerop (arg0))
11417 return omit_one_operand (type, arg0, arg1);
11419 /* Since a negative shift count is not well-defined,
11420 don't try to compute it in the compiler. */
11421 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11422 return NULL_TREE;
11424 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11425 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11426 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11427 && host_integerp (TREE_OPERAND (arg0, 1), false)
11428 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11430 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11431 + TREE_INT_CST_LOW (arg1));
11433 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11434 being well defined. */
11435 if (low >= TYPE_PRECISION (type))
11437 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11438 low = low % TYPE_PRECISION (type);
11439 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11440 return build_int_cst (type, 0);
11441 else
11442 low = TYPE_PRECISION (type) - 1;
11445 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11446 build_int_cst (type, low));
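/* E.g. (x >> 2) >> 3 becomes x >> 5.  When the combined count
   reaches the precision, the code above substitutes the count
   modulo the precision for rotates, a zero constant for left
   shifts and unsigned right shifts, and precision - 1 for signed
   (arithmetic) right shifts.  */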
11449 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11450 into x & ((unsigned)-1 >> c) for unsigned types. */
11451 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11452 || (TYPE_UNSIGNED (type)
11453 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11454 && host_integerp (arg1, false)
11455 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11456 && host_integerp (TREE_OPERAND (arg0, 1), false)
11457 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11459 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11460 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11461 tree lshift;
11462 tree arg00;
11464 if (low0 == low1)
11466 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11468 lshift = build_int_cst (type, -1);
11469 lshift = int_const_binop (code, lshift, arg1, 0);
11471 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
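/* E.g. with c == 4 on a 32-bit unsigned x, (x >> 4) << 4 becomes
   x & 0xfffffff0 and (x << 4) >> 4 becomes x & 0x0fffffff.  */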
11475 /* Rewrite an LROTATE_EXPR by a constant into an
11476 RROTATE_EXPR by a new constant. */
11477 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11479 tree tem = build_int_cst (TREE_TYPE (arg1),
11480 TYPE_PRECISION (type));
11481 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11482 return fold_build2 (RROTATE_EXPR, type, op0, tem);
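/* E.g. on a 32-bit type a rotate left by 8 is rewritten as a
   rotate right by 24.  */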
11485 /* If we have a rotate of a bit operation with the rotate count and
11486 the second operand of the bit operation both constant,
11487 permute the two operations. */
11488 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11489 && (TREE_CODE (arg0) == BIT_AND_EXPR
11490 || TREE_CODE (arg0) == BIT_IOR_EXPR
11491 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11492 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11493 return fold_build2 (TREE_CODE (arg0), type,
11494 fold_build2 (code, type,
11495 TREE_OPERAND (arg0, 0), arg1),
11496 fold_build2 (code, type,
11497 TREE_OPERAND (arg0, 1), arg1));
11499 /* Two consecutive rotates adding up to the precision of the
11500 type can be ignored. */
11501 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11502 && TREE_CODE (arg0) == RROTATE_EXPR
11503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11504 && TREE_INT_CST_HIGH (arg1) == 0
11505 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11506 && ((TREE_INT_CST_LOW (arg1)
11507 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11508 == (unsigned int) TYPE_PRECISION (type)))
11509 return TREE_OPERAND (arg0, 0);
11511 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11512 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11513 if the latter can be further optimized. */
11514 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11515 && TREE_CODE (arg0) == BIT_AND_EXPR
11516 && TREE_CODE (arg1) == INTEGER_CST
11517 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11519 tree mask = fold_build2 (code, type,
11520 fold_convert (type, TREE_OPERAND (arg0, 1)),
11521 arg1);
11522 tree shift = fold_build2 (code, type,
11523 fold_convert (type, TREE_OPERAND (arg0, 0)),
11524 arg1);
11525 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11526 if (tem)
11527 return tem;
11530 return NULL_TREE;
11532 case MIN_EXPR:
11533 if (operand_equal_p (arg0, arg1, 0))
11534 return omit_one_operand (type, arg0, arg1);
11535 if (INTEGRAL_TYPE_P (type)
11536 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11537 return omit_one_operand (type, arg1, arg0);
11538 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11539 if (tem)
11540 return tem;
11541 goto associate;
11543 case MAX_EXPR:
11544 if (operand_equal_p (arg0, arg1, 0))
11545 return omit_one_operand (type, arg0, arg1);
11546 if (INTEGRAL_TYPE_P (type)
11547 && TYPE_MAX_VALUE (type)
11548 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11549 return omit_one_operand (type, arg1, arg0);
11550 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11551 if (tem)
11552 return tem;
11553 goto associate;
11555 case TRUTH_ANDIF_EXPR:
11556 /* Note that the operands of this must be ints
11557 and their values must be 0 or 1.
11558 ("true" is a fixed value perhaps depending on the language.) */
11559 /* If first arg is constant zero, return it. */
11560 if (integer_zerop (arg0))
11561 return fold_convert (type, arg0);
11562 case TRUTH_AND_EXPR:
11563 /* If either arg is constant true, drop it. */
11564 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11565 return non_lvalue (fold_convert (type, arg1));
11566 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11567 /* Preserve sequence points. */
11568 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11569 return non_lvalue (fold_convert (type, arg0));
11570 /* If second arg is constant zero, result is zero, but first arg
11571 must be evaluated. */
11572 if (integer_zerop (arg1))
11573 return omit_one_operand (type, arg1, arg0);
11574 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11575 case will be handled here. */
11576 if (integer_zerop (arg0))
11577 return omit_one_operand (type, arg0, arg1);
11579 /* !X && X is always false. */
11580 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11581 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11582 return omit_one_operand (type, integer_zero_node, arg1);
11583 /* X && !X is always false. */
11584 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11585 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11586 return omit_one_operand (type, integer_zero_node, arg0);
11588 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11589 means A >= Y && A != MAX, but in this case we know that
11590 A < X <= MAX. */
11592 if (!TREE_SIDE_EFFECTS (arg0)
11593 && !TREE_SIDE_EFFECTS (arg1))
11595 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11596 if (tem && !operand_equal_p (tem, arg0, 0))
11597 return fold_build2 (code, type, tem, arg1);
11599 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11600 if (tem && !operand_equal_p (tem, arg1, 0))
11601 return fold_build2 (code, type, arg0, tem);
11604 truth_andor:
11605 /* We only do these simplifications if we are optimizing. */
11606 if (!optimize)
11607 return NULL_TREE;
11609 /* Check for things like (A || B) && (A || C). We can convert this
11610 to A || (B && C). Note that either operator can be any of the four
11611 truth and/or operations and the transformation will still be
11612 valid. Also note that we only care about order for the
11613 ANDIF and ORIF operators. If B contains side effects, this
11614 might change the truth-value of A. */
11615 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11616 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11617 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11618 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11619 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11620 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11622 tree a00 = TREE_OPERAND (arg0, 0);
11623 tree a01 = TREE_OPERAND (arg0, 1);
11624 tree a10 = TREE_OPERAND (arg1, 0);
11625 tree a11 = TREE_OPERAND (arg1, 1);
11626 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11627 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11628 && (code == TRUTH_AND_EXPR
11629 || code == TRUTH_OR_EXPR));
11631 if (operand_equal_p (a00, a10, 0))
11632 return fold_build2 (TREE_CODE (arg0), type, a00,
11633 fold_build2 (code, type, a01, a11));
11634 else if (commutative && operand_equal_p (a00, a11, 0))
11635 return fold_build2 (TREE_CODE (arg0), type, a00,
11636 fold_build2 (code, type, a01, a10));
11637 else if (commutative && operand_equal_p (a01, a10, 0))
11638 return fold_build2 (TREE_CODE (arg0), type, a01,
11639 fold_build2 (code, type, a00, a11));
11641 /* This case is tricky because we must either have commutative
11642 operators or else A10 must not have side-effects. */
11644 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11645 && operand_equal_p (a01, a11, 0))
11646 return fold_build2 (TREE_CODE (arg0), type,
11647 fold_build2 (code, type, a00, a10),
11648 a01);
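/* E.g. (a || b) && (a || c) becomes a || (b && c); for the
   commutative operators the shared operand may appear on either
   side of each operand expression.  */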
11651 /* See if we can build a range comparison. */
11652 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11653 return tem;
11655 /* Check for the possibility of merging component references. If our
11656 lhs is another similar operation, try to merge its rhs with our
11657 rhs. Then try to merge our lhs and rhs. */
11658 if (TREE_CODE (arg0) == code
11659 && 0 != (tem = fold_truthop (code, type,
11660 TREE_OPERAND (arg0, 1), arg1)))
11661 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11663 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11664 return tem;
11666 return NULL_TREE;
11668 case TRUTH_ORIF_EXPR:
11669 /* Note that the operands of this must be ints
11670 and their values must be 0 or true.
11671 ("true" is a fixed value perhaps depending on the language.) */
11672 /* If first arg is constant true, return it. */
11673 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11674 return fold_convert (type, arg0);
11675 case TRUTH_OR_EXPR:
11676 /* If either arg is constant zero, drop it. */
11677 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11678 return non_lvalue (fold_convert (type, arg1));
11679 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11680 /* Preserve sequence points. */
11681 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11682 return non_lvalue (fold_convert (type, arg0));
11683 /* If second arg is constant true, result is true, but we must
11684 evaluate first arg. */
11685 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11686 return omit_one_operand (type, arg1, arg0);
11687 /* Likewise for first arg, but note this only occurs here for
11688 TRUTH_OR_EXPR. */
11689 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11690 return omit_one_operand (type, arg0, arg1);
11692 /* !X || X is always true. */
11693 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11694 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11695 return omit_one_operand (type, integer_one_node, arg1);
11696 /* X || !X is always true. */
11697 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11698 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11699 return omit_one_operand (type, integer_one_node, arg0);
11701 goto truth_andor;
11703 case TRUTH_XOR_EXPR:
11704 /* If the second arg is constant zero, drop it. */
11705 if (integer_zerop (arg1))
11706 return non_lvalue (fold_convert (type, arg0));
11707 /* If the second arg is constant true, this is a logical inversion. */
11708 if (integer_onep (arg1))
11710 /* Only call invert_truthvalue if operand is a truth value. */
11711 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11712 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11713 else
11714 tem = invert_truthvalue (arg0);
11715 return non_lvalue (fold_convert (type, tem));
11717 /* Identical arguments cancel to zero. */
11718 if (operand_equal_p (arg0, arg1, 0))
11719 return omit_one_operand (type, integer_zero_node, arg0);
11721 /* !X ^ X is always true. */
11722 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11723 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11724 return omit_one_operand (type, integer_one_node, arg1);
11726 /* X ^ !X is always true. */
11727 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11728 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11729 return omit_one_operand (type, integer_one_node, arg0);
11731 return NULL_TREE;
11733 case EQ_EXPR:
11734 case NE_EXPR:
11735 tem = fold_comparison (code, type, op0, op1);
11736 if (tem != NULL_TREE)
11737 return tem;
11739 /* bool_var != 0 becomes bool_var. */
11740 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11741 && code == NE_EXPR)
11742 return non_lvalue (fold_convert (type, arg0));
11744 /* bool_var == 1 becomes bool_var. */
11745 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11746 && code == EQ_EXPR)
11747 return non_lvalue (fold_convert (type, arg0));
11749 /* bool_var != 1 becomes !bool_var. */
11750 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11751 && code == NE_EXPR)
11752 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11754 /* bool_var == 0 becomes !bool_var. */
11755 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11756 && code == EQ_EXPR)
11757 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11759 /* If this is an equality comparison of the address of two non-weak,
11760 unaliased symbols, neither of which is extern (since we do not
11761 have access to attributes for externs), then we know the result. */
11762 if (TREE_CODE (arg0) == ADDR_EXPR
11763 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11764 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11765 && ! lookup_attribute ("alias",
11766 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11767 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11768 && TREE_CODE (arg1) == ADDR_EXPR
11769 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11770 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11771 && ! lookup_attribute ("alias",
11772 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11773 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11775 /* We know that we're looking at the address of two
11776 non-weak, unaliased, static _DECL nodes.
11778 It is both wasteful and incorrect to call operand_equal_p
11779 to compare the two ADDR_EXPR nodes. It is wasteful in that
11780 all we need to do is test pointer equality for the arguments
11781 to the two ADDR_EXPR nodes. It is incorrect to use
11782 operand_equal_p as that function is NOT equivalent to a
11783 C equality test. It can in fact return false for two
11784 objects which would test as equal using the C equality
11785 operator. */
11786 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11787 return constant_boolean_node (equal
11788 ? code == EQ_EXPR : code != EQ_EXPR,
11789 type);
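/* E.g. given "static int x, y;", &x == &y folds to false and
   &x != &y folds to true, because two distinct non-weak,
   unaliased decls cannot share an address.  */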
11792 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11793 a MINUS_EXPR of a constant, we can convert it into a comparison with
11794 a revised constant as long as no overflow occurs. */
11795 if (TREE_CODE (arg1) == INTEGER_CST
11796 && (TREE_CODE (arg0) == PLUS_EXPR
11797 || TREE_CODE (arg0) == MINUS_EXPR)
11798 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11799 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11800 ? MINUS_EXPR : PLUS_EXPR,
11801 fold_convert (TREE_TYPE (arg0), arg1),
11802 TREE_OPERAND (arg0, 1), 0))
11803 && !TREE_OVERFLOW (tem))
11804 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11806 /* Similarly for a NEGATE_EXPR. */
11807 if (TREE_CODE (arg0) == NEGATE_EXPR
11808 && TREE_CODE (arg1) == INTEGER_CST
11809 && 0 != (tem = negate_expr (arg1))
11810 && TREE_CODE (tem) == INTEGER_CST
11811 && !TREE_OVERFLOW (tem))
11812 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11814 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11815 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11816 && TREE_CODE (arg1) == INTEGER_CST
11817 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11818 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11819 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11820 fold_convert (TREE_TYPE (arg0), arg1),
11821 TREE_OPERAND (arg0, 1)));
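/* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */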
11823 /* Transform comparisons of the form X +- C CMP X. */
11824 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11826 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11827 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11828 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11830 tree cst = TREE_OPERAND (arg0, 1);
11832 if (code == EQ_EXPR
11833 && !integer_zerop (cst))
11834 return omit_two_operands (type, boolean_false_node,
11835 TREE_OPERAND (arg0, 0), arg1);
11836 else
11837 return omit_two_operands (type, boolean_true_node,
11838 TREE_OPERAND (arg0, 0), arg1);
11841 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11842 for !=. Don't do this for ordered comparisons due to overflow. */
11843 if (TREE_CODE (arg0) == MINUS_EXPR
11844 && integer_zerop (arg1))
11845 return fold_build2 (code, type,
11846 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11848 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11849 if (TREE_CODE (arg0) == ABS_EXPR
11850 && (integer_zerop (arg1) || real_zerop (arg1)))
11851 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11853 /* If this is an EQ or NE comparison with zero and ARG0 is
11854 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11855 two operations, but the latter can be done in one less insn
11856 on machines that have only two-operand insns or on which a
11857 constant cannot be the first operand. */
11858 if (TREE_CODE (arg0) == BIT_AND_EXPR
11859 && integer_zerop (arg1))
11861 tree arg00 = TREE_OPERAND (arg0, 0);
11862 tree arg01 = TREE_OPERAND (arg0, 1);
11863 if (TREE_CODE (arg00) == LSHIFT_EXPR
11864 && integer_onep (TREE_OPERAND (arg00, 0)))
11866 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11867 arg01, TREE_OPERAND (arg00, 1));
11868 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11869 build_int_cst (TREE_TYPE (arg0), 1));
11870 return fold_build2 (code, type,
11871 fold_convert (TREE_TYPE (arg1), tem), arg1);
11873 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11874 && integer_onep (TREE_OPERAND (arg01, 0)))
11876 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11877 arg00, TREE_OPERAND (arg01, 1));
11878 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11879 build_int_cst (TREE_TYPE (arg0), 1));
11880 return fold_build2 (code, type,
11881 fold_convert (TREE_TYPE (arg1), tem), arg1);
11885 /* If this is an NE or EQ comparison of zero against the result of a
11886 signed MOD operation whose second operand is a power of 2, make
11887 the MOD operation unsigned since it is simpler and equivalent. */
11888 if (integer_zerop (arg1)
11889 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11890 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11891 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11892 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11893 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11894 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11896 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11897 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11898 fold_convert (newtype,
11899 TREE_OPERAND (arg0, 0)),
11900 fold_convert (newtype,
11901 TREE_OPERAND (arg0, 1)));
11903 return fold_build2 (code, type, newmod,
11904 fold_convert (newtype, arg1));
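/* E.g. for signed x, (x % 4) == 0 becomes
   ((unsigned) x % 4U) == 0, which is equivalent because 4 is a
   power of 2 and avoids any sign fixups.  */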
11907 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11908 C1 is a valid shift constant, and C2 is a power of two, i.e.
11909 a single bit. */
11910 if (TREE_CODE (arg0) == BIT_AND_EXPR
11911 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11912 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11913 == INTEGER_CST
11914 && integer_pow2p (TREE_OPERAND (arg0, 1))
11915 && integer_zerop (arg1))
11917 tree itype = TREE_TYPE (arg0);
11918 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11919 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11921 /* Check for a valid shift count. */
11922 if (TREE_INT_CST_HIGH (arg001) == 0
11923 && TREE_INT_CST_LOW (arg001) < prec)
11925 tree arg01 = TREE_OPERAND (arg0, 1);
11926 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11927 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11928 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11929 can be rewritten as (X & (C2 << C1)) != 0. */
11930 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11932 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11933 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11934 return fold_build2 (code, type, tem, arg1);
11936 /* Otherwise, for signed (arithmetic) shifts,
11937 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11938 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11939 else if (!TYPE_UNSIGNED (itype))
11940 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11941 arg000, build_int_cst (itype, 0));
11942 /* Otherwise, for unsigned (logical) shifts,
11943 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11944 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11945 else
11946 return omit_one_operand (type,
11947 code == EQ_EXPR ? integer_one_node
11948 : integer_zero_node,
11949 arg000);
11953 /* If this is an NE comparison of zero with an AND of one, remove the
11954 comparison since the AND will give the correct value. */
11955 if (code == NE_EXPR
11956 && integer_zerop (arg1)
11957 && TREE_CODE (arg0) == BIT_AND_EXPR
11958 && integer_onep (TREE_OPERAND (arg0, 1)))
11959 return fold_convert (type, arg0);
11961 /* If we have (A & C) == C where C is a power of 2, convert this into
11962 (A & C) != 0. Similarly for NE_EXPR. */
11963 if (TREE_CODE (arg0) == BIT_AND_EXPR
11964 && integer_pow2p (TREE_OPERAND (arg0, 1))
11965 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11966 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11967 arg0, fold_convert (TREE_TYPE (arg0),
11968 integer_zero_node));
11970 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11971 bit, then fold the expression into A < 0 or A >= 0. */
11972 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11973 if (tem)
11974 return tem;
11976 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11977 Similarly for NE_EXPR. */
11978 if (TREE_CODE (arg0) == BIT_AND_EXPR
11979 && TREE_CODE (arg1) == INTEGER_CST
11980 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11982 tree notc = fold_build1 (BIT_NOT_EXPR,
11983 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11984 TREE_OPERAND (arg0, 1));
11985 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11986 arg1, notc);
11987 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11988 if (integer_nonzerop (dandnotc))
11989 return omit_one_operand (type, rslt, arg0);
11992 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11993 Similarly for NE_EXPR. */
11994 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11995 && TREE_CODE (arg1) == INTEGER_CST
11996 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11998 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11999 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12000 TREE_OPERAND (arg0, 1), notd);
12001 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12002 if (integer_nonzerop (candnotd))
12003 return omit_one_operand (type, rslt, arg0);
12006 /* Optimize comparisons of strlen vs zero to a compare of the
12007 first character of the string vs zero. To wit,
12008 strlen(ptr) == 0 => *ptr == 0
12009 strlen(ptr) != 0 => *ptr != 0
12010 Other cases should reduce to one of these two (or a constant)
12011 due to the return value of strlen being unsigned. */
12012 if (TREE_CODE (arg0) == CALL_EXPR
12013 && integer_zerop (arg1))
12015 tree fndecl = get_callee_fndecl (arg0);
12017 if (fndecl
12018 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12019 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12020 && call_expr_nargs (arg0) == 1
12021 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12023 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12024 return fold_build2 (code, type, iref,
12025 build_int_cst (TREE_TYPE (iref), 0));
12029 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12030 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12031 if (TREE_CODE (arg0) == RSHIFT_EXPR
12032 && integer_zerop (arg1)
12033 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12035 tree arg00 = TREE_OPERAND (arg0, 0);
12036 tree arg01 = TREE_OPERAND (arg0, 1);
12037 tree itype = TREE_TYPE (arg00);
12038 if (TREE_INT_CST_HIGH (arg01) == 0
12039 && TREE_INT_CST_LOW (arg01)
12040 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12042 if (TYPE_UNSIGNED (itype))
12044 itype = signed_type_for (itype);
12045 arg00 = fold_convert (itype, arg00);
12047 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12048 type, arg00, build_int_cst (itype, 0));
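/* E.g. for a 32-bit int x, (x >> 31) != 0 becomes x < 0 and
   (x >> 31) == 0 becomes x >= 0; an unsigned x is converted to
   the corresponding signed type first.  */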
12052 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12053 if (integer_zerop (arg1)
12054 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12055 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12056 TREE_OPERAND (arg0, 1));
12058 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12059 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12060 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12061 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12062 build_int_cst (TREE_TYPE (arg1), 0));
12063 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12064 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12066 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12067 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12068 build_int_cst (TREE_TYPE (arg1), 0));
12070 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12071 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12072 && TREE_CODE (arg1) == INTEGER_CST
12073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12074 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12075 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12076 TREE_OPERAND (arg0, 1), arg1));
12078 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12079 (X & C) == 0 when C is a single bit. */
12080 if (TREE_CODE (arg0) == BIT_AND_EXPR
12081 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12082 && integer_zerop (arg1)
12083 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12085 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12086 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12087 TREE_OPERAND (arg0, 1));
12088 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12089 type, tem, arg1);
12092 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12093 constant C is a power of two, i.e. a single bit. */
12094 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12095 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12096 && integer_zerop (arg1)
12097 && integer_pow2p (TREE_OPERAND (arg0, 1))
12098 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12099 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12101 tree arg00 = TREE_OPERAND (arg0, 0);
12102 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12103 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12106 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12107 when C is a power of two, i.e. a single bit. */
12108 if (TREE_CODE (arg0) == BIT_AND_EXPR
12109 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12110 && integer_zerop (arg1)
12111 && integer_pow2p (TREE_OPERAND (arg0, 1))
12112 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12113 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12115 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12116 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12117 arg000, TREE_OPERAND (arg0, 1));
12118 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12119 tem, build_int_cst (TREE_TYPE (tem), 0));
12122 if (integer_zerop (arg1)
12123 && tree_expr_nonzero_p (arg0))
12125 tree res = constant_boolean_node (code == NE_EXPR, type);
12126 return omit_one_operand (type, res, arg0);
12129 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12130 if (TREE_CODE (arg0) == NEGATE_EXPR
12131 && TREE_CODE (arg1) == NEGATE_EXPR)
12132 return fold_build2 (code, type,
12133 TREE_OPERAND (arg0, 0),
12134 TREE_OPERAND (arg1, 0));
12136 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12137 if (TREE_CODE (arg0) == BIT_AND_EXPR
12138 && TREE_CODE (arg1) == BIT_AND_EXPR)
12140 tree arg00 = TREE_OPERAND (arg0, 0);
12141 tree arg01 = TREE_OPERAND (arg0, 1);
12142 tree arg10 = TREE_OPERAND (arg1, 0);
12143 tree arg11 = TREE_OPERAND (arg1, 1);
12144 tree itype = TREE_TYPE (arg0);
12146 if (operand_equal_p (arg01, arg11, 0))
12147 return fold_build2 (code, type,
12148 fold_build2 (BIT_AND_EXPR, itype,
12149 fold_build2 (BIT_XOR_EXPR, itype,
12150 arg00, arg10),
12151 arg01),
12152 build_int_cst (itype, 0));
12154 if (operand_equal_p (arg01, arg10, 0))
12155 return fold_build2 (code, type,
12156 fold_build2 (BIT_AND_EXPR, itype,
12157 fold_build2 (BIT_XOR_EXPR, itype,
12158 arg00, arg11),
12159 arg01),
12160 build_int_cst (itype, 0));
12162 if (operand_equal_p (arg00, arg11, 0))
12163 return fold_build2 (code, type,
12164 fold_build2 (BIT_AND_EXPR, itype,
12165 fold_build2 (BIT_XOR_EXPR, itype,
12166 arg01, arg10),
12167 arg00),
12168 build_int_cst (itype, 0));
12170 if (operand_equal_p (arg00, arg10, 0))
12171 return fold_build2 (code, type,
12172 fold_build2 (BIT_AND_EXPR, itype,
12173 fold_build2 (BIT_XOR_EXPR, itype,
12174 arg01, arg11),
12175 arg00),
12176 build_int_cst (itype, 0));
12179 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12180 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12182 tree arg00 = TREE_OPERAND (arg0, 0);
12183 tree arg01 = TREE_OPERAND (arg0, 1);
12184 tree arg10 = TREE_OPERAND (arg1, 0);
12185 tree arg11 = TREE_OPERAND (arg1, 1);
12186 tree itype = TREE_TYPE (arg0);
12188 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12189 operand_equal_p guarantees no side-effects so we don't need
12190 to use omit_one_operand on Z. */
12191 if (operand_equal_p (arg01, arg11, 0))
12192 return fold_build2 (code, type, arg00, arg10);
12193 if (operand_equal_p (arg01, arg10, 0))
12194 return fold_build2 (code, type, arg00, arg11);
12195 if (operand_equal_p (arg00, arg11, 0))
12196 return fold_build2 (code, type, arg01, arg10);
12197 if (operand_equal_p (arg00, arg10, 0))
12198 return fold_build2 (code, type, arg01, arg11);
12200 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12201 if (TREE_CODE (arg01) == INTEGER_CST
12202 && TREE_CODE (arg11) == INTEGER_CST)
12203 return fold_build2 (code, type,
12204 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12205 fold_build2 (BIT_XOR_EXPR, itype,
12206 arg01, arg11)),
12207 arg10);
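/* E.g. (x ^ 1) == (y ^ 2) becomes (x ^ 3) == y.  */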
12210 /* Attempt to simplify equality/inequality comparisons of complex
12211 values. Only lower the comparison if the result is known or
12212 can be simplified to a single scalar comparison. */
12213 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12214 || TREE_CODE (arg0) == COMPLEX_CST)
12215 && (TREE_CODE (arg1) == COMPLEX_EXPR
12216 || TREE_CODE (arg1) == COMPLEX_CST))
12218 tree real0, imag0, real1, imag1;
12219 tree rcond, icond;
12221 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12223 real0 = TREE_OPERAND (arg0, 0);
12224 imag0 = TREE_OPERAND (arg0, 1);
12226 else
12228 real0 = TREE_REALPART (arg0);
12229 imag0 = TREE_IMAGPART (arg0);
12232 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12234 real1 = TREE_OPERAND (arg1, 0);
12235 imag1 = TREE_OPERAND (arg1, 1);
12237 else
12239 real1 = TREE_REALPART (arg1);
12240 imag1 = TREE_IMAGPART (arg1);
12243 rcond = fold_binary (code, type, real0, real1);
12244 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12246 if (integer_zerop (rcond))
12248 if (code == EQ_EXPR)
12249 return omit_two_operands (type, boolean_false_node,
12250 imag0, imag1);
12251 return fold_build2 (NE_EXPR, type, imag0, imag1);
12253 else
12255 if (code == NE_EXPR)
12256 return omit_two_operands (type, boolean_true_node,
12257 imag0, imag1);
12258 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12262 icond = fold_binary (code, type, imag0, imag1);
12263 if (icond && TREE_CODE (icond) == INTEGER_CST)
12265 if (integer_zerop (icond))
12267 if (code == EQ_EXPR)
12268 return omit_two_operands (type, boolean_false_node,
12269 real0, real1);
12270 return fold_build2 (NE_EXPR, type, real0, real1);
12272 else
12274 if (code == NE_EXPR)
12275 return omit_two_operands (type, boolean_true_node,
12276 real0, real1);
12277 return fold_build2 (EQ_EXPR, type, real0, real1);
12282 return NULL_TREE;
12284 case LT_EXPR:
12285 case GT_EXPR:
12286 case LE_EXPR:
12287 case GE_EXPR:
12288 tem = fold_comparison (code, type, op0, op1);
12289 if (tem != NULL_TREE)
12290 return tem;
12292 /* Transform comparisons of the form X +- C CMP X. */
12293 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12294 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12295 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12296 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12297 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12298 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12300 tree arg01 = TREE_OPERAND (arg0, 1);
12301 enum tree_code code0 = TREE_CODE (arg0);
12302 int is_positive;
12304 if (TREE_CODE (arg01) == REAL_CST)
12305 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12306 else
12307 is_positive = tree_int_cst_sgn (arg01);
12309 /* (X - c) > X becomes false. */
12310 if (code == GT_EXPR
12311 && ((code0 == MINUS_EXPR && is_positive >= 0)
12312 || (code0 == PLUS_EXPR && is_positive <= 0)))
12314 if (TREE_CODE (arg01) == INTEGER_CST
12315 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12316 fold_overflow_warning (("assuming signed overflow does not "
12317 "occur when assuming that (X - c) > X "
12318 "is always false"),
12319 WARN_STRICT_OVERFLOW_ALL);
12320 return constant_boolean_node (0, type);
12323 /* Likewise (X + c) < X becomes false. */
12324 if (code == LT_EXPR
12325 && ((code0 == PLUS_EXPR && is_positive >= 0)
12326 || (code0 == MINUS_EXPR && is_positive <= 0)))
12328 if (TREE_CODE (arg01) == INTEGER_CST
12329 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12330 fold_overflow_warning (("assuming signed overflow does not "
12331 "occur when assuming that "
12332 "(X + c) < X is always false"),
12333 WARN_STRICT_OVERFLOW_ALL);
12334 return constant_boolean_node (0, type);
12337 /* Convert (X - c) <= X to true. */
12338 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12339 && code == LE_EXPR
12340 && ((code0 == MINUS_EXPR && is_positive >= 0)
12341 || (code0 == PLUS_EXPR && is_positive <= 0)))
12343 if (TREE_CODE (arg01) == INTEGER_CST
12344 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12345 fold_overflow_warning (("assuming signed overflow does not "
12346 "occur when assuming that "
12347 "(X - c) <= X is always true"),
12348 WARN_STRICT_OVERFLOW_ALL);
12349 return constant_boolean_node (1, type);
12352 /* Convert (X + c) >= X to true. */
12353 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12354 && code == GE_EXPR
12355 && ((code0 == PLUS_EXPR && is_positive >= 0)
12356 || (code0 == MINUS_EXPR && is_positive <= 0)))
12358 if (TREE_CODE (arg01) == INTEGER_CST
12359 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12360 fold_overflow_warning (("assuming signed overflow does not "
12361 "occur when assuming that "
12362 "(X + c) >= X is always true"),
12363 WARN_STRICT_OVERFLOW_ALL);
12364 return constant_boolean_node (1, type);
12367 if (TREE_CODE (arg01) == INTEGER_CST)
12369 /* Convert X + c > X and X - c < X to true for integers. */
12370 if (code == GT_EXPR
12371 && ((code0 == PLUS_EXPR && is_positive > 0)
12372 || (code0 == MINUS_EXPR && is_positive < 0)))
12374 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12375 fold_overflow_warning (("assuming signed overflow does "
12376 "not occur when assuming that "
12377 "(X + c) > X is always true"),
12378 WARN_STRICT_OVERFLOW_ALL);
12379 return constant_boolean_node (1, type);
12382 if (code == LT_EXPR
12383 && ((code0 == MINUS_EXPR && is_positive > 0)
12384 || (code0 == PLUS_EXPR && is_positive < 0)))
12386 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12387 fold_overflow_warning (("assuming signed overflow does "
12388 "not occur when assuming that "
12389 "(X - c) < X is always true"),
12390 WARN_STRICT_OVERFLOW_ALL);
12391 return constant_boolean_node (1, type);
12394 /* Convert X + c <= X and X - c >= X to false for integers. */
12395 if (code == LE_EXPR
12396 && ((code0 == PLUS_EXPR && is_positive > 0)
12397 || (code0 == MINUS_EXPR && is_positive < 0)))
12399 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12400 fold_overflow_warning (("assuming signed overflow does "
12401 "not occur when assuming that "
12402 "(X + c) <= X is always false"),
12403 WARN_STRICT_OVERFLOW_ALL);
12404 return constant_boolean_node (0, type);
12407 if (code == GE_EXPR
12408 && ((code0 == MINUS_EXPR && is_positive > 0)
12409 || (code0 == PLUS_EXPR && is_positive < 0)))
12411 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12412 fold_overflow_warning (("assuming signed overflow does "
12413 "not occur when assuming that "
12414 "(X - c) >= X is always false"),
12415 WARN_STRICT_OVERFLOW_ALL);
12416 return constant_boolean_node (0, type);
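/* A worked sketch of the family above, assuming a signed int i
   compiled with undefined signed overflow:
     i + 1 > i   =>  1          i + 1 <= i  =>  0
     i - 1 < i   =>  1          i - 1 >= i  =>  0
   Each integer fold is guarded by TYPE_OVERFLOW_UNDEFINED and
   reports the assumption through fold_overflow_warning.  */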
12421 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12422 This transformation affects the cases which are handled in later
12423 optimizations involving comparisons with non-negative constants. */
12424 if (TREE_CODE (arg1) == INTEGER_CST
12425 && TREE_CODE (arg0) != INTEGER_CST
12426 && tree_int_cst_sgn (arg1) > 0)
12428 if (code == GE_EXPR)
12430 arg1 = const_binop (MINUS_EXPR, arg1,
12431 build_int_cst (TREE_TYPE (arg1), 1), 0);
12432 return fold_build2 (GT_EXPR, type, arg0,
12433 fold_convert (TREE_TYPE (arg0), arg1));
12435 if (code == LT_EXPR)
12437 arg1 = const_binop (MINUS_EXPR, arg1,
12438 build_int_cst (TREE_TYPE (arg1), 1), 0);
12439 return fold_build2 (LE_EXPR, type, arg0,
12440 fold_convert (TREE_TYPE (arg0), arg1));
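/* For example, "x >= 5" becomes "x > 4" and "x < 5" becomes
   "x <= 4".  Canonicalizing toward GT_EXPR/LE_EXPR lets the
   extremal-constant cases below fire more often.  */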
12444 /* Comparisons with the highest or lowest possible integer of
12445 the specified precision will have known values. */
12447 tree arg1_type = TREE_TYPE (arg1);
12448 unsigned int width = TYPE_PRECISION (arg1_type);
12450 if (TREE_CODE (arg1) == INTEGER_CST
12451 && !TREE_OVERFLOW (arg1)
12452 && width <= 2 * HOST_BITS_PER_WIDE_INT
12453 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12455 HOST_WIDE_INT signed_max_hi;
12456 unsigned HOST_WIDE_INT signed_max_lo;
12457 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12459 if (width <= HOST_BITS_PER_WIDE_INT)
12461 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12462 - 1;
12463 signed_max_hi = 0;
12464 max_hi = 0;
12466 if (TYPE_UNSIGNED (arg1_type))
12468 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12469 min_lo = 0;
12470 min_hi = 0;
12472 else
12474 max_lo = signed_max_lo;
12475 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12476 min_hi = -1;
12479 else
12481 width -= HOST_BITS_PER_WIDE_INT;
12482 signed_max_lo = -1;
12483 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12484 - 1;
12485 max_lo = -1;
12486 min_lo = 0;
12488 if (TYPE_UNSIGNED (arg1_type))
12490 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12491 min_hi = 0;
12493 else
12495 max_hi = signed_max_hi;
12496 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12500 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12501 && TREE_INT_CST_LOW (arg1) == max_lo)
12502 switch (code)
12504 case GT_EXPR:
12505 return omit_one_operand (type, integer_zero_node, arg0);
12507 case GE_EXPR:
12508 return fold_build2 (EQ_EXPR, type, op0, op1);
12510 case LE_EXPR:
12511 return omit_one_operand (type, integer_one_node, arg0);
12513 case LT_EXPR:
12514 return fold_build2 (NE_EXPR, type, op0, op1);
12516 /* The GE_EXPR and LT_EXPR cases above are not normally
12517 reached because of previous transformations. */
12519 default:
12520 break;
12522 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12523 == max_hi
12524 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12525 switch (code)
12527 case GT_EXPR:
12528 arg1 = const_binop (PLUS_EXPR, arg1,
12529 build_int_cst (TREE_TYPE (arg1), 1), 0);
12530 return fold_build2 (EQ_EXPR, type,
12531 fold_convert (TREE_TYPE (arg1), arg0),
12532 arg1);
12533 case LE_EXPR:
12534 arg1 = const_binop (PLUS_EXPR, arg1,
12535 build_int_cst (TREE_TYPE (arg1), 1), 0);
12536 return fold_build2 (NE_EXPR, type,
12537 fold_convert (TREE_TYPE (arg1), arg0),
12538 arg1);
12539 default:
12540 break;
12542 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12543 == min_hi
12544 && TREE_INT_CST_LOW (arg1) == min_lo)
12545 switch (code)
12547 case LT_EXPR:
12548 return omit_one_operand (type, integer_zero_node, arg0);
12550 case LE_EXPR:
12551 return fold_build2 (EQ_EXPR, type, op0, op1);
12553 case GE_EXPR:
12554 return omit_one_operand (type, integer_one_node, arg0);
12556 case GT_EXPR:
12557 return fold_build2 (NE_EXPR, type, op0, op1);
12559 default:
12560 break;
12562 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12563 == min_hi
12564 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12565 switch (code)
12567 case GE_EXPR:
12568 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12569 return fold_build2 (NE_EXPR, type,
12570 fold_convert (TREE_TYPE (arg1), arg0),
12571 arg1);
12572 case LT_EXPR:
12573 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12574 return fold_build2 (EQ_EXPR, type,
12575 fold_convert (TREE_TYPE (arg1), arg0),
12576 arg1);
12577 default:
12578 break;
12581 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12582 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12583 && TYPE_UNSIGNED (arg1_type)
12584 /* We will flip the signedness of the comparison operator
12585 associated with the mode of arg1, so the sign bit is
12586 specified by this mode. Check that arg1 is the signed
12587 max associated with this sign bit. */
12588 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12589 /* signed_type does not work on pointer types. */
12590 && INTEGRAL_TYPE_P (arg1_type))
12592 /* The following case also applies to X < signed_max+1
12593 and X >= signed_max+1 because of the previous transformations. */
12594 if (code == LE_EXPR || code == GT_EXPR)
12596 tree st;
12597 st = signed_type_for (TREE_TYPE (arg1));
12598 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12599 type, fold_convert (st, arg0),
12600 build_int_cst (st, 0));
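/* An illustrative instance, assuming a 32-bit unsigned x:
     x > 0x7fffffffU   =>  (int) x < 0
     x <= 0x7fffffffU  =>  (int) x >= 0
   i.e. comparing against the signed maximum degenerates into a
   sign-bit test in the corresponding signed type.  */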
12606 /* If we are comparing an ABS_EXPR with a constant, we can
12607 convert all the cases into explicit comparisons, but they may
12608 well not be faster than doing the ABS and one comparison.
12609 But ABS (X) <= C is a range comparison, which becomes a subtraction
12610 and a comparison, and is probably faster. */
12611 if (code == LE_EXPR
12612 && TREE_CODE (arg1) == INTEGER_CST
12613 && TREE_CODE (arg0) == ABS_EXPR
12614 && ! TREE_SIDE_EFFECTS (arg0)
12615 && (0 != (tem = negate_expr (arg1)))
12616 && TREE_CODE (tem) == INTEGER_CST
12617 && !TREE_OVERFLOW (tem))
12618 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12619 build2 (GE_EXPR, type,
12620 TREE_OPERAND (arg0, 0), tem),
12621 build2 (LE_EXPR, type,
12622 TREE_OPERAND (arg0, 0), arg1));
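/* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5".  */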
12624 /* Convert ABS_EXPR<x> >= 0 to true. */
12625 strict_overflow_p = false;
12626 if (code == GE_EXPR
12627 && (integer_zerop (arg1)
12628 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12629 && real_zerop (arg1)))
12630 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12632 if (strict_overflow_p)
12633 fold_overflow_warning (("assuming signed overflow does not occur "
12634 "when simplifying comparison of "
12635 "absolute value and zero"),
12636 WARN_STRICT_OVERFLOW_CONDITIONAL);
12637 return omit_one_operand (type, integer_one_node, arg0);
12640 /* Convert ABS_EXPR<x> < 0 to false. */
12641 strict_overflow_p = false;
12642 if (code == LT_EXPR
12643 && (integer_zerop (arg1) || real_zerop (arg1))
12644 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12646 if (strict_overflow_p)
12647 fold_overflow_warning (("assuming signed overflow does not occur "
12648 "when simplifying comparison of "
12649 "absolute value and zero"),
12650 WARN_STRICT_OVERFLOW_CONDITIONAL);
12651 return omit_one_operand (type, integer_zero_node, arg0);
12654 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12655 and similarly for >= into !=. */
12656 if ((code == LT_EXPR || code == GE_EXPR)
12657 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12658 && TREE_CODE (arg1) == LSHIFT_EXPR
12659 && integer_onep (TREE_OPERAND (arg1, 0)))
12660 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12661 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12662 TREE_OPERAND (arg1, 1)),
12663 build_int_cst (TREE_TYPE (arg0), 0));
12665 if ((code == LT_EXPR || code == GE_EXPR)
12666 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12667 && CONVERT_EXPR_P (arg1)
12668 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12669 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12670 return
12671 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12672 fold_convert (TREE_TYPE (arg0),
12673 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12674 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12675 1))),
12676 build_int_cst (TREE_TYPE (arg0), 0));
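/* E.g. for unsigned x, "x < 1U << y" becomes "x >> y == 0" and
   "x >= 1U << y" becomes "x >> y != 0"; the second form above
   catches the same shift hidden behind a conversion.  */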
12678 return NULL_TREE;
12680 case UNORDERED_EXPR:
12681 case ORDERED_EXPR:
12682 case UNLT_EXPR:
12683 case UNLE_EXPR:
12684 case UNGT_EXPR:
12685 case UNGE_EXPR:
12686 case UNEQ_EXPR:
12687 case LTGT_EXPR:
12688 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12690 t1 = fold_relational_const (code, type, arg0, arg1);
12691 if (t1 != NULL_TREE)
12692 return t1;
12695 /* If the first operand is NaN, the result is constant. */
12696 if (TREE_CODE (arg0) == REAL_CST
12697 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12698 && (code != LTGT_EXPR || ! flag_trapping_math))
12700 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12701 ? integer_zero_node
12702 : integer_one_node;
12703 return omit_one_operand (type, t1, arg1);
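/* E.g. if arg0 is a NaN, UNORDERED_EXPR and the UN* comparisons
   fold to 1 while ORDERED_EXPR and LTGT_EXPR fold to 0 (LTGT
   only when trapping math is disabled).  */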
12706 /* If the second operand is NaN, the result is constant. */
12707 if (TREE_CODE (arg1) == REAL_CST
12708 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12709 && (code != LTGT_EXPR || ! flag_trapping_math))
12711 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12712 ? integer_zero_node
12713 : integer_one_node;
12714 return omit_one_operand (type, t1, arg0);
12717 /* Simplify unordered comparison of something with itself. */
12718 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12719 && operand_equal_p (arg0, arg1, 0))
12720 return constant_boolean_node (1, type);
12722 if (code == LTGT_EXPR
12723 && !flag_trapping_math
12724 && operand_equal_p (arg0, arg1, 0))
12725 return constant_boolean_node (0, type);
12727 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12729 tree targ0 = strip_float_extensions (arg0);
12730 tree targ1 = strip_float_extensions (arg1);
12731 tree newtype = TREE_TYPE (targ0);
12733 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12734 newtype = TREE_TYPE (targ1);
12736 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12737 return fold_build2 (code, type, fold_convert (newtype, targ0),
12738 fold_convert (newtype, targ1));
12741 return NULL_TREE;
12743 case COMPOUND_EXPR:
12744 /* When pedantic, a compound expression can be neither an lvalue
12745 nor an integer constant expression. */
12746 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12747 return NULL_TREE;
12748 /* Don't let (0, 0) be a null pointer constant. */
12749 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12750 : fold_convert (type, arg1);
12751 return pedantic_non_lvalue (tem);
12753 case COMPLEX_EXPR:
12754 if ((TREE_CODE (arg0) == REAL_CST
12755 && TREE_CODE (arg1) == REAL_CST)
12756 || (TREE_CODE (arg0) == INTEGER_CST
12757 && TREE_CODE (arg1) == INTEGER_CST))
12758 return build_complex (type, arg0, arg1);
12759 return NULL_TREE;
12761 case ASSERT_EXPR:
12762 /* An ASSERT_EXPR should never be passed to fold_binary. */
12763 gcc_unreachable ();
12765 default:
12766 return NULL_TREE;
12767 } /* switch (code) */
12770 /* Callback for walk_tree, looking for a LABEL_EXPR. Returns *TP if it
12771 is a LABEL_EXPR; otherwise it returns NULL_TREE. Does not descend
12772 into the sub-tree of a GOTO_EXPR. */
12774 static tree
12775 contains_label_1 (tree *tp,
12776 int *walk_subtrees,
12777 void *data ATTRIBUTE_UNUSED)
12779 switch (TREE_CODE (*tp))
12781 case LABEL_EXPR:
12782 return *tp;
12783 case GOTO_EXPR:
12784 *walk_subtrees = 0;
12785 /* FALLTHRU */
12786 default:
12787 return NULL_TREE;
12791 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
12792 accessible from outside the sub-tree. Returns true if such a label is
12793 found, false otherwise. */
12795 static bool
12796 contains_label_p (tree st)
12798 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
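/* An illustrative case: in "i ? x : ({ lab: y; })" the dead arm
   cannot simply be dropped while some goto elsewhere still
   targets "lab".  */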
12801 /* Fold a ternary expression of code CODE and type TYPE with operands
12802 OP0, OP1, and OP2. Return the folded expression if folding is
12803 successful. Otherwise, return NULL_TREE. */
12805 tree
12806 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12808 tree tem;
12809 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12810 enum tree_code_class kind = TREE_CODE_CLASS (code);
12812 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12813 && TREE_CODE_LENGTH (code) == 3);
12815 /* Strip any conversions that don't change the mode. This is safe
12816 for every expression, except for a comparison expression because
12817 its signedness is derived from its operands. So, in the latter
12818 case, only strip conversions that don't change the signedness.
12820 Note that this is done as an internal manipulation within the
12821 constant folder, in order to find the simplest representation of
12822 the arguments so that their form can be studied. In any case,
12823 the appropriate type conversions should be put back in the tree
12824 that will get out of the constant folder. */
12825 if (op0)
12827 arg0 = op0;
12828 STRIP_NOPS (arg0);
12831 if (op1)
12833 arg1 = op1;
12834 STRIP_NOPS (arg1);
12837 switch (code)
12839 case COMPONENT_REF:
12840 if (TREE_CODE (arg0) == CONSTRUCTOR
12841 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12843 unsigned HOST_WIDE_INT idx;
12844 tree field, value;
12845 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12846 if (field == arg1)
12847 return value;
12849 return NULL_TREE;
12851 case COND_EXPR:
12852 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12853 so all simple results must be passed through pedantic_non_lvalue. */
12854 if (TREE_CODE (arg0) == INTEGER_CST)
12856 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12857 tem = integer_zerop (arg0) ? op2 : op1;
12858 /* Only optimize constant conditions when the selected branch
12859 has the same type as the COND_EXPR. This avoids optimizing
12860 away "c ? x : throw", where the throw has a void type.
12861 Also avoid throwing away an operand that contains a label. */
12862 if ((!TREE_SIDE_EFFECTS (unused_op)
12863 || !contains_label_p (unused_op))
12864 && (! VOID_TYPE_P (TREE_TYPE (tem))
12865 || VOID_TYPE_P (type)))
12866 return pedantic_non_lvalue (tem);
12867 return NULL_TREE;
12869 if (operand_equal_p (arg1, op2, 0))
12870 return pedantic_omit_one_operand (type, arg1, arg0);
12872 /* If we have A op B ? A : C, we may be able to convert this to a
12873 simpler expression, depending on the operation and the values
12874 of B and C. Signed zeros prevent all of these transformations,
12875 for reasons given above each one.
12877 Also try swapping the arguments and inverting the conditional. */
12878 if (COMPARISON_CLASS_P (arg0)
12879 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12880 arg1, TREE_OPERAND (arg0, 1))
12881 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12883 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12884 if (tem)
12885 return tem;
12888 if (COMPARISON_CLASS_P (arg0)
12889 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12890 op2,
12891 TREE_OPERAND (arg0, 1))
12892 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12894 tem = fold_truth_not_expr (arg0);
12895 if (tem && COMPARISON_CLASS_P (tem))
12897 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12898 if (tem)
12899 return tem;
12903 /* If the second operand is simpler than the third, swap them
12904 since that produces better jump optimization results. */
12905 if (truth_value_p (TREE_CODE (arg0))
12906 && tree_swap_operands_p (op1, op2, false))
12908 /* See if this can be inverted. If it can't, possibly because
12909 it was a floating-point inequality comparison, don't do
12910 anything. */
12911 tem = fold_truth_not_expr (arg0);
12912 if (tem)
12913 return fold_build3 (code, type, tem, op2, op1);
12916 /* Convert A ? 1 : 0 to simply A. */
12917 if (integer_onep (op1)
12918 && integer_zerop (op2)
12919 /* If we try to convert OP0 to our type, the
12920 call to fold will try to move the conversion inside
12921 a COND, which will recurse. In that case, the COND_EXPR
12922 is probably the best choice, so leave it alone. */
12923 && type == TREE_TYPE (arg0))
12924 return pedantic_non_lvalue (arg0);
12926 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12927 over COND_EXPR in cases such as floating point comparisons. */
12928 if (integer_zerop (op1)
12929 && integer_onep (op2)
12930 && truth_value_p (TREE_CODE (arg0)))
12931 return pedantic_non_lvalue (fold_convert (type,
12932 invert_truthvalue (arg0)));
12934 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12935 if (TREE_CODE (arg0) == LT_EXPR
12936 && integer_zerop (TREE_OPERAND (arg0, 1))
12937 && integer_zerop (op2)
12938 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12940 /* sign_bit_p only checks ARG1 bits within A's precision.
12941 If <sign bit of A> has a wider type than A, the bits outside
12942 of A's precision in <sign bit of A> need to be checked.
12943 If they are all 0, this optimization needs to be done
12944 in A's unsigned type; if they are all 1, in A's signed type;
12945 otherwise the transformation can't be done. */
12946 if (TYPE_PRECISION (TREE_TYPE (tem))
12947 < TYPE_PRECISION (TREE_TYPE (arg1))
12948 && TYPE_PRECISION (TREE_TYPE (tem))
12949 < TYPE_PRECISION (type))
12951 unsigned HOST_WIDE_INT mask_lo;
12952 HOST_WIDE_INT mask_hi;
12953 int inner_width, outer_width;
12954 tree tem_type;
12956 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12957 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12958 if (outer_width > TYPE_PRECISION (type))
12959 outer_width = TYPE_PRECISION (type);
12961 if (outer_width > HOST_BITS_PER_WIDE_INT)
12963 mask_hi = ((unsigned HOST_WIDE_INT) -1
12964 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12965 mask_lo = -1;
12967 else
12969 mask_hi = 0;
12970 mask_lo = ((unsigned HOST_WIDE_INT) -1
12971 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12973 if (inner_width > HOST_BITS_PER_WIDE_INT)
12975 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12976 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12977 mask_lo = 0;
12979 else
12980 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12981 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12983 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12984 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12986 tem_type = signed_type_for (TREE_TYPE (tem));
12987 tem = fold_convert (tem_type, tem);
12989 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12990 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12992 tem_type = unsigned_type_for (TREE_TYPE (tem));
12993 tem = fold_convert (tem_type, tem);
12995 else
12996 tem = NULL;
12999 if (tem)
13000 return fold_convert (type,
13001 fold_build2 (BIT_AND_EXPR,
13002 TREE_TYPE (tem), tem,
13003 fold_convert (TREE_TYPE (tem),
13004 arg1)));
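/* A concrete instance, assuming a 32-bit int x:
     x < 0 ? INT_MIN : 0   =>   x & INT_MIN
   since INT_MIN has exactly the sign bit of x set.  */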
13007 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13008 already handled above. */
13009 if (TREE_CODE (arg0) == BIT_AND_EXPR
13010 && integer_onep (TREE_OPERAND (arg0, 1))
13011 && integer_zerop (op2)
13012 && integer_pow2p (arg1))
13014 tree tem = TREE_OPERAND (arg0, 0);
13015 STRIP_NOPS (tem);
13016 if (TREE_CODE (tem) == RSHIFT_EXPR
13017 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13018 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13019 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13020 return fold_build2 (BIT_AND_EXPR, type,
13021 TREE_OPERAND (tem, 0), arg1);
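/* E.g. "(a >> 3) & 1 ? 8 : 0" becomes "a & 8".  */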
13024 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13025 is probably obsolete because the first operand should be a
13026 truth value (that's why we have the two cases above), but let's
13027 leave it in until we can confirm this for all front-ends. */
13028 if (integer_zerop (op2)
13029 && TREE_CODE (arg0) == NE_EXPR
13030 && integer_zerop (TREE_OPERAND (arg0, 1))
13031 && integer_pow2p (arg1)
13032 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13033 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13034 arg1, OEP_ONLY_CONST))
13035 return pedantic_non_lvalue (fold_convert (type,
13036 TREE_OPERAND (arg0, 0)));
13038 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13039 if (integer_zerop (op2)
13040 && truth_value_p (TREE_CODE (arg0))
13041 && truth_value_p (TREE_CODE (arg1)))
13042 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13043 fold_convert (type, arg0),
13044 arg1);
13046 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13047 if (integer_onep (op2)
13048 && truth_value_p (TREE_CODE (arg0))
13049 && truth_value_p (TREE_CODE (arg1)))
13051 /* Only perform transformation if ARG0 is easily inverted. */
13052 tem = fold_truth_not_expr (arg0);
13053 if (tem)
13054 return fold_build2 (TRUTH_ORIF_EXPR, type,
13055 fold_convert (type, tem),
13056 arg1);
13059 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13060 if (integer_zerop (arg1)
13061 && truth_value_p (TREE_CODE (arg0))
13062 && truth_value_p (TREE_CODE (op2)))
13064 /* Only perform transformation if ARG0 is easily inverted. */
13065 tem = fold_truth_not_expr (arg0);
13066 if (tem)
13067 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13068 fold_convert (type, tem),
13069 op2);
13072 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13073 if (integer_onep (arg1)
13074 && truth_value_p (TREE_CODE (arg0))
13075 && truth_value_p (TREE_CODE (op2)))
13076 return fold_build2 (TRUTH_ORIF_EXPR, type,
13077 fold_convert (type, arg0),
13078 op2);
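/* Summarizing the four truth-value folds above:
     a ? b : 0  =>  a && b        a ? b : 1  =>  !a || b
     a ? 0 : b  =>  !a && b       a ? 1 : b  =>  a || b  */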
13080 return NULL_TREE;
13082 case CALL_EXPR:
13083 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13084 of fold_ternary on them. */
13085 gcc_unreachable ();
13087 case BIT_FIELD_REF:
13088 if ((TREE_CODE (arg0) == VECTOR_CST
13089 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13090 && type == TREE_TYPE (TREE_TYPE (arg0)))
13092 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13093 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13095 if (width != 0
13096 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13097 && (idx % width) == 0
13098 && (idx = idx / width)
13099 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13101 tree elements = NULL_TREE;
13103 if (TREE_CODE (arg0) == VECTOR_CST)
13104 elements = TREE_VECTOR_CST_ELTS (arg0);
13105 else
13107 unsigned HOST_WIDE_INT idx;
13108 tree value;
13110 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13111 elements = tree_cons (NULL_TREE, value, elements);
13113 while (idx-- > 0 && elements)
13114 elements = TREE_CHAIN (elements);
13115 if (elements)
13116 return TREE_VALUE (elements);
13117 else
13118 return fold_convert (type, integer_zero_node);
13122 /* A bit-field-ref that referenced the full argument can be stripped. */
13123 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13124 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13125 && integer_zerop (op2))
13126 return fold_convert (type, arg0);
13128 return NULL_TREE;
13130 default:
13131 return NULL_TREE;
13132 } /* switch (code) */
13135 /* Perform constant folding and related simplification of EXPR.
13136 The related simplifications include x*1 => x, x*0 => 0, etc.,
13137 and application of the associative law.
13138 NOP_EXPR conversions may be removed freely (as long as we
13139 are careful not to change the type of the overall expression).
13140 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13141 but we can constant-fold them if they have constant operands. */
13143 #ifdef ENABLE_FOLD_CHECKING
13144 # define fold(x) fold_1 (x)
13145 static tree fold_1 (tree);
13146 static
13147 #endif
13148 tree
13149 fold (tree expr)
13151 const tree t = expr;
13152 enum tree_code code = TREE_CODE (t);
13153 enum tree_code_class kind = TREE_CODE_CLASS (code);
13154 tree tem;
13156 /* Return right away if a constant. */
13157 if (kind == tcc_constant)
13158 return t;
13160 /* CALL_EXPR-like objects with variable numbers of operands are
13161 treated specially. */
13162 if (kind == tcc_vl_exp)
13164 if (code == CALL_EXPR)
13166 tem = fold_call_expr (expr, false);
13167 return tem ? tem : expr;
13169 return expr;
13172 if (IS_EXPR_CODE_CLASS (kind)
13173 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13175 tree type = TREE_TYPE (t);
13176 tree op0, op1, op2;
13178 switch (TREE_CODE_LENGTH (code))
13180 case 1:
13181 op0 = TREE_OPERAND (t, 0);
13182 tem = fold_unary (code, type, op0);
13183 return tem ? tem : expr;
13184 case 2:
13185 op0 = TREE_OPERAND (t, 0);
13186 op1 = TREE_OPERAND (t, 1);
13187 tem = fold_binary (code, type, op0, op1);
13188 return tem ? tem : expr;
13189 case 3:
13190 op0 = TREE_OPERAND (t, 0);
13191 op1 = TREE_OPERAND (t, 1);
13192 op2 = TREE_OPERAND (t, 2);
13193 tem = fold_ternary (code, type, op0, op1, op2);
13194 return tem ? tem : expr;
13195 default:
13196 break;
13200 switch (code)
13202 case ARRAY_REF:
13204 tree op0 = TREE_OPERAND (t, 0);
13205 tree op1 = TREE_OPERAND (t, 1);
13207 if (TREE_CODE (op1) == INTEGER_CST
13208 && TREE_CODE (op0) == CONSTRUCTOR
13209 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13211 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13212 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13213 unsigned HOST_WIDE_INT begin = 0;
13215 /* Find a matching index by means of a binary search. */
13216 while (begin != end)
13218 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13219 tree index = VEC_index (constructor_elt, elts, middle)->index;
13221 if (TREE_CODE (index) == INTEGER_CST
13222 && tree_int_cst_lt (index, op1))
13223 begin = middle + 1;
13224 else if (TREE_CODE (index) == INTEGER_CST
13225 && tree_int_cst_lt (op1, index))
13226 end = middle;
13227 else if (TREE_CODE (index) == RANGE_EXPR
13228 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13229 begin = middle + 1;
13230 else if (TREE_CODE (index) == RANGE_EXPR
13231 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13232 end = middle;
13233 else
13234 return VEC_index (constructor_elt, elts, middle)->value;
13238 return t;
13241 case CONST_DECL:
13242 return fold (DECL_INITIAL (t));
13244 default:
13245 return t;
13246 } /* switch (code) */
13249 #ifdef ENABLE_FOLD_CHECKING
13250 #undef fold
13252 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13253 static void fold_check_failed (const_tree, const_tree);
13254 void print_fold_checksum (const_tree);
13256 /* When --enable-checking=fold, compute a digest of EXPR before
13257 and after the actual fold call to verify that fold did not
13258 accidentally change the original expr. */
13260 tree
13261 fold (tree expr)
13263 tree ret;
13264 struct md5_ctx ctx;
13265 unsigned char checksum_before[16], checksum_after[16];
13266 htab_t ht;
13268 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13269 md5_init_ctx (&ctx);
13270 fold_checksum_tree (expr, &ctx, ht);
13271 md5_finish_ctx (&ctx, checksum_before);
13272 htab_empty (ht);
13274 ret = fold_1 (expr);
13276 md5_init_ctx (&ctx);
13277 fold_checksum_tree (expr, &ctx, ht);
13278 md5_finish_ctx (&ctx, checksum_after);
13279 htab_delete (ht);
13281 if (memcmp (checksum_before, checksum_after, 16))
13282 fold_check_failed (expr, ret);
13284 return ret;
13287 void
13288 print_fold_checksum (const_tree expr)
13290 struct md5_ctx ctx;
13291 unsigned char checksum[16], cnt;
13292 htab_t ht;
13294 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13295 md5_init_ctx (&ctx);
13296 fold_checksum_tree (expr, &ctx, ht);
13297 md5_finish_ctx (&ctx, checksum);
13298 htab_delete (ht);
13299 for (cnt = 0; cnt < 16; ++cnt)
13300 fprintf (stderr, "%02x", checksum[cnt]);
13301 putc ('\n', stderr);
13304 static void
13305 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13307 internal_error ("fold check: original tree changed by fold");
13310 static void
13311 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13313 const void **slot;
13314 enum tree_code code;
13315 struct tree_function_decl buf;
13316 int i, len;
13318 recursive_label:
13320 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13321 <= sizeof (struct tree_function_decl))
13322 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13323 if (expr == NULL)
13324 return;
13325 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13326 if (*slot != NULL)
13327 return;
13328 *slot = expr;
13329 code = TREE_CODE (expr);
13330 if (TREE_CODE_CLASS (code) == tcc_declaration
13331 && DECL_ASSEMBLER_NAME_SET_P (expr))
13333 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13334 memcpy ((char *) &buf, expr, tree_size (expr));
13335 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13336 expr = (tree) &buf;
13338 else if (TREE_CODE_CLASS (code) == tcc_type
13339 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13340 || TYPE_CACHED_VALUES_P (expr)
13341 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13343 /* Allow these fields to be modified. */
13344 tree tmp;
13345 memcpy ((char *) &buf, expr, tree_size (expr));
13346 expr = tmp = (tree) &buf;
13347 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13348 TYPE_POINTER_TO (tmp) = NULL;
13349 TYPE_REFERENCE_TO (tmp) = NULL;
13350 if (TYPE_CACHED_VALUES_P (tmp))
13352 TYPE_CACHED_VALUES_P (tmp) = 0;
13353 TYPE_CACHED_VALUES (tmp) = NULL;
13356 md5_process_bytes (expr, tree_size (expr), ctx);
13357 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13358 if (TREE_CODE_CLASS (code) != tcc_type
13359 && TREE_CODE_CLASS (code) != tcc_declaration
13360 && code != TREE_LIST
13361 && code != SSA_NAME)
13362 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13363 switch (TREE_CODE_CLASS (code))
13365 case tcc_constant:
13366 switch (code)
13368 case STRING_CST:
13369 md5_process_bytes (TREE_STRING_POINTER (expr),
13370 TREE_STRING_LENGTH (expr), ctx);
13371 break;
13372 case COMPLEX_CST:
13373 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13374 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13375 break;
13376 case VECTOR_CST:
13377 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13378 break;
13379 default:
13380 break;
13382 break;
13383 case tcc_exceptional:
13384 switch (code)
13386 case TREE_LIST:
13387 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13388 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13389 expr = TREE_CHAIN (expr);
13390 goto recursive_label;
13391 break;
13392 case TREE_VEC:
13393 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13394 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13395 break;
13396 default:
13397 break;
13399 break;
13400 case tcc_expression:
13401 case tcc_reference:
13402 case tcc_comparison:
13403 case tcc_unary:
13404 case tcc_binary:
13405 case tcc_statement:
13406 case tcc_vl_exp:
13407 len = TREE_OPERAND_LENGTH (expr);
13408 for (i = 0; i < len; ++i)
13409 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13410 break;
13411 case tcc_declaration:
13412 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13413 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13414 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13416 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13417 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13418 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13419 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13420 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13422 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13423 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13425 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13427 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13428 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13429 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13431 break;
13432 case tcc_type:
13433 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13434 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13435 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13436 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13437 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13438 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13439 if (INTEGRAL_TYPE_P (expr)
13440 || SCALAR_FLOAT_TYPE_P (expr))
13442 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13443 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13445 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13446 if (TREE_CODE (expr) == RECORD_TYPE
13447 || TREE_CODE (expr) == UNION_TYPE
13448 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13449 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13450 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13451 break;
13452 default:
13453 break;
13457 /* Helper function for outputting the checksum of a tree T. When
13458 debugging with gdb, you can "define mynext" to be "next" followed
13459 by "call debug_fold_checksum (op0)", then just trace down till the
13460 outputs differ. */
13462 void
13463 debug_fold_checksum (const_tree t)
13465 int i;
13466 unsigned char checksum[16];
13467 struct md5_ctx ctx;
13468 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13470 md5_init_ctx (&ctx);
13471 fold_checksum_tree (t, &ctx, ht);
13472 md5_finish_ctx (&ctx, checksum);
13473 htab_empty (ht);
13475 for (i = 0; i < 16; i++)
13476 fprintf (stderr, "%d ", checksum[i]);
13478 fprintf (stderr, "\n");
13481 #endif
13483 /* Fold a unary tree expression with code CODE of type TYPE with an
13484 operand OP0. Return a folded expression if successful. Otherwise,
13485 return a tree expression with code CODE of type TYPE with an
13486 operand OP0. */
13488 tree
13489 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13491 tree tem;
13492 #ifdef ENABLE_FOLD_CHECKING
13493 unsigned char checksum_before[16], checksum_after[16];
13494 struct md5_ctx ctx;
13495 htab_t ht;
13497 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13498 md5_init_ctx (&ctx);
13499 fold_checksum_tree (op0, &ctx, ht);
13500 md5_finish_ctx (&ctx, checksum_before);
13501 htab_empty (ht);
13502 #endif
13504 tem = fold_unary (code, type, op0);
13505 if (!tem)
13506 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13508 #ifdef ENABLE_FOLD_CHECKING
13509 md5_init_ctx (&ctx);
13510 fold_checksum_tree (op0, &ctx, ht);
13511 md5_finish_ctx (&ctx, checksum_after);
13512 htab_delete (ht);
13514 if (memcmp (checksum_before, checksum_after, 16))
13515 fold_check_failed (op0, tem);
13516 #endif
13517 return tem;
13520 /* Fold a binary tree expression with code CODE of type TYPE with
13521 operands OP0 and OP1. Return a folded expression if successful.
13522 Otherwise, return a tree expression with code CODE of type TYPE
13523 with operands OP0 and OP1. */
13525 tree
13526 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13527 MEM_STAT_DECL)
13529 tree tem;
13530 #ifdef ENABLE_FOLD_CHECKING
13531 unsigned char checksum_before_op0[16],
13532 checksum_before_op1[16],
13533 checksum_after_op0[16],
13534 checksum_after_op1[16];
13535 struct md5_ctx ctx;
13536 htab_t ht;
13538 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13539 md5_init_ctx (&ctx);
13540 fold_checksum_tree (op0, &ctx, ht);
13541 md5_finish_ctx (&ctx, checksum_before_op0);
13542 htab_empty (ht);
13544 md5_init_ctx (&ctx);
13545 fold_checksum_tree (op1, &ctx, ht);
13546 md5_finish_ctx (&ctx, checksum_before_op1);
13547 htab_empty (ht);
13548 #endif
13550 tem = fold_binary (code, type, op0, op1);
13551 if (!tem)
13552 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13554 #ifdef ENABLE_FOLD_CHECKING
13555 md5_init_ctx (&ctx);
13556 fold_checksum_tree (op0, &ctx, ht);
13557 md5_finish_ctx (&ctx, checksum_after_op0);
13558 htab_empty (ht);
13560 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13561 fold_check_failed (op0, tem);
13563 md5_init_ctx (&ctx);
13564 fold_checksum_tree (op1, &ctx, ht);
13565 md5_finish_ctx (&ctx, checksum_after_op1);
13566 htab_delete (ht);
13568 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13569 fold_check_failed (op1, tem);
13570 #endif
13571 return tem;
13574 /* Fold a ternary tree expression with code CODE of type TYPE with
13575 operands OP0, OP1, and OP2. Return a folded expression if
13576 successful. Otherwise, return a tree expression with code CODE of
13577 type TYPE with operands OP0, OP1, and OP2. */
13579 tree
13580 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13581 MEM_STAT_DECL)
13583 tree tem;
13584 #ifdef ENABLE_FOLD_CHECKING
13585 unsigned char checksum_before_op0[16],
13586 checksum_before_op1[16],
13587 checksum_before_op2[16],
13588 checksum_after_op0[16],
13589 checksum_after_op1[16],
13590 checksum_after_op2[16];
13591 struct md5_ctx ctx;
13592 htab_t ht;
13594 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13595 md5_init_ctx (&ctx);
13596 fold_checksum_tree (op0, &ctx, ht);
13597 md5_finish_ctx (&ctx, checksum_before_op0);
13598 htab_empty (ht);
13600 md5_init_ctx (&ctx);
13601 fold_checksum_tree (op1, &ctx, ht);
13602 md5_finish_ctx (&ctx, checksum_before_op1);
13603 htab_empty (ht);
13605 md5_init_ctx (&ctx);
13606 fold_checksum_tree (op2, &ctx, ht);
13607 md5_finish_ctx (&ctx, checksum_before_op2);
13608 htab_empty (ht);
13609 #endif
13611 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13612 tem = fold_ternary (code, type, op0, op1, op2);
13613 if (!tem)
13614 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13616 #ifdef ENABLE_FOLD_CHECKING
13617 md5_init_ctx (&ctx);
13618 fold_checksum_tree (op0, &ctx, ht);
13619 md5_finish_ctx (&ctx, checksum_after_op0);
13620 htab_empty (ht);
13622 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13623 fold_check_failed (op0, tem);
13625 md5_init_ctx (&ctx);
13626 fold_checksum_tree (op1, &ctx, ht);
13627 md5_finish_ctx (&ctx, checksum_after_op1);
13628 htab_empty (ht);
13630 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13631 fold_check_failed (op1, tem);
13633 md5_init_ctx (&ctx);
13634 fold_checksum_tree (op2, &ctx, ht);
13635 md5_finish_ctx (&ctx, checksum_after_op2);
13636 htab_delete (ht);
13638 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13639 fold_check_failed (op2, tem);
13640 #endif
13641 return tem;
13644 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13645 arguments in ARGARRAY, and a null static chain.
13646 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13647 of type TYPE from the given operands as constructed by build_call_array. */
13649 tree
13650 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13652 tree tem;
13653 #ifdef ENABLE_FOLD_CHECKING
13654 unsigned char checksum_before_fn[16],
13655 checksum_before_arglist[16],
13656 checksum_after_fn[16],
13657 checksum_after_arglist[16];
13658 struct md5_ctx ctx;
13659 htab_t ht;
13660 int i;
13662 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13663 md5_init_ctx (&ctx);
13664 fold_checksum_tree (fn, &ctx, ht);
13665 md5_finish_ctx (&ctx, checksum_before_fn);
13666 htab_empty (ht);
13668 md5_init_ctx (&ctx);
13669 for (i = 0; i < nargs; i++)
13670 fold_checksum_tree (argarray[i], &ctx, ht);
13671 md5_finish_ctx (&ctx, checksum_before_arglist);
13672 htab_empty (ht);
13673 #endif
13675 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13677 #ifdef ENABLE_FOLD_CHECKING
13678 md5_init_ctx (&ctx);
13679 fold_checksum_tree (fn, &ctx, ht);
13680 md5_finish_ctx (&ctx, checksum_after_fn);
13681 htab_empty (ht);
13683 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13684 fold_check_failed (fn, tem);
13686 md5_init_ctx (&ctx);
13687 for (i = 0; i < nargs; i++)
13688 fold_checksum_tree (argarray[i], &ctx, ht);
13689 md5_finish_ctx (&ctx, checksum_after_arglist);
13690 htab_delete (ht);
13692 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13693 fold_check_failed (NULL_TREE, tem);
13694 #endif
13695 return tem;
13698 /* Perform constant folding and related simplification of initializer
13699 expressions. These behave identically to "fold_buildN" but ignore
13700 potential run-time traps and exceptions that fold must preserve. */
13702 #define START_FOLD_INIT \
13703 int saved_signaling_nans = flag_signaling_nans;\
13704 int saved_trapping_math = flag_trapping_math;\
13705 int saved_rounding_math = flag_rounding_math;\
13706 int saved_trapv = flag_trapv;\
13707 int saved_folding_initializer = folding_initializer;\
13708 flag_signaling_nans = 0;\
13709 flag_trapping_math = 0;\
13710 flag_rounding_math = 0;\
13711 flag_trapv = 0;\
13712 folding_initializer = 1;
13714 #define END_FOLD_INIT \
13715 flag_signaling_nans = saved_signaling_nans;\
13716 flag_trapping_math = saved_trapping_math;\
13717 flag_rounding_math = saved_rounding_math;\
13718 flag_trapv = saved_trapv;\
13719 folding_initializer = saved_folding_initializer;
13721 tree
13722 fold_build1_initializer (enum tree_code code, tree type, tree op)
13724 tree result;
13725 START_FOLD_INIT;
13727 result = fold_build1 (code, type, op);
13729 END_FOLD_INIT;
13730 return result;
13733 tree
13734 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13736 tree result;
13737 START_FOLD_INIT;
13739 result = fold_build2 (code, type, op0, op1);
13741 END_FOLD_INIT;
13742 return result;
13745 tree
13746 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13747 tree op2)
13749 tree result;
13750 START_FOLD_INIT;
13752 result = fold_build3 (code, type, op0, op1, op2);
13754 END_FOLD_INIT;
13755 return result;
13758 tree
13759 fold_build_call_array_initializer (tree type, tree fn,
13760 int nargs, tree *argarray)
13762 tree result;
13763 START_FOLD_INIT;
13765 result = fold_build_call_array (type, fn, nargs, argarray);
13767 END_FOLD_INIT;
13768 return result;
13771 #undef START_FOLD_INIT
13772 #undef END_FOLD_INIT
13774 /* Determine if the first argument is a multiple of the second argument.
13775 Return 0 if it is not, or if we cannot easily determine it to be.
13777 An example of the sort of thing we care about (at this point; this routine
13778 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13779 fold cases do now) is discovering that
13781 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13783 is a multiple of
13785 SAVE_EXPR (J * 8)
13787 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13789 This code also handles discovering that
13791 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13793 is a multiple of 8 so we don't have to worry about dealing with a
13794 possible remainder.
13796 Note that we *look* inside a SAVE_EXPR only to determine how it was
13797 calculated; it is not safe for fold to do much of anything else with the
13798 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13799 at run time. For example, the latter example above *cannot* be implemented
13800 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13801 evaluation time of the original SAVE_EXPR is not necessarily the same at
13802 the time the new expression is evaluated. The only optimization of this
13803 sort that would be valid is changing
13805 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13807 divided by 8 to
13809 SAVE_EXPR (I) * SAVE_EXPR (J)
13811 (where the same SAVE_EXPR (J) is used in the original and the
13812 transformed version). */
13814 static int
13815 multiple_of_p (tree type, const_tree top, const_tree bottom)
13817 if (operand_equal_p (top, bottom, 0))
13818 return 1;
13820 if (TREE_CODE (type) != INTEGER_TYPE)
13821 return 0;
13823 switch (TREE_CODE (top))
13825 case BIT_AND_EXPR:
13826 /* Bitwise and provides a power of two multiple. If the mask is
13827 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13828 if (!integer_pow2p (bottom))
13829 return 0;
13830 /* FALLTHRU */
13832 case MULT_EXPR:
13833 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13834 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13836 case PLUS_EXPR:
13837 case MINUS_EXPR:
13838 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13839 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13841 case LSHIFT_EXPR:
13842 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13844 tree op1, t1;
13846 op1 = TREE_OPERAND (top, 1);
13847 /* const_binop may not detect overflow correctly,
13848 so check for it explicitly here. */
13849 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13850 > TREE_INT_CST_LOW (op1)
13851 && TREE_INT_CST_HIGH (op1) == 0
13852 && 0 != (t1 = fold_convert (type,
13853 const_binop (LSHIFT_EXPR,
13854 size_one_node,
13855 op1, 0)))
13856 && !TREE_OVERFLOW (t1))
13857 return multiple_of_p (type, t1, bottom);
13859 return 0;
13861 case NOP_EXPR:
13862 /* Can't handle conversions from a non-integral or wider integral type. */
13863 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13864 || (TYPE_PRECISION (type)
13865 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13866 return 0;
13868 /* FALLTHRU */
13870 case SAVE_EXPR:
13871 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13873 case INTEGER_CST:
13874 if (TREE_CODE (bottom) != INTEGER_CST
13875 || integer_zerop (bottom)
13876 || (TYPE_UNSIGNED (type)
13877 && (tree_int_cst_sgn (top) < 0
13878 || tree_int_cst_sgn (bottom) < 0)))
13879 return 0;
13880 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13881 top, bottom, 0));
13883 default:
13884 return 0;
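/* For example, "(i * 8) + 20" is recognized as a multiple of 4:
   PLUS_EXPR requires both operands to be multiples, MULT_EXPR
   only one, and the INTEGER_CST case reduces to checking
   8 % 4 == 0 and 20 % 4 == 0.  */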
13888 /* Return true if CODE or TYPE is known to be non-negative. */
13890 static bool
13891 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13893 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13894 && truth_value_p (code))
13895 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13896 have a signed:1 type (where the values are -1 and 0). */
13897 return true;
13898 return false;
13901 /* Return true if (CODE OP0) is known to be non-negative. If the return
13902 value is based on the assumption that signed overflow is undefined,
13903 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13904 *STRICT_OVERFLOW_P. */
13906 bool
13907 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13908 bool *strict_overflow_p)
13910 if (TYPE_UNSIGNED (type))
13911 return true;
13913 switch (code)
13915 case ABS_EXPR:
13916 /* We can't return 1 if flag_wrapv is set because
13917 ABS_EXPR<INT_MIN> = INT_MIN. */
13918 if (!INTEGRAL_TYPE_P (type))
13919 return true;
13920 if (TYPE_OVERFLOW_UNDEFINED (type))
13922 *strict_overflow_p = true;
13923 return true;
13925 break;
13927 case NON_LVALUE_EXPR:
13928 case FLOAT_EXPR:
13929 case FIX_TRUNC_EXPR:
13930 return tree_expr_nonnegative_warnv_p (op0,
13931 strict_overflow_p);
13933 case NOP_EXPR:
13935 tree inner_type = TREE_TYPE (op0);
13936 tree outer_type = type;
13938 if (TREE_CODE (outer_type) == REAL_TYPE)
13940 if (TREE_CODE (inner_type) == REAL_TYPE)
13941 return tree_expr_nonnegative_warnv_p (op0,
13942 strict_overflow_p);
13943 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13945 if (TYPE_UNSIGNED (inner_type))
13946 return true;
13947 return tree_expr_nonnegative_warnv_p (op0,
13948 strict_overflow_p);
13951 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13953 if (TREE_CODE (inner_type) == REAL_TYPE)
13954 return tree_expr_nonnegative_warnv_p (op0,
13955 strict_overflow_p);
13956 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13957 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13958 && TYPE_UNSIGNED (inner_type);
13961 break;
13963 default:
13964 return tree_simple_nonnegative_warnv_p (code, type);
13967 /* We don't know the sign of `t', so be conservative and return false. */
13968 return false;
13971 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13972 value is based on the assumption that signed overflow is undefined,
13973 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13974 *STRICT_OVERFLOW_P. */
13976 bool
13977 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13978 tree op1, bool *strict_overflow_p)
13980 if (TYPE_UNSIGNED (type))
13981 return true;
13983 switch (code)
13985 case POINTER_PLUS_EXPR:
13986 case PLUS_EXPR:
13987 if (FLOAT_TYPE_P (type))
13988 return (tree_expr_nonnegative_warnv_p (op0,
13989 strict_overflow_p)
13990 && tree_expr_nonnegative_warnv_p (op1,
13991 strict_overflow_p));
13993 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13994 both unsigned and at least 2 bits shorter than the result. */
13995 if (TREE_CODE (type) == INTEGER_TYPE
13996 && TREE_CODE (op0) == NOP_EXPR
13997 && TREE_CODE (op1) == NOP_EXPR)
13999 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14000 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14001 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14002 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14004 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14005 TYPE_PRECISION (inner2)) + 1;
14006 return prec < TYPE_PRECISION (type);
14009 break;
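/* E.g. with 16-bit unsigned short operands zero-extended to a
   32-bit int, each addend is at most 0xffff, so the sum fits in
   17 bits and can never reach the sign bit.  */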
14011 case MULT_EXPR:
14012 if (FLOAT_TYPE_P (type))
14014 /* x * x for floating point x is always non-negative. */
14015 if (operand_equal_p (op0, op1, 0))
14016 return true;
14017 return (tree_expr_nonnegative_warnv_p (op0,
14018 strict_overflow_p)
14019 && tree_expr_nonnegative_warnv_p (op1,
14020 strict_overflow_p));
14023 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14024 both unsigned and the sum of their precisions is less than the result's. */
14025 if (TREE_CODE (type) == INTEGER_TYPE
14026 && TREE_CODE (op0) == NOP_EXPR
14027 && TREE_CODE (op1) == NOP_EXPR)
14029 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14030 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14031 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14032 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14033 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14034 < TYPE_PRECISION (type);
14036 return false;
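/* E.g. the product of two 8-bit unsigned chars zero-extended to
   a 32-bit int needs at most 16 bits, hence is nonnegative;
   16-bit shorts would not qualify, as 16 + 16 is not less
   than 32.  */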
14038 case BIT_AND_EXPR:
14039 case MAX_EXPR:
14040 return (tree_expr_nonnegative_warnv_p (op0,
14041 strict_overflow_p)
14042 || tree_expr_nonnegative_warnv_p (op1,
14043 strict_overflow_p));
14045 case BIT_IOR_EXPR:
14046 case BIT_XOR_EXPR:
14047 case MIN_EXPR:
14048 case RDIV_EXPR:
14049 case TRUNC_DIV_EXPR:
14050 case CEIL_DIV_EXPR:
14051 case FLOOR_DIV_EXPR:
14052 case ROUND_DIV_EXPR:
14053 return (tree_expr_nonnegative_warnv_p (op0,
14054 strict_overflow_p)
14055 && tree_expr_nonnegative_warnv_p (op1,
14056 strict_overflow_p));
14058 case TRUNC_MOD_EXPR:
14059 case CEIL_MOD_EXPR:
14060 case FLOOR_MOD_EXPR:
14061 case ROUND_MOD_EXPR:
14062 return tree_expr_nonnegative_warnv_p (op0,
14063 strict_overflow_p);
14064 default:
14065 return tree_simple_nonnegative_warnv_p (code, type);
14068 /* We don't know the sign of `t', so be conservative and return false. */
14069 return false;
14072 /* Return true if T is known to be non-negative. If the return
14073 value is based on the assumption that signed overflow is undefined,
14074 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14075 *STRICT_OVERFLOW_P. */
14077 bool
14078 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14080 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14081 return true;
14083 switch (TREE_CODE (t))
14085 case INTEGER_CST:
14086 return tree_int_cst_sgn (t) >= 0;
14088 case REAL_CST:
14089 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14091 case FIXED_CST:
14092 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14094 case COND_EXPR:
14095 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14096 strict_overflow_p)
14097 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14098 strict_overflow_p));
14099 default:
14100 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14101 TREE_TYPE (t));
14103 /* We don't know the sign of `t', so be conservative and return false. */
14104 return false;
14107 /* Return true if T is known to be non-negative. If the return
14108 value is based on the assumption that signed overflow is undefined,
14109 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14110 *STRICT_OVERFLOW_P. */
14112 bool
14113 tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
14114 tree arg0, tree arg1, bool *strict_overflow_p)
14116 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14117 switch (DECL_FUNCTION_CODE (fndecl))
14119 CASE_FLT_FN (BUILT_IN_ACOS):
14120 CASE_FLT_FN (BUILT_IN_ACOSH):
14121 CASE_FLT_FN (BUILT_IN_CABS):
14122 CASE_FLT_FN (BUILT_IN_COSH):
14123 CASE_FLT_FN (BUILT_IN_ERFC):
14124 CASE_FLT_FN (BUILT_IN_EXP):
14125 CASE_FLT_FN (BUILT_IN_EXP10):
14126 CASE_FLT_FN (BUILT_IN_EXP2):
14127 CASE_FLT_FN (BUILT_IN_FABS):
14128 CASE_FLT_FN (BUILT_IN_FDIM):
14129 CASE_FLT_FN (BUILT_IN_HYPOT):
14130 CASE_FLT_FN (BUILT_IN_POW10):
14131 CASE_INT_FN (BUILT_IN_FFS):
14132 CASE_INT_FN (BUILT_IN_PARITY):
14133 CASE_INT_FN (BUILT_IN_POPCOUNT):
14134 case BUILT_IN_BSWAP32:
14135 case BUILT_IN_BSWAP64:
14136 /* Always true. */
14137 return true;
14139 CASE_FLT_FN (BUILT_IN_SQRT):
14140 /* sqrt(-0.0) is -0.0. */
14141 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14142 return true;
14143 return tree_expr_nonnegative_warnv_p (arg0,
14144 strict_overflow_p);
14146 CASE_FLT_FN (BUILT_IN_ASINH):
14147 CASE_FLT_FN (BUILT_IN_ATAN):
14148 CASE_FLT_FN (BUILT_IN_ATANH):
14149 CASE_FLT_FN (BUILT_IN_CBRT):
14150 CASE_FLT_FN (BUILT_IN_CEIL):
14151 CASE_FLT_FN (BUILT_IN_ERF):
14152 CASE_FLT_FN (BUILT_IN_EXPM1):
14153 CASE_FLT_FN (BUILT_IN_FLOOR):
14154 CASE_FLT_FN (BUILT_IN_FMOD):
14155 CASE_FLT_FN (BUILT_IN_FREXP):
14156 CASE_FLT_FN (BUILT_IN_LCEIL):
14157 CASE_FLT_FN (BUILT_IN_LDEXP):
14158 CASE_FLT_FN (BUILT_IN_LFLOOR):
14159 CASE_FLT_FN (BUILT_IN_LLCEIL):
14160 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14161 CASE_FLT_FN (BUILT_IN_LLRINT):
14162 CASE_FLT_FN (BUILT_IN_LLROUND):
14163 CASE_FLT_FN (BUILT_IN_LRINT):
14164 CASE_FLT_FN (BUILT_IN_LROUND):
14165 CASE_FLT_FN (BUILT_IN_MODF):
14166 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14167 CASE_FLT_FN (BUILT_IN_RINT):
14168 CASE_FLT_FN (BUILT_IN_ROUND):
14169 CASE_FLT_FN (BUILT_IN_SCALB):
14170 CASE_FLT_FN (BUILT_IN_SCALBLN):
14171 CASE_FLT_FN (BUILT_IN_SCALBN):
14172 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14173 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14174 CASE_FLT_FN (BUILT_IN_SINH):
14175 CASE_FLT_FN (BUILT_IN_TANH):
14176 CASE_FLT_FN (BUILT_IN_TRUNC):
14177 /* True if the 1st argument is nonnegative. */
14178 return tree_expr_nonnegative_warnv_p (arg0,
14179 strict_overflow_p);
14181 CASE_FLT_FN (BUILT_IN_FMAX):
14182 /* True if the 1st OR the 2nd argument is nonnegative. */
14183 return (tree_expr_nonnegative_warnv_p (arg0,
14184 strict_overflow_p)
14185 || (tree_expr_nonnegative_warnv_p (arg1,
14186 strict_overflow_p)));
14188 CASE_FLT_FN (BUILT_IN_FMIN):
14189 /* True if the 1st AND 2nd arguments are nonnegative. */
14190 return (tree_expr_nonnegative_warnv_p (arg0,
14191 strict_overflow_p)
14192 && (tree_expr_nonnegative_warnv_p (arg1,
14193 strict_overflow_p)));
14195 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14196 /* True if the 2nd argument is nonnegative. */
14197 return tree_expr_nonnegative_warnv_p (arg1,
14198 strict_overflow_p);
14200 CASE_FLT_FN (BUILT_IN_POWI):
14201 /* True if the 1st argument is nonnegative or the second
14202 argument is an even integer. */
14203 if (TREE_CODE (arg1) == INTEGER_CST
14204 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14205 return true;
14206 return tree_expr_nonnegative_warnv_p (arg0,
14207 strict_overflow_p);
14209 CASE_FLT_FN (BUILT_IN_POW):
14210 /* True if the 1st argument is nonnegative or the second
14211 argument is an even integer-valued real. */
14212 if (TREE_CODE (arg1) == REAL_CST)
14214 REAL_VALUE_TYPE c;
14215 HOST_WIDE_INT n;
14217 c = TREE_REAL_CST (arg1);
14218 n = real_to_integer (&c);
14219 if ((n & 1) == 0)
14221 REAL_VALUE_TYPE cint;
14222 real_from_integer (&cint, VOIDmode, n,
14223 n < 0 ? -1 : 0, 0);
14224 if (real_identical (&c, &cint))
14225 return true;
14228 return tree_expr_nonnegative_warnv_p (arg0,
14229 strict_overflow_p);
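/* E.g. pow (x, 2.0) is known nonnegative for any x because 2.0
   is an even integer-valued real, whereas pow (x, 0.5) is known
   nonnegative only when x is.  */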
14231 default:
14232 break;
14234 return tree_simple_nonnegative_warnv_p (code,
14235 type);
14238 /* Return true if T is known to be non-negative. If the return
14239 value is based on the assumption that signed overflow is undefined,
14240 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14241 *STRICT_OVERFLOW_P. */
14243 bool
14244 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14246 enum tree_code code = TREE_CODE (t);
14247 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14248 return true;
14250 switch (code)
14252 case TARGET_EXPR:
14254 tree temp = TARGET_EXPR_SLOT (t);
14255 t = TARGET_EXPR_INITIAL (t);
14257 /* If the initializer is non-void, then it's a normal expression
14258 that will be assigned to the slot. */
14259 if (!VOID_TYPE_P (t))
14260 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14262 /* Otherwise, the initializer sets the slot in some way. One common
14263 way is an assignment statement at the end of the initializer. */
14264 while (1)
14266 if (TREE_CODE (t) == BIND_EXPR)
14267 t = expr_last (BIND_EXPR_BODY (t));
14268 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14269 || TREE_CODE (t) == TRY_CATCH_EXPR)
14270 t = expr_last (TREE_OPERAND (t, 0));
14271 else if (TREE_CODE (t) == STATEMENT_LIST)
14272 t = expr_last (t);
14273 else
14274 break;
14276 if ((TREE_CODE (t) == MODIFY_EXPR
14277 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14278 && GENERIC_TREE_OPERAND (t, 0) == temp)
14279 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14280 strict_overflow_p);
14282 return false;
14285 case CALL_EXPR:
14287 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14288 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14290 return tree_call_nonnegative_warnv_p (TREE_CODE (t),
14291 TREE_TYPE (t),
14292 get_callee_fndecl (t),
14293 arg0,
14294 arg1,
14295 strict_overflow_p);
14297 case COMPOUND_EXPR:
14298 case MODIFY_EXPR:
14299 case GIMPLE_MODIFY_STMT:
14300 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14301 strict_overflow_p);
14302 case BIND_EXPR:
14303 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14304 strict_overflow_p);
14305 case SAVE_EXPR:
14306 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14307 strict_overflow_p);
14309 default:
14310 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14311 TREE_TYPE (t));
14314 /* We don't know the sign of `t', so be conservative and return false. */
14315 return false;
14318 /* Return true if T is known to be non-negative. If the return
14319 value is based on the assumption that signed overflow is undefined,
14320 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14321 *STRICT_OVERFLOW_P. */
14323 bool
14324 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14326 enum tree_code code;
14327 if (t == error_mark_node)
14328 return false;
14330 code = TREE_CODE (t);
14331 switch (TREE_CODE_CLASS (code))
14333 case tcc_binary:
14334 case tcc_comparison:
14335 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14336 TREE_TYPE (t),
14337 TREE_OPERAND (t, 0),
14338 TREE_OPERAND (t, 1),
14339 strict_overflow_p);
14341 case tcc_unary:
14342 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14343 TREE_TYPE (t),
14344 TREE_OPERAND (t, 0),
14345 strict_overflow_p);
14347 case tcc_constant:
14348 case tcc_declaration:
14349 case tcc_reference:
14350 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14352 default:
14353 break;
14356 switch (code)
14358 case TRUTH_AND_EXPR:
14359 case TRUTH_OR_EXPR:
14360 case TRUTH_XOR_EXPR:
14361 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14362 TREE_TYPE (t),
14363 TREE_OPERAND (t, 0),
14364 TREE_OPERAND (t, 1),
14365 strict_overflow_p);
14366 case TRUTH_NOT_EXPR:
14367 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14368 TREE_TYPE (t),
14369 TREE_OPERAND (t, 0),
14370 strict_overflow_p);
14372 case COND_EXPR:
14373 case CONSTRUCTOR:
14374 case OBJ_TYPE_REF:
14375 case ASSERT_EXPR:
14376 case ADDR_EXPR:
14377 case WITH_SIZE_EXPR:
14378 case EXC_PTR_EXPR:
14379 case SSA_NAME:
14380 case FILTER_EXPR:
14381 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14383 default:
14384 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14388 /* Return true if `t' is known to be non-negative. Handle warnings
14389 about undefined signed overflow. */
14391 bool
14392 tree_expr_nonnegative_p (tree t)
14394 bool ret, strict_overflow_p;
14396 strict_overflow_p = false;
14397 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14398 if (strict_overflow_p)
14399 fold_overflow_warning (("assuming signed overflow does not occur when "
14400 "determining that expression is always "
14401 "non-negative"),
14402 WARN_STRICT_OVERFLOW_MISC);
14403 return ret;
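/* Editorial illustration, not part of the original source: a sketch of
   a hypothetical caller of the predicate above; `arg' is invented.
   Any -Wstrict-overflow warning is emitted inside
   tree_expr_nonnegative_p itself.  */
#if 0
  /* fabs (arg) == arg when arg is provably non-negative.  */
  if (tree_expr_nonnegative_p (arg))
    return arg;
#endif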
14407 /* Return true when (CODE OP0) is known to be nonzero. For floating
14408 point we further ensure that the value is not denormal.
14409 Similar logic is present in nonzero_address in rtlanal.c.
14411 If the return value is based on the assumption that signed overflow
14412 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14413 change *STRICT_OVERFLOW_P. */
14415 bool
14416 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14417 bool *strict_overflow_p)
14419 switch (code)
14421 case ABS_EXPR:
14422 return tree_expr_nonzero_warnv_p (op0,
14423 strict_overflow_p);
14425 case NOP_EXPR:
14427 tree inner_type = TREE_TYPE (op0);
14428 tree outer_type = type;
14430 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14431 && tree_expr_nonzero_warnv_p (op0,
14432 strict_overflow_p));
14434 break;
14436 case NON_LVALUE_EXPR:
14437 return tree_expr_nonzero_warnv_p (op0,
14438 strict_overflow_p);
14440 default:
14441 break;
14444 return false;
14447 /* Return true when (CODE OP0 OP1) is known to be nonzero. For floating
14448 point we further ensure that the value is not denormal.
14449 Similar logic is present in nonzero_address in rtlanal.c.
14451 If the return value is based on the assumption that signed overflow
14452 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14453 change *STRICT_OVERFLOW_P. */
14455 bool
14456 tree_binary_nonzero_warnv_p (enum tree_code code,
14457 tree type,
14458 tree op0,
14459 tree op1, bool *strict_overflow_p)
14461 bool sub_strict_overflow_p;
14462 switch (code)
14464 case POINTER_PLUS_EXPR:
14465 case PLUS_EXPR:
14466 if (TYPE_OVERFLOW_UNDEFINED (type))
14468 /* In the presence of negative values it is hard
14469 to say anything. */
14470 sub_strict_overflow_p = false;
14471 if (!tree_expr_nonnegative_warnv_p (op0,
14472 &sub_strict_overflow_p)
14473 || !tree_expr_nonnegative_warnv_p (op1,
14474 &sub_strict_overflow_p))
14475 return false;
14476 /* One of the operands must be positive and the other non-negative. */
14477 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14478 overflows, on a two's-complement machine the sum of two
14479 nonnegative numbers can never be zero. */
14480 return (tree_expr_nonzero_warnv_p (op0,
14481 strict_overflow_p)
14482 || tree_expr_nonzero_warnv_p (op1,
14483 strict_overflow_p));
14485 break;
14487 case MULT_EXPR:
14488 if (TYPE_OVERFLOW_UNDEFINED (type))
14490 if (tree_expr_nonzero_warnv_p (op0,
14491 strict_overflow_p)
14492 && tree_expr_nonzero_warnv_p (op1,
14493 strict_overflow_p))
14495 *strict_overflow_p = true;
14496 return true;
14499 break;
14501 case MIN_EXPR:
14502 sub_strict_overflow_p = false;
14503 if (tree_expr_nonzero_warnv_p (op0,
14504 &sub_strict_overflow_p)
14505 && tree_expr_nonzero_warnv_p (op1,
14506 &sub_strict_overflow_p))
14508 if (sub_strict_overflow_p)
14509 *strict_overflow_p = true;
14511 break;
14513 case MAX_EXPR:
14514 sub_strict_overflow_p = false;
14515 if (tree_expr_nonzero_warnv_p (op0,
14516 &sub_strict_overflow_p))
14518 if (sub_strict_overflow_p)
14519 *strict_overflow_p = true;
14521 /* When both operands are nonzero, MAX must be too. */
14522 if (tree_expr_nonzero_warnv_p (op1,
14523 strict_overflow_p))
14524 return true;
14526 /* MAX where operand 0 is positive is positive. */
14527 return tree_expr_nonnegative_warnv_p (op0,
14528 strict_overflow_p);
14530 /* MAX where operand 1 is positive is positive. */
14531 else if (tree_expr_nonzero_warnv_p (op1,
14532 &sub_strict_overflow_p)
14533 && tree_expr_nonnegative_warnv_p (op1,
14534 &sub_strict_overflow_p))
14536 if (sub_strict_overflow_p)
14537 *strict_overflow_p = true;
14538 return true;
14540 break;
14542 case BIT_IOR_EXPR:
14543 return (tree_expr_nonzero_warnv_p (op1,
14544 strict_overflow_p)
14545 || tree_expr_nonzero_warnv_p (op0,
14546 strict_overflow_p));
14548 default:
14549 break;
14552 return false;
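/* Editorial illustration, not part of the original source: in the
   MULT_EXPR case above, signed x * y with both operands known nonzero
   is reported nonzero only because signed overflow is assumed
   undefined, so *STRICT_OVERFLOW_P is set.  With wrapping arithmetic
   the conclusion would be wrong: in 32 bits, 0x10000 * 0x10000
   wraps to zero.  */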
14555 /* Return true when T is known to be nonzero (e.g. a nonzero constant
14556 or the address of a non-weak object). For floating point we further
14557 ensure that T is not denormal. Similar logic is in nonzero_address in rtlanal.c.
14559 If the return value is based on the assumption that signed overflow
14560 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14561 change *STRICT_OVERFLOW_P. */
14563 bool
14564 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14566 bool sub_strict_overflow_p;
14567 switch (TREE_CODE (t))
14569 case INTEGER_CST:
14570 return !integer_zerop (t);
14572 case ADDR_EXPR:
14574 tree base = get_base_address (TREE_OPERAND (t, 0));
14576 if (!base)
14577 return false;
14579 /* Weak declarations may link to NULL. */
14580 if (VAR_OR_FUNCTION_DECL_P (base))
14581 return !DECL_WEAK (base);
14583 /* Constants are never weak. */
14584 if (CONSTANT_CLASS_P (base))
14585 return true;
14587 return false;
14590 case COND_EXPR:
14591 sub_strict_overflow_p = false;
14592 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14593 &sub_strict_overflow_p)
14594 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14595 &sub_strict_overflow_p))
14597 if (sub_strict_overflow_p)
14598 *strict_overflow_p = true;
14599 return true;
14601 break;
14603 default:
14604 break;
14606 return false;
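/* Editorial illustration, not part of the original source: the
   ADDR_EXPR case above, restated on hypothetical declarations.  */
#if 0
extern int strong_sym;
extern int weak_sym __attribute__ ((weak));
/* &strong_sym is known nonzero; &weak_sym is not, because a weak
   reference may resolve to a null address.  */
#endif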
14609 /* Return true when T is known to be nonzero.
14610 For floating point we further ensure that T is not denormal.
14611 Similar logic is present in nonzero_address in rtlanal.c.
14613 If the return value is based on the assumption that signed overflow
14614 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14615 change *STRICT_OVERFLOW_P. */
14617 bool
14618 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14620 tree type = TREE_TYPE (t);
14621 enum tree_code code;
14623 /* Doing something useful for floating point would need more work. */
14624 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14625 return false;
14627 code = TREE_CODE (t);
14628 switch (TREE_CODE_CLASS (code))
14630 case tcc_unary:
14631 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14632 strict_overflow_p);
14633 case tcc_binary:
14634 case tcc_comparison:
14635 return tree_binary_nonzero_warnv_p (code, type,
14636 TREE_OPERAND (t, 0),
14637 TREE_OPERAND (t, 1),
14638 strict_overflow_p);
14639 case tcc_constant:
14640 case tcc_declaration:
14641 case tcc_reference:
14642 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14644 default:
14645 break;
14648 switch (code)
14650 case TRUTH_NOT_EXPR:
14651 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14652 strict_overflow_p);
14654 case TRUTH_AND_EXPR:
14655 case TRUTH_OR_EXPR:
14656 case TRUTH_XOR_EXPR:
14657 return tree_binary_nonzero_warnv_p (code, type,
14658 TREE_OPERAND (t, 0),
14659 TREE_OPERAND (t, 1),
14660 strict_overflow_p);
14662 case COND_EXPR:
14663 case CONSTRUCTOR:
14664 case OBJ_TYPE_REF:
14665 case ASSERT_EXPR:
14666 case ADDR_EXPR:
14667 case WITH_SIZE_EXPR:
14668 case EXC_PTR_EXPR:
14669 case SSA_NAME:
14670 case FILTER_EXPR:
14671 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14673 case COMPOUND_EXPR:
14674 case MODIFY_EXPR:
14675 case GIMPLE_MODIFY_STMT:
14676 case BIND_EXPR:
14677 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14678 strict_overflow_p);
14680 case SAVE_EXPR:
14681 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14682 strict_overflow_p);
14684 case CALL_EXPR:
14685 return alloca_call_p (t);
14687 default:
14688 break;
14690 return false;
14693 /* Return true when T is known to be nonzero. Handle warnings
14694 about undefined signed overflow. */
14696 bool
14697 tree_expr_nonzero_p (tree t)
14699 bool ret, strict_overflow_p;
14701 strict_overflow_p = false;
14702 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14703 if (strict_overflow_p)
14704 fold_overflow_warning (("assuming signed overflow does not occur when "
14705 "determining that expression is always "
14706 "non-zero"),
14707 WARN_STRICT_OVERFLOW_MISC);
14708 return ret;
14711 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14712 attempt to fold the expression to a constant without modifying TYPE,
14713 OP0 or OP1.
14715 If the expression could be simplified to a constant, then return
14716 the constant. If the expression would not be simplified to a
14717 constant, then return NULL_TREE. */
14719 tree
14720 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14722 tree tem = fold_binary (code, type, op0, op1);
14723 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
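/* Editorial illustration, not part of the original source: a sketch of
   using fold_binary_to_constant; `t' is invented.  2 + 3 folds to the
   constant 5, whereas x + 1 for non-constant x yields NULL_TREE since
   the folded result is not TREE_CONSTANT.  */
#if 0
  tree t = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				    build_int_cst (integer_type_node, 2),
				    build_int_cst (integer_type_node, 3));
#endif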
14726 /* Given the components of a unary expression CODE, TYPE and OP0,
14727 attempt to fold the expression to a constant without modifying
14728 TYPE or OP0.
14730 If the expression could be simplified to a constant, then return
14731 the constant. If the expression would not be simplified to a
14732 constant, then return NULL_TREE. */
14734 tree
14735 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14737 tree tem = fold_unary (code, type, op0);
14738 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14741 /* If EXP represents referencing an element in a constant string
14742 (either via pointer arithmetic or array indexing), return the
14743 tree representing the value accessed, otherwise return NULL. */
14745 tree
14746 fold_read_from_constant_string (tree exp)
14748 if ((TREE_CODE (exp) == INDIRECT_REF
14749 || TREE_CODE (exp) == ARRAY_REF)
14750 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14752 tree exp1 = TREE_OPERAND (exp, 0);
14753 tree index;
14754 tree string;
14756 if (TREE_CODE (exp) == INDIRECT_REF)
14757 string = string_constant (exp1, &index);
14758 else
14760 tree low_bound = array_ref_low_bound (exp);
14761 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14763 /* Optimize the special-case of a zero lower bound.
14765 We convert the low_bound to sizetype to avoid some problems
14766 with constant folding. (E.g. suppose the lower bound is 1,
14767 and its mode is QI. Without the conversion, (ARRAY
14768 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14769 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14770 if (! integer_zerop (low_bound))
14771 index = size_diffop (index, fold_convert (sizetype, low_bound));
14773 string = exp1;
14776 if (string
14777 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14778 && TREE_CODE (string) == STRING_CST
14779 && TREE_CODE (index) == INTEGER_CST
14780 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14781 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14782 == MODE_INT)
14783 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14784 return build_int_cst_type (TREE_TYPE (exp),
14785 (TREE_STRING_POINTER (string)
14786 [TREE_INT_CST_LOW (index)]));
14788 return NULL;
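/* Editorial illustration, not part of the original source: for the
   ARRAY_REF "abc"[1] all the checks above pass (single-byte integer
   elements, constant in-bounds index), so the function returns the
   character constant 'b' (98) in the type of the reference.  */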
14791 /* Return the tree for neg (ARG0) when ARG0 is known to be an
14792 integer, real, or fixed-point constant.
14794 TYPE is the type of the result. */
14796 static tree
14797 fold_negate_const (tree arg0, tree type)
14799 tree t = NULL_TREE;
14801 switch (TREE_CODE (arg0))
14803 case INTEGER_CST:
14805 unsigned HOST_WIDE_INT low;
14806 HOST_WIDE_INT high;
14807 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14808 TREE_INT_CST_HIGH (arg0),
14809 &low, &high);
14810 t = force_fit_type_double (type, low, high, 1,
14811 (overflow | TREE_OVERFLOW (arg0))
14812 && !TYPE_UNSIGNED (type));
14813 break;
14816 case REAL_CST:
14817 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14818 break;
14820 case FIXED_CST:
14822 FIXED_VALUE_TYPE f;
14823 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14824 &(TREE_FIXED_CST (arg0)), NULL,
14825 TYPE_SATURATING (type));
14826 t = build_fixed (type, f);
14827 /* Propagate overflow flags. */
14828 if (overflow_p | TREE_OVERFLOW (arg0))
14830 TREE_OVERFLOW (t) = 1;
14831 TREE_CONSTANT_OVERFLOW (t) = 1;
14833 else if (TREE_CONSTANT_OVERFLOW (arg0))
14834 TREE_CONSTANT_OVERFLOW (t) = 1;
14835 break;
14838 default:
14839 gcc_unreachable ();
14842 return t;
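/* Editorial illustration, not part of the original source: negating
   the most negative value of a signed type overflows, so for 32-bit
   int fold_negate_const maps -2147483648 to itself with TREE_OVERFLOW
   set; for an unsigned type the overflow flag is never set here.  */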
14845 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14846 an integer constant or real constant.
14848 TYPE is the type of the result. */
14850 tree
14851 fold_abs_const (tree arg0, tree type)
14853 tree t = NULL_TREE;
14855 switch (TREE_CODE (arg0))
14857 case INTEGER_CST:
14858 /* If the value is unsigned, then the absolute value is
14859 the same as the ordinary value. */
14860 if (TYPE_UNSIGNED (type))
14861 t = arg0;
14862 /* Similarly, if the value is non-negative. */
14863 else if (INT_CST_LT (integer_minus_one_node, arg0))
14864 t = arg0;
14865 /* If the value is negative, then the absolute value is
14866 its negation. */
14867 else
14869 unsigned HOST_WIDE_INT low;
14870 HOST_WIDE_INT high;
14871 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14872 TREE_INT_CST_HIGH (arg0),
14873 &low, &high);
14874 t = force_fit_type_double (type, low, high, -1,
14875 overflow | TREE_OVERFLOW (arg0));
14877 break;
14879 case REAL_CST:
14880 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14881 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14882 else
14883 t = arg0;
14884 break;
14886 default:
14887 gcc_unreachable ();
14890 return t;
14893 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14894 constant. TYPE is the type of the result. */
14896 static tree
14897 fold_not_const (tree arg0, tree type)
14899 tree t = NULL_TREE;
14901 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14903 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14904 ~TREE_INT_CST_HIGH (arg0), 0,
14905 TREE_OVERFLOW (arg0));
14907 return t;
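/* Editorial illustration, not part of the original source: the
   complement is taken on the full double-word (both the low and high
   halves), then refitted to TYPE; e.g. for 32-bit int, fold_not_const
   maps 5 to -6, since ~5 == -6 in two's complement.  */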
14910 /* Given CODE, a relational operator, the target type, TYPE and two
14911 constant operands OP0 and OP1, return the result of the
14912 relational operation. If the result is not a compile time
14913 constant, then return NULL_TREE. */
14915 static tree
14916 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14918 int result, invert;
14920 /* From here on, the only cases we handle are when the result is
14921 known to be a constant. */
14923 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14925 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14926 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14928 /* Handle the cases where either operand is a NaN. */
14929 if (real_isnan (c0) || real_isnan (c1))
14931 switch (code)
14933 case EQ_EXPR:
14934 case ORDERED_EXPR:
14935 result = 0;
14936 break;
14938 case NE_EXPR:
14939 case UNORDERED_EXPR:
14940 case UNLT_EXPR:
14941 case UNLE_EXPR:
14942 case UNGT_EXPR:
14943 case UNGE_EXPR:
14944 case UNEQ_EXPR:
14945 result = 1;
14946 break;
14948 case LT_EXPR:
14949 case LE_EXPR:
14950 case GT_EXPR:
14951 case GE_EXPR:
14952 case LTGT_EXPR:
14953 if (flag_trapping_math)
14954 return NULL_TREE;
14955 result = 0;
14956 break;
14958 default:
14959 gcc_unreachable ();
14962 return constant_boolean_node (result, type);
14965 return constant_boolean_node (real_compare (code, c0, c1), type);
14968 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14970 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14971 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14972 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14975 /* Handle equality/inequality of complex constants. */
14976 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14978 tree rcond = fold_relational_const (code, type,
14979 TREE_REALPART (op0),
14980 TREE_REALPART (op1));
14981 tree icond = fold_relational_const (code, type,
14982 TREE_IMAGPART (op0),
14983 TREE_IMAGPART (op1));
14984 if (code == EQ_EXPR)
14985 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14986 else if (code == NE_EXPR)
14987 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14988 else
14989 return NULL_TREE;
14992 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14994 To compute GT, swap the arguments and do LT.
14995 To compute GE, do LT and invert the result.
14996 To compute LE, swap the arguments, do LT and invert the result.
14997 To compute NE, do EQ and invert the result.
14999 Therefore, the code below must handle only EQ and LT. */
15001 if (code == LE_EXPR || code == GT_EXPR)
15003 tree tem = op0;
15004 op0 = op1;
15005 op1 = tem;
15006 code = swap_tree_comparison (code);
15009 /* Note that it is safe to invert for real values here because we
15010 have already handled the one case where it matters. */
15012 invert = 0;
15013 if (code == NE_EXPR || code == GE_EXPR)
15015 invert = 1;
15016 code = invert_tree_comparison (code, false);
15019 /* Compute a result for LT or EQ if args permit;
15020 otherwise return NULL_TREE. */
15021 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15023 if (code == EQ_EXPR)
15024 result = tree_int_cst_equal (op0, op1);
15025 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15026 result = INT_CST_LT_UNSIGNED (op0, op1);
15027 else
15028 result = INT_CST_LT (op0, op1);
15030 else
15031 return NULL_TREE;
15033 if (invert)
15034 result ^= 1;
15035 return constant_boolean_node (result, type);
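/* Editorial illustration, not part of the original source: a worked
   instance of the canonicalization above.  For 3 >= 2, GE becomes the
   inverse of LT, so 3 < 2 (false) is computed and inverted, yielding
   true; for 3 > 2 the operands are swapped first and 2 < 3 is
   computed directly.  */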
15038 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15039 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15040 itself. */
15042 tree
15043 fold_build_cleanup_point_expr (tree type, tree expr)
15045 /* If the expression does not have side effects then we don't have to wrap
15046 it with a cleanup point expression. */
15047 if (!TREE_SIDE_EFFECTS (expr))
15048 return expr;
15050 /* If the expression is a return, check whether the expression inside
15051 the return, or the right-hand side of the modify expression inside
15052 the return, has side effects. If either has none, we don't need to
15053 wrap the expression in a cleanup point expression. Note we don't check
15054 the left-hand side of the modify because it should always be a return decl. */
15055 if (TREE_CODE (expr) == RETURN_EXPR)
15057 tree op = TREE_OPERAND (expr, 0);
15058 if (!op || !TREE_SIDE_EFFECTS (op))
15059 return expr;
15060 op = TREE_OPERAND (op, 1);
15061 if (!TREE_SIDE_EFFECTS (op))
15062 return expr;
15065 return build1 (CLEANUP_POINT_EXPR, type, expr);
15068 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15069 of an indirection through OP0, or NULL_TREE if no simplification is
15070 possible. */
15072 tree
15073 fold_indirect_ref_1 (tree type, tree op0)
15075 tree sub = op0;
15076 tree subtype;
15078 STRIP_NOPS (sub);
15079 subtype = TREE_TYPE (sub);
15080 if (!POINTER_TYPE_P (subtype))
15081 return NULL_TREE;
15083 if (TREE_CODE (sub) == ADDR_EXPR)
15085 tree op = TREE_OPERAND (sub, 0);
15086 tree optype = TREE_TYPE (op);
15087 /* *&CONST_DECL -> the value of the const decl. */
15088 if (TREE_CODE (op) == CONST_DECL)
15089 return DECL_INITIAL (op);
15090 /* *&p => p; make sure to handle *&"str"[cst] here. */
15091 if (type == optype)
15093 tree fop = fold_read_from_constant_string (op);
15094 if (fop)
15095 return fop;
15096 else
15097 return op;
15099 /* *(foo *)&fooarray => fooarray[0] */
15100 else if (TREE_CODE (optype) == ARRAY_TYPE
15101 && type == TREE_TYPE (optype))
15103 tree type_domain = TYPE_DOMAIN (optype);
15104 tree min_val = size_zero_node;
15105 if (type_domain && TYPE_MIN_VALUE (type_domain))
15106 min_val = TYPE_MIN_VALUE (type_domain);
15107 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15109 /* *(foo *)&complexfoo => __real__ complexfoo */
15110 else if (TREE_CODE (optype) == COMPLEX_TYPE
15111 && type == TREE_TYPE (optype))
15112 return fold_build1 (REALPART_EXPR, type, op);
15113 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15114 else if (TREE_CODE (optype) == VECTOR_TYPE
15115 && type == TREE_TYPE (optype))
15117 tree part_width = TYPE_SIZE (type);
15118 tree index = bitsize_int (0);
15119 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15123 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15124 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15125 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15127 tree op00 = TREE_OPERAND (sub, 0);
15128 tree op01 = TREE_OPERAND (sub, 1);
15129 tree op00type;
15131 STRIP_NOPS (op00);
15132 op00type = TREE_TYPE (op00);
15133 if (TREE_CODE (op00) == ADDR_EXPR
15134 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15135 && type == TREE_TYPE (TREE_TYPE (op00type)))
15137 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15138 tree part_width = TYPE_SIZE (type);
15139 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15140 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15141 tree index = bitsize_int (indexi);
15143 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15144 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15145 part_width, index);
15151 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15152 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15153 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15155 tree op00 = TREE_OPERAND (sub, 0);
15156 tree op01 = TREE_OPERAND (sub, 1);
15157 tree op00type;
15159 STRIP_NOPS (op00);
15160 op00type = TREE_TYPE (op00);
15161 if (TREE_CODE (op00) == ADDR_EXPR
15162 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15163 && type == TREE_TYPE (TREE_TYPE (op00type)))
15165 tree size = TYPE_SIZE_UNIT (type);
15166 if (tree_int_cst_equal (size, op01))
15167 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15171 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15172 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15173 && type == TREE_TYPE (TREE_TYPE (subtype)))
15175 tree type_domain;
15176 tree min_val = size_zero_node;
15177 sub = build_fold_indirect_ref (sub);
15178 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15179 if (type_domain && TYPE_MIN_VALUE (type_domain))
15180 min_val = TYPE_MIN_VALUE (type_domain);
15181 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15184 return NULL_TREE;
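/* Editorial illustration, not part of the original source: in the
   complex POINTER_PLUS_EXPR case above, given `complex double c', the
   access *((double *) &c + 1) carries a byte offset equal to
   TYPE_SIZE_UNIT (double), so it folds to __imag__ c; with any other
   offset the tree_int_cst_equal test fails and this pattern does not
   apply.  */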
15187 /* Builds an expression for an indirection through T, simplifying some
15188 cases. */
15190 tree
15191 build_fold_indirect_ref (tree t)
15193 tree type = TREE_TYPE (TREE_TYPE (t));
15194 tree sub = fold_indirect_ref_1 (type, t);
15196 if (sub)
15197 return sub;
15198 else
15199 return build1 (INDIRECT_REF, type, t);
15202 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15204 tree
15205 fold_indirect_ref (tree t)
15207 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15209 if (sub)
15210 return sub;
15211 else
15212 return t;
15215 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15216 whose result is ignored. The type of the returned tree need not be
15217 the same as the original expression. */
15219 tree
15220 fold_ignored_result (tree t)
15222 if (!TREE_SIDE_EFFECTS (t))
15223 return integer_zero_node;
15225 for (;;)
15226 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15228 case tcc_unary:
15229 t = TREE_OPERAND (t, 0);
15230 break;
15232 case tcc_binary:
15233 case tcc_comparison:
15234 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15235 t = TREE_OPERAND (t, 0);
15236 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15237 t = TREE_OPERAND (t, 1);
15238 else
15239 return t;
15240 break;
15242 case tcc_expression:
15243 switch (TREE_CODE (t))
15245 case COMPOUND_EXPR:
15246 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15247 return t;
15248 t = TREE_OPERAND (t, 0);
15249 break;
15251 case COND_EXPR:
15252 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15253 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15254 return t;
15255 t = TREE_OPERAND (t, 0);
15256 break;
15258 default:
15259 return t;
15261 break;
15263 default:
15264 return t;
15268 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15269 This can only be applied to objects of a sizetype. */
15271 tree
15272 round_up (tree value, int divisor)
15274 tree div = NULL_TREE;
15276 gcc_assert (divisor > 0);
15277 if (divisor == 1)
15278 return value;
15280 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15281 have to do anything. Only do this when we are not given a const,
15282 because for a constant this check is more expensive than just
15283 doing the rounding. */
15284 if (TREE_CODE (value) != INTEGER_CST)
15286 div = build_int_cst (TREE_TYPE (value), divisor);
15288 if (multiple_of_p (TREE_TYPE (value), value, div))
15289 return value;
15292 /* If divisor is a power of two, simplify this to bit manipulation. */
15293 if (divisor == (divisor & -divisor))
15295 if (TREE_CODE (value) == INTEGER_CST)
15297 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15298 unsigned HOST_WIDE_INT high;
15299 bool overflow_p;
15301 if ((low & (divisor - 1)) == 0)
15302 return value;
15304 overflow_p = TREE_OVERFLOW (value);
15305 high = TREE_INT_CST_HIGH (value);
15306 low &= ~(divisor - 1);
15307 low += divisor;
15308 if (low == 0)
15310 high++;
15311 if (high == 0)
15312 overflow_p = true;
15315 return force_fit_type_double (TREE_TYPE (value), low, high,
15316 -1, overflow_p);
15318 else
15320 tree t;
15322 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15323 value = size_binop (PLUS_EXPR, value, t);
15324 t = build_int_cst (TREE_TYPE (value), -divisor);
15325 value = size_binop (BIT_AND_EXPR, value, t);
15328 else
15330 if (!div)
15331 div = build_int_cst (TREE_TYPE (value), divisor);
15332 value = size_binop (CEIL_DIV_EXPR, value, div);
15333 value = size_binop (MULT_EXPR, value, div);
15336 return value;
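/* Editorial illustration, not part of the original source: worked
   instances of round_up.  For a power-of-two divisor, round_up (37, 8)
   computes (37 & ~7) + 8 == 40 on the constant path and
   (value + 7) & -8 on the non-constant path; otherwise it falls back
   to ceiling division, e.g. round_up (37, 6) == 7 * 6 == 42.  */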
15339 /* Likewise, but round down. */
15341 tree
15342 round_down (tree value, int divisor)
15344 tree div = NULL_TREE;
15346 gcc_assert (divisor > 0);
15347 if (divisor == 1)
15348 return value;
15350 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15351 have to do anything. Only do this when we are not given a const,
15352 because for a constant this check is more expensive than just
15353 doing the rounding. */
15354 if (TREE_CODE (value) != INTEGER_CST)
15356 div = build_int_cst (TREE_TYPE (value), divisor);
15358 if (multiple_of_p (TREE_TYPE (value), value, div))
15359 return value;
15362 /* If divisor is a power of two, simplify this to bit manipulation. */
15363 if (divisor == (divisor & -divisor))
15365 tree t;
15367 t = build_int_cst (TREE_TYPE (value), -divisor);
15368 value = size_binop (BIT_AND_EXPR, value, t);
15370 else
15372 if (!div)
15373 div = build_int_cst (TREE_TYPE (value), divisor);
15374 value = size_binop (FLOOR_DIV_EXPR, value, div);
15375 value = size_binop (MULT_EXPR, value, div);
15378 return value;
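/* Editorial illustration, not part of the original source: worked
   instances of round_down.  For a power-of-two divisor a single mask
   suffices, round_down (37, 8) == (37 & -8) == 32; otherwise floor
   division is used, round_down (37, 6) == 6 * 6 == 36.  */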
15381 /* Returns the pointer to the base of the object addressed by EXP and
15382 extracts the information about the offset of the access, storing it
15383 in PBITPOS and POFFSET. */
15385 static tree
15386 split_address_to_core_and_offset (tree exp,
15387 HOST_WIDE_INT *pbitpos, tree *poffset)
15389 tree core;
15390 enum machine_mode mode;
15391 int unsignedp, volatilep;
15392 HOST_WIDE_INT bitsize;
15394 if (TREE_CODE (exp) == ADDR_EXPR)
15396 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15397 poffset, &mode, &unsignedp, &volatilep,
15398 false);
15399 core = fold_addr_expr (core);
15401 else
15403 core = exp;
15404 *pbitpos = 0;
15405 *poffset = NULL_TREE;
15408 return core;
15411 /* Returns true if the addresses of E1 and E2 differ by a constant, false
15412 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15414 bool
15415 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15417 tree core1, core2;
15418 HOST_WIDE_INT bitpos1, bitpos2;
15419 tree toffset1, toffset2, tdiff, type;
15421 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15422 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15424 if (bitpos1 % BITS_PER_UNIT != 0
15425 || bitpos2 % BITS_PER_UNIT != 0
15426 || !operand_equal_p (core1, core2, 0))
15427 return false;
15429 if (toffset1 && toffset2)
15431 type = TREE_TYPE (toffset1);
15432 if (type != TREE_TYPE (toffset2))
15433 toffset2 = fold_convert (type, toffset2);
15435 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15436 if (!cst_and_fits_in_hwi (tdiff))
15437 return false;
15439 *diff = int_cst_value (tdiff);
15441 else if (toffset1 || toffset2)
15443 /* If only one of the offsets is non-constant, the difference cannot
15444 be a constant. */
15445 return false;
15447 else
15448 *diff = 0;
15450 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15451 return true;
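/* Editorial illustration, not part of the original source: with
   `int a[10]', 32-bit int and 8-bit units, &a[3] and &a[1] both split
   to core `a' with bit positions 96 and 32 and no variable offset, so
   *DIFF is set to (96 - 32) / BITS_PER_UNIT == 8 bytes and the
   function returns true.  */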
15454 /* Simplify the floating point expression EXP when the sign of the
15455 result is not significant. Return NULL_TREE if no simplification
15456 is possible. */
15458 tree
15459 fold_strip_sign_ops (tree exp)
15461 tree arg0, arg1;
15463 switch (TREE_CODE (exp))
15465 case ABS_EXPR:
15466 case NEGATE_EXPR:
15467 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15468 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15470 case MULT_EXPR:
15471 case RDIV_EXPR:
15472 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15473 return NULL_TREE;
15474 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15475 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15476 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15477 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15478 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15479 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15480 break;
15482 case COMPOUND_EXPR:
15483 arg0 = TREE_OPERAND (exp, 0);
15484 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15485 if (arg1)
15486 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15487 break;
15489 case COND_EXPR:
15490 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15491 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15492 if (arg0 || arg1)
15493 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15494 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15495 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15496 break;
15498 case CALL_EXPR:
15500 const enum built_in_function fcode = builtin_mathfn_code (exp);
15501 switch (fcode)
15503 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15504 /* Strip the copysign function call; return the 1st argument. */
15505 arg0 = CALL_EXPR_ARG (exp, 0);
15506 arg1 = CALL_EXPR_ARG (exp, 1);
15507 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15509 default:
15510 /* Strip sign ops from the argument of "odd" math functions. */
15511 if (negate_mathfn_p (fcode))
15513 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15514 if (arg0)
15515 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15517 break;
15520 break;
15522 default:
15523 break;
15525 return NULL_TREE;
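/* Editorial illustration, not part of the original source: when the
   caller ignores the sign of the result (and sign-dependent rounding
   is not honored), fold_strip_sign_ops rewrites -x * -y as x * y via
   the MULT_EXPR case, and sin (-x) as sin (x) via the CALL_EXPR case,
   since negate_mathfn_p reports sin as odd.  */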