/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

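/* Two illustrations, assuming a host with a 64-bit HOST_WIDE_INT: adding
   0x8000000000000000 to itself wraps SUM to 0; ~(A ^ B) is then all ones
   and A ^ SUM has the sign bit set, so the macro reports overflow.
   Adding -1 and 1 also gives SUM == 0, but A and B differ in sign,
   ~(A ^ B) has a clear sign bit, and no overflow is reported.  */
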
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

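/* For example, on a host where HOST_WIDE_INT is 64 bits wide (so the
   half-word width is 32 and BASE is 1 << 32):

     LOWPART  (0x123456789ABCDEF0) == 0x9ABCDEF0
     HIGHPART (0x123456789ABCDEF0) == 0x12345678

   and 0x9ABCDEF0 + 0x12345678 * BASE reconstructs the original value.  */
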
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

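/* A round-trip sketch (again assuming a 64-bit HOST_WIDE_INT):

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (words, 0x9ABCDEF012345678, 0x1122334455667788);
     decode (words, &lo, &hi);

   leaves words[] as { 0x12345678, 0x9ABCDEF0, 0x55667788, 0x11223344 }
   after the encode, and restores the original low/high pair after the
   decode.  Because each element holds only half a word, the product of
   any two elements fits in one HOST_WIDE_INT, which is what the
   multiplication and division routines below rely on.  */
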
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

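/* For example (64-bit HOST_WIDE_INT, TYPE a signed 8-bit type):
   fit_double_type (0x1ff, 0, &lv, &hv, type) first masks the value
   down to 0xff, then sign extends it, leaving *lv == *hv == -1
   (the double-word representation of -1), and returns nonzero
   because the result no longer equals the 0x1ff:0 passed in.  */
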
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates whether we are interested in overflow of the
   value: when >0 we are only interested in signed overflow, for <0 we
   are interested in any overflow.  OVERFLOWED indicates whether
   overflow has already occurred.  We force the value to be within
   range of TYPE (by setting to 0 or 1 all the bits outside the type's
   range).  We set TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

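/* Example: adding 1 to the all-ones doubleword.  The low half wraps
   to zero, (l < l1) carries a 1 into the high half, and the high half
   wraps from -1 to 0.  With UNSIGNED_P set this reports overflow,
   since 0 compares below the original high word; without it the
   operands read as -1 + 1 and OVERFLOW_SUM_SIGN correctly reports no
   signed overflow.  */
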
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

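/* Why the correction above works: if ARG1 is negative, its unsigned
   double-word reading exceeds its signed value by 2^(2*HOST_BITS_PER_WIDE_INT),
   so the unsigned product exceeds the signed one by ARG2 shifted into
   the top half; adding -ARG2 there (via neg_double/add_double) removes
   the excess, and symmetrically for ARG2.  After the fix-up, the signed
   product fits iff the top half is the sign extension of the low half:
   all-zero bits when *hv >= 0, all-one bits when *hv < 0, which is
   exactly what the final return statement tests.  */
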
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

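/* For instance, with PREC == 2 * HOST_BITS_PER_WIDE_INT and COUNT == 1,
   shifting the doubleword value -2 right arithmetically (ARITH nonzero)
   yields -1: the bit shifted out of the high half lands in the top of
   the low half, and signmask refills the vacated sign bit.  The same
   call with ARITH zero yields *hv == 0x7fff..f and *lv all ones, the
   logical result.  */
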
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  Rotate left if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.  It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

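/* A worked example: dividing -7 by 2 with UNS == 0.  The unsigned
   trial division yields 3, negated to a quotient of -3 with trial
   remainder -1.  TRUNC_DIV_EXPR keeps -3; FLOOR_DIV_EXPR steps down
   to -4 (true remainder 1); CEIL_DIV_EXPR keeps -3 since the ratio
   is negative; and ROUND_DIV_EXPR sees 2 * |rem| == |den| and rounds
   the tie away from zero, also giving -4.  */
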
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h, type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);

  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

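/* A hypothetical caller pairs the two entry points around a speculative
   fold and only releases the warning when the folded result is kept:

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   Passing 0 as the CODE argument means "use the deferred level".  */
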
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

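/* For example, for a 32-bit signed type this returns false only for
   the most negative value 0x80000000, whose negation is not
   representable; every other value compares unequal to the lone
   sign-bit pattern (unsigned HOST_WIDE_INT) 1 << (prec - 1).  */
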
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

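/* Example: for IN == x - 5 and CODE == PLUS_EXPR, the MINUS_EXPR is
   decomposed with the subtracted literal routed to *MINUS_LITP: the
   caller gets back var == x with *litp == 0, *minus_litp == 5 and
   *conp == 0.  With NEGATE_P set, the literal would move back to
   *LITP and x would be wrapped in a NEGATE_EXPR instead.  */
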
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* For shifts and rotates the second operand is a count, whose
         type need not match the type of the value being shifted.  */
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

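/* As an illustration, folding INT_MAX + 1 for a 32-bit signed type on
   a 64-bit HOST_WIDE_INT host: add_double itself sees no double-word
   overflow, but force_fit_type_double truncates 0x80000000 back into
   the 32-bit type, sign extends it to -2147483648, and flags
   TREE_OVERFLOW on the (unshared) result node because the value
   changed.  With NOTRUNC nonzero the raw 0x80000000 would be kept
   instead.  */
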
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

1806 static tree
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 /* Sanity check for the recursive cases. */
1810 if (!arg1 || !arg2)
1811 return NULL_TREE;
1813 STRIP_NOPS (arg1);
1814 STRIP_NOPS (arg2);
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1819 if (TREE_CODE (arg1) == REAL_CST)
1821 enum machine_mode mode;
1822 REAL_VALUE_TYPE d1;
1823 REAL_VALUE_TYPE d2;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1826 bool inexact;
1827 tree t, type;
1829 /* The following codes are handled by real_arithmetic. */
1830 switch (code)
1832 case PLUS_EXPR:
1833 case MINUS_EXPR:
1834 case MULT_EXPR:
1835 case RDIV_EXPR:
1836 case MIN_EXPR:
1837 case MAX_EXPR:
1838 break;
1840 default:
1841 return NULL_TREE;
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1850 /* Don't perform operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1854 return NULL_TREE;
1856 /* Don't perform operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1861 return NULL_TREE;
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1866 return arg1;
1867 else if (REAL_VALUE_ISNAN (d2))
1868 return arg2;
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1880 return NULL_TREE;
1882 /* Don't constant fold this floating point operation if the
1883 result may dependent upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1888 && (inexact || !real_identical (&result, &value)))
1889 return NULL_TREE;
1891 t = build_real (type, result);
1893 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1894 return t;
1897 if (TREE_CODE (arg1) == FIXED_CST)
1899 FIXED_VALUE_TYPE f1;
1900 FIXED_VALUE_TYPE f2;
1901 FIXED_VALUE_TYPE result;
1902 tree t, type;
1903 int sat_p;
1904 bool overflow_p;
1906 /* The following codes are handled by fixed_arithmetic. */
1907 switch (code)
1909 case PLUS_EXPR:
1910 case MINUS_EXPR:
1911 case MULT_EXPR:
1912 case TRUNC_DIV_EXPR:
1913 f2 = TREE_FIXED_CST (arg2);
1914 break;
1916 case LSHIFT_EXPR:
1917 case RSHIFT_EXPR:
1918 f2.data.high = TREE_INT_CST_HIGH (arg2);
1919 f2.data.low = TREE_INT_CST_LOW (arg2);
1920 f2.mode = SImode;
1921 break;
1923 default:
1924 return NULL_TREE;
1927 f1 = TREE_FIXED_CST (arg1);
1928 type = TREE_TYPE (arg1);
1929 sat_p = TYPE_SATURATING (type);
1930 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1931 t = build_fixed (type, result);
1932 /* Propagate overflow flags. */
1933 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1935 TREE_OVERFLOW (t) = 1;
1936 TREE_CONSTANT_OVERFLOW (t) = 1;
1938 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1939 TREE_CONSTANT_OVERFLOW (t) = 1;
1940 return t;
1943 if (TREE_CODE (arg1) == COMPLEX_CST)
1945 tree type = TREE_TYPE (arg1);
1946 tree r1 = TREE_REALPART (arg1);
1947 tree i1 = TREE_IMAGPART (arg1);
1948 tree r2 = TREE_REALPART (arg2);
1949 tree i2 = TREE_IMAGPART (arg2);
1950 tree real, imag;
1952 switch (code)
1954 case PLUS_EXPR:
1955 case MINUS_EXPR:
1956 real = const_binop (code, r1, r2, notrunc);
1957 imag = const_binop (code, i1, i2, notrunc);
1958 break;
1960 case MULT_EXPR:
1961 real = const_binop (MINUS_EXPR,
1962 const_binop (MULT_EXPR, r1, r2, notrunc),
1963 const_binop (MULT_EXPR, i1, i2, notrunc),
1964 notrunc);
1965 imag = const_binop (PLUS_EXPR,
1966 const_binop (MULT_EXPR, r1, i2, notrunc),
1967 const_binop (MULT_EXPR, i1, r2, notrunc),
1968 notrunc);
1969 break;
1971 case RDIV_EXPR:
1973 tree magsquared
1974 = const_binop (PLUS_EXPR,
1975 const_binop (MULT_EXPR, r2, r2, notrunc),
1976 const_binop (MULT_EXPR, i2, i2, notrunc),
1977 notrunc);
1978 tree t1
1979 = const_binop (PLUS_EXPR,
1980 const_binop (MULT_EXPR, r1, r2, notrunc),
1981 const_binop (MULT_EXPR, i1, i2, notrunc),
1982 notrunc);
1983 tree t2
1984 = const_binop (MINUS_EXPR,
1985 const_binop (MULT_EXPR, i1, r2, notrunc),
1986 const_binop (MULT_EXPR, r1, i2, notrunc),
1987 notrunc);
1989 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1990 code = TRUNC_DIV_EXPR;
1992 real = const_binop (code, t1, magsquared, notrunc);
1993 imag = const_binop (code, t2, magsquared, notrunc);
1995 break;
1997 default:
1998 return NULL_TREE;
2001 if (real && imag)
2002 return build_complex (type, real, imag);
2005 return NULL_TREE;
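
/* Illustrative sketch, not part of the original source: the complex
   RDIV_EXPR case above folds division with the textbook formula
   (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c*c + d*d).  The hypothetical
   helper below performs the same computation on plain doubles;
   magsquared, t1 and t2 mirror the temporaries used above.  */

static void
complex_div_example (double a, double b, double c, double d,
		     double *re, double *im)
{
  double magsquared = c * c + d * d;	/* r2*r2 + i2*i2 */
  double t1 = a * c + b * d;		/* r1*r2 + i1*i2 */
  double t2 = b * c - a * d;		/* i1*r2 - r1*i2 */
  *re = t1 / magsquared;
  *im = t2 / magsquared;
}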
2008 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2009 indicates which particular sizetype to create. */
2011 tree
2012 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2014 return build_int_cst (sizetype_tab[(int) kind], number);
2017 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2018 is a tree code. The type of the result is taken from the operands.
2019 Both must be equivalent integer types, a la int_binop_types_match_p.
2020 If the operands are constant, so is the result. */
2022 tree
2023 size_binop (enum tree_code code, tree arg0, tree arg1)
2025 tree type = TREE_TYPE (arg0);
2027 if (arg0 == error_mark_node || arg1 == error_mark_node)
2028 return error_mark_node;
2030 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2031 TREE_TYPE (arg1)));
2033 /* Handle the special case of two integer constants faster. */
2034 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2036 /* And some specific cases even faster than that. */
2037 if (code == PLUS_EXPR)
2039 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2040 return arg1;
2041 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2042 return arg0;
2044 else if (code == MINUS_EXPR)
2046 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2047 return arg0;
2049 else if (code == MULT_EXPR)
2051 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2052 return arg1;
2055 /* Handle general case of two integer constants. */
2056 return int_const_binop (code, arg0, arg1, 0);
2059 return fold_build2 (code, type, arg0, arg1);
2062 /* Given two values, either both of sizetype or both of bitsizetype,
2063 compute the difference between the two values. Return the value
2064 in signed type corresponding to the type of the operands. */
2066 tree
2067 size_diffop (tree arg0, tree arg1)
2069 tree type = TREE_TYPE (arg0);
2070 tree ctype;
2072 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2073 TREE_TYPE (arg1)));
2075 /* If the type is already signed, just do the simple thing. */
2076 if (!TYPE_UNSIGNED (type))
2077 return size_binop (MINUS_EXPR, arg0, arg1);
2079 if (type == sizetype)
2080 ctype = ssizetype;
2081 else if (type == bitsizetype)
2082 ctype = sbitsizetype;
2083 else
2084 ctype = signed_type_for (type);
2086 /* If either operand is not a constant, do the conversions to the signed
2087 type and subtract. The hardware will do the right thing with any
2088 overflow in the subtraction. */
2089 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2090 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2091 fold_convert (ctype, arg1));
2093 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2094 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2095 overflow) and negate (which can't either). Special-case a result
2096 of zero while we're here. */
2097 if (tree_int_cst_equal (arg0, arg1))
2098 return build_int_cst (ctype, 0);
2099 else if (tree_int_cst_lt (arg1, arg0))
2100 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2101 else
2102 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2103 fold_convert (ctype, size_binop (MINUS_EXPR,
2104 arg1, arg0)));
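
/* Illustrative sketch, not part of the original source: size_diffop's
   strategy on plain C types.  Subtract in the direction that is known
   not to overflow, then negate in the signed type if the operands were
   in the other order.  The helper name is hypothetical.  */

static long
size_diffop_example (unsigned long arg0, unsigned long arg1)
{
  if (arg0 == arg1)
    return 0;
  else if (arg1 < arg0)
    return (long) (arg0 - arg1);	/* Difference is non-negative.  */
  else
    return -(long) (arg1 - arg0);	/* Subtract the safe way, negate.  */
}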
2107 /* A subroutine of fold_convert_const handling conversions of an
2108 INTEGER_CST to another integer type. */
2110 static tree
2111 fold_convert_const_int_from_int (tree type, const_tree arg1)
2113 tree t;
2115 /* Given an integer constant, make new constant with new type,
2116 appropriately sign-extended or truncated. */
2117 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2118 TREE_INT_CST_HIGH (arg1),
2119 /* Don't set the overflow when
2120 converting from a pointer, */
2121 !POINTER_TYPE_P (TREE_TYPE (arg1))
2122 /* or to a sizetype with same signedness
2123 and the precision is unchanged.
2124 ??? sizetype is always sign-extended,
2125 but its signedness depends on the
2126 frontend. Thus we see spurious overflows
2127 here if we do not check this. */
2128 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2129 == TYPE_PRECISION (type))
2130 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2131 == TYPE_UNSIGNED (type))
2132 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2133 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2134 || (TREE_CODE (type) == INTEGER_TYPE
2135 && TYPE_IS_SIZETYPE (type)))),
2136 (TREE_INT_CST_HIGH (arg1) < 0
2137 && (TYPE_UNSIGNED (type)
2138 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2139 | TREE_OVERFLOW (arg1));
2141 return t;
2144 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2145 to an integer type. */
2147 static tree
2148 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2150 int overflow = 0;
2151 tree t;
2153 /* The following code implements the floating point to integer
2154 conversion rules required by the Java Language Specification,
2155 that IEEE NaNs are mapped to zero and values that overflow
2156 the target precision saturate, i.e. values greater than
2157 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2158 are mapped to INT_MIN. These semantics are allowed by the
2159 C and C++ standards that simply state that the behavior of
2160 FP-to-integer conversion is unspecified upon overflow. */
2162 HOST_WIDE_INT high, low;
2163 REAL_VALUE_TYPE r;
2164 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2166 switch (code)
2168 case FIX_TRUNC_EXPR:
2169 real_trunc (&r, VOIDmode, &x);
2170 break;
2172 default:
2173 gcc_unreachable ();
2176 /* If R is NaN, return zero and show we have an overflow. */
2177 if (REAL_VALUE_ISNAN (r))
2179 overflow = 1;
2180 high = 0;
2181 low = 0;
2184 /* See if R is less than the lower bound or greater than the
2185 upper bound. */
2187 if (! overflow)
2189 tree lt = TYPE_MIN_VALUE (type);
2190 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2191 if (REAL_VALUES_LESS (r, l))
2193 overflow = 1;
2194 high = TREE_INT_CST_HIGH (lt);
2195 low = TREE_INT_CST_LOW (lt);
2199 if (! overflow)
2201 tree ut = TYPE_MAX_VALUE (type);
2202 if (ut)
2204 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2205 if (REAL_VALUES_LESS (u, r))
2207 overflow = 1;
2208 high = TREE_INT_CST_HIGH (ut);
2209 low = TREE_INT_CST_LOW (ut);
2214 if (! overflow)
2215 REAL_VALUE_TO_INT (&low, &high, r);
2217 t = force_fit_type_double (type, low, high, -1,
2218 overflow | TREE_OVERFLOW (arg1));
2219 return t;
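
/* Illustrative sketch, not part of the original source: the saturating
   semantics implemented above, expressed on host doubles and ints.  NaN
   maps to zero and out-of-range values clamp to the type bounds; the
   helper name is hypothetical, and INT_MIN/INT_MAX (from <limits.h>)
   stand in for TYPE_MIN_VALUE/TYPE_MAX_VALUE.  */

static int
fp_to_int_saturating_example (double r)
{
  if (r != r)			/* NaN: map to zero (with overflow).  */
    return 0;
  if (r < (double) INT_MIN)	/* Below the lower bound: saturate.  */
    return INT_MIN;
  if (r > (double) INT_MAX)	/* Above the upper bound: saturate.  */
    return INT_MAX;
  return (int) r;		/* In range: truncate toward zero.  */
}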
2222 /* A subroutine of fold_convert_const handling conversions of a
2223 FIXED_CST to an integer type. */
2225 static tree
2226 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2228 tree t;
2229 double_int temp, temp_trunc;
2230 unsigned int mode;
2232 /* Right shift FIXED_CST to temp by fbit. */
2233 temp = TREE_FIXED_CST (arg1).data;
2234 mode = TREE_FIXED_CST (arg1).mode;
2235 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2237 lshift_double (temp.low, temp.high,
2238 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2239 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2241 /* Left shift temp to temp_trunc by fbit. */
2242 lshift_double (temp.low, temp.high,
2243 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2244 &temp_trunc.low, &temp_trunc.high,
2245 SIGNED_FIXED_POINT_MODE_P (mode));
2247 else
2249 temp.low = 0;
2250 temp.high = 0;
2251 temp_trunc.low = 0;
2252 temp_trunc.high = 0;
2255 /* If FIXED_CST is negative, we need to round the value toward 0
2256 by adding 1 to temp when any discarded fractional bits are nonzero. */
2257 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2258 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2260 double_int one;
2261 one.low = 1;
2262 one.high = 0;
2263 temp = double_int_add (temp, one);
2266 /* Given a fixed-point constant, make new constant with new type,
2267 appropriately sign-extended or truncated. */
2268 t = force_fit_type_double (type, temp.low, temp.high, -1,
2269 (temp.high < 0
2270 && (TYPE_UNSIGNED (type)
2271 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2272 | TREE_OVERFLOW (arg1));
2274 return t;
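
/* Illustrative sketch, not part of the original source: the
   round-toward-zero correction above on a plain scaled integer,
   assuming an arithmetic right shift (which rounds toward minus
   infinity).  For negative values with nonzero fraction bits we add 1
   back.  The helper name is hypothetical; FBITS plays the role of
   GET_MODE_FBIT.  */

static int
fixed_to_int_example (int scaled, int fbits)
{
  int t = scaled >> fbits;	/* Rounds toward minus infinity.  */
  if (scaled < 0 && (scaled & ((1 << fbits) - 1)) != 0)
    t += 1;			/* Correct to round toward zero.  */
  return t;
}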
2277 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2278 to another floating point type. */
2280 static tree
2281 fold_convert_const_real_from_real (tree type, const_tree arg1)
2283 REAL_VALUE_TYPE value;
2284 tree t;
2286 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2287 t = build_real (type, value);
2289 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2290 return t;
2293 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2294 to a floating point type. */
2296 static tree
2297 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2299 REAL_VALUE_TYPE value;
2300 tree t;
2302 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2303 t = build_real (type, value);
2305 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2306 TREE_CONSTANT_OVERFLOW (t)
2307 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2308 return t;
2311 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2312 to another fixed-point type. */
2314 static tree
2315 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2317 FIXED_VALUE_TYPE value;
2318 tree t;
2319 bool overflow_p;
2321 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2322 TYPE_SATURATING (type));
2323 t = build_fixed (type, value);
2325 /* Propagate overflow flags. */
2326 if (overflow_p | TREE_OVERFLOW (arg1))
2328 TREE_OVERFLOW (t) = 1;
2329 TREE_CONSTANT_OVERFLOW (t) = 1;
2331 else if (TREE_CONSTANT_OVERFLOW (arg1))
2332 TREE_CONSTANT_OVERFLOW (t) = 1;
2333 return t;
2336 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2337 to a fixed-point type. */
2339 static tree
2340 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2342 FIXED_VALUE_TYPE value;
2343 tree t;
2344 bool overflow_p;
2346 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2347 TREE_INT_CST (arg1),
2348 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2349 TYPE_SATURATING (type));
2350 t = build_fixed (type, value);
2352 /* Propagate overflow flags. */
2353 if (overflow_p | TREE_OVERFLOW (arg1))
2355 TREE_OVERFLOW (t) = 1;
2356 TREE_CONSTANT_OVERFLOW (t) = 1;
2358 else if (TREE_CONSTANT_OVERFLOW (arg1))
2359 TREE_CONSTANT_OVERFLOW (t) = 1;
2360 return t;
2363 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2364 to a fixed-point type. */
2366 static tree
2367 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2369 FIXED_VALUE_TYPE value;
2370 tree t;
2371 bool overflow_p;
2373 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2374 &TREE_REAL_CST (arg1),
2375 TYPE_SATURATING (type));
2376 t = build_fixed (type, value);
2378 /* Propagate overflow flags. */
2379 if (overflow_p | TREE_OVERFLOW (arg1))
2381 TREE_OVERFLOW (t) = 1;
2382 TREE_CONSTANT_OVERFLOW (t) = 1;
2384 else if (TREE_CONSTANT_OVERFLOW (arg1))
2385 TREE_CONSTANT_OVERFLOW (t) = 1;
2386 return t;
2389 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2390 type TYPE. If no simplification can be done return NULL_TREE. */
2392 static tree
2393 fold_convert_const (enum tree_code code, tree type, tree arg1)
2395 if (TREE_TYPE (arg1) == type)
2396 return arg1;
2398 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2399 || TREE_CODE (type) == OFFSET_TYPE)
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2408 else if (TREE_CODE (type) == REAL_TYPE)
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2426 return NULL_TREE;
2429 /* Construct a vector of zero elements of vector type TYPE. */
2431 static tree
2432 build_zero_vector (tree type)
2434 tree elem, list;
2435 int i, units;
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2440 list = NULL_TREE;
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2446 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2448 bool
2449 fold_convertible_p (const_tree type, const_tree arg)
2451 tree orig = TREE_TYPE (arg);
2453 if (type == orig)
2454 return true;
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2459 return false;
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2462 return true;
2464 switch (TREE_CODE (type))
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2468 case OFFSET_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2471 return true;
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2475 case REAL_TYPE:
2476 case FIXED_POINT_TYPE:
2477 case COMPLEX_TYPE:
2478 case VECTOR_TYPE:
2479 case VOID_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2482 default:
2483 return false;
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2490 tree
2491 fold_convert (tree type, tree arg)
2493 tree orig = TREE_TYPE (arg);
2494 tree tem;
2496 if (type == orig)
2497 return arg;
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2507 switch (TREE_CODE (type))
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2511 case OFFSET_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2516 return tem;
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2530 case REAL_TYPE:
2531 if (TREE_CODE (arg) == INTEGER_CST)
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2535 return tem;
2537 else if (TREE_CODE (arg) == REAL_CST)
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2541 return tem;
2543 else if (TREE_CODE (arg) == FIXED_CST)
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2547 return tem;
2550 switch (TREE_CODE (orig))
2552 case INTEGER_TYPE:
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2557 case REAL_TYPE:
2558 return fold_build1 (NOP_EXPR, type, arg);
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2563 case COMPLEX_TYPE:
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2567 default:
2568 gcc_unreachable ();
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2577 return tem;
2580 switch (TREE_CODE (orig))
2582 case FIXED_POINT_TYPE:
2583 case INTEGER_TYPE:
2584 case ENUMERAL_TYPE:
2585 case BOOLEAN_TYPE:
2586 case REAL_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2589 case COMPLEX_TYPE:
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2593 default:
2594 gcc_unreachable ();
2597 case COMPLEX_TYPE:
2598 switch (TREE_CODE (orig))
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 case REAL_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2608 case COMPLEX_TYPE:
2610 tree rpart, ipart;
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2627 default:
2628 gcc_unreachable ();
2631 case VECTOR_TYPE:
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2639 case VOID_TYPE:
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == MODIFY_EXPR)
2642 return tem;
2643 return fold_build1 (NOP_EXPR, type, tem);
2645 default:
2646 gcc_unreachable ();
2650 /* Return false if expr can be assumed not to be an lvalue, true
2651 otherwise. */
2653 static bool
2654 maybe_lvalue_p (const_tree x)
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2659 case VAR_DECL:
2660 case PARM_DECL:
2661 case RESULT_DECL:
2662 case LABEL_DECL:
2663 case FUNCTION_DECL:
2664 case SSA_NAME:
2666 case COMPONENT_REF:
2667 case INDIRECT_REF:
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2670 case ARRAY_REF:
2671 case ARRAY_RANGE_REF:
2672 case BIT_FIELD_REF:
2673 case OBJ_TYPE_REF:
2675 case REALPART_EXPR:
2676 case IMAGPART_EXPR:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2679 case SAVE_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2682 case COMPOUND_EXPR:
2683 case MODIFY_EXPR:
2684 case TARGET_EXPR:
2685 case COND_EXPR:
2686 case BIND_EXPR:
2687 case MIN_EXPR:
2688 case MAX_EXPR:
2689 break;
2691 default:
2692 /* Assume the worst for front-end tree codes. */
2693 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2694 break;
2695 return false;
2698 return true;
2701 /* Return an expr equal to X but certainly not valid as an lvalue. */
2703 tree
2704 non_lvalue (tree x)
2706 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2707 us. */
2708 if (in_gimple_form)
2709 return x;
2711 if (! maybe_lvalue_p (x))
2712 return x;
2713 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2716 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2717 Zero means allow extended lvalues. */
2719 int pedantic_lvalues;
2721 /* When pedantic, return an expr equal to X but certainly not valid as a
2722 pedantic lvalue. Otherwise, return X. */
2724 static tree
2725 pedantic_non_lvalue (tree x)
2727 if (pedantic_lvalues)
2728 return non_lvalue (x);
2729 else
2730 return x;
2733 /* Given a tree comparison code, return the code that is the logical inverse
2734 of the given code. It is not safe to do this for floating-point
2735 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS
2736 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2738 enum tree_code
2739 invert_tree_comparison (enum tree_code code, bool honor_nans)
2741 if (honor_nans && flag_trapping_math)
2742 return ERROR_MARK;
2744 switch (code)
2746 case EQ_EXPR:
2747 return NE_EXPR;
2748 case NE_EXPR:
2749 return EQ_EXPR;
2750 case GT_EXPR:
2751 return honor_nans ? UNLE_EXPR : LE_EXPR;
2752 case GE_EXPR:
2753 return honor_nans ? UNLT_EXPR : LT_EXPR;
2754 case LT_EXPR:
2755 return honor_nans ? UNGE_EXPR : GE_EXPR;
2756 case LE_EXPR:
2757 return honor_nans ? UNGT_EXPR : GT_EXPR;
2758 case LTGT_EXPR:
2759 return UNEQ_EXPR;
2760 case UNEQ_EXPR:
2761 return LTGT_EXPR;
2762 case UNGT_EXPR:
2763 return LE_EXPR;
2764 case UNGE_EXPR:
2765 return LT_EXPR;
2766 case UNLT_EXPR:
2767 return GE_EXPR;
2768 case UNLE_EXPR:
2769 return GT_EXPR;
2770 case ORDERED_EXPR:
2771 return UNORDERED_EXPR;
2772 case UNORDERED_EXPR:
2773 return ORDERED_EXPR;
2774 default:
2775 gcc_unreachable ();
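
/* Illustrative note, not part of the original source: with NaNs, plain
   inversion is wrong because both X < Y and X >= Y are false when either
   operand is NaN, which is why LT inverts to UNGE rather than GE when
   NaNs are honored.  The hypothetical check below shows this on doubles:
   !(x < y) matches the unordered-or-GE sense, not x >= y.  */

static int
invert_lt_example (double x, double y)
{
  /* x != x detects NaN.  The equality holds for all inputs, including
     the unordered case where (x >= y) alone would disagree.  */
  return !(x < y) == (x >= y || x != x || y != y);	/* Always 1.  */
}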
2779 /* Similar, but return the comparison that results if the operands are
2780 swapped. This is safe for floating-point. */
2782 enum tree_code
2783 swap_tree_comparison (enum tree_code code)
2785 switch (code)
2787 case EQ_EXPR:
2788 case NE_EXPR:
2789 case ORDERED_EXPR:
2790 case UNORDERED_EXPR:
2791 case LTGT_EXPR:
2792 case UNEQ_EXPR:
2793 return code;
2794 case GT_EXPR:
2795 return LT_EXPR;
2796 case GE_EXPR:
2797 return LE_EXPR;
2798 case LT_EXPR:
2799 return GT_EXPR;
2800 case LE_EXPR:
2801 return GE_EXPR;
2802 case UNGT_EXPR:
2803 return UNLT_EXPR;
2804 case UNGE_EXPR:
2805 return UNLE_EXPR;
2806 case UNLT_EXPR:
2807 return UNGT_EXPR;
2808 case UNLE_EXPR:
2809 return UNGE_EXPR;
2810 default:
2811 gcc_unreachable ();
2816 /* Convert a comparison tree code from an enum tree_code representation
2817 into a compcode bit-based encoding. This function is the inverse of
2818 compcode_to_comparison. */
2820 static enum comparison_code
2821 comparison_to_compcode (enum tree_code code)
2823 switch (code)
2825 case LT_EXPR:
2826 return COMPCODE_LT;
2827 case EQ_EXPR:
2828 return COMPCODE_EQ;
2829 case LE_EXPR:
2830 return COMPCODE_LE;
2831 case GT_EXPR:
2832 return COMPCODE_GT;
2833 case NE_EXPR:
2834 return COMPCODE_NE;
2835 case GE_EXPR:
2836 return COMPCODE_GE;
2837 case ORDERED_EXPR:
2838 return COMPCODE_ORD;
2839 case UNORDERED_EXPR:
2840 return COMPCODE_UNORD;
2841 case UNLT_EXPR:
2842 return COMPCODE_UNLT;
2843 case UNEQ_EXPR:
2844 return COMPCODE_UNEQ;
2845 case UNLE_EXPR:
2846 return COMPCODE_UNLE;
2847 case UNGT_EXPR:
2848 return COMPCODE_UNGT;
2849 case LTGT_EXPR:
2850 return COMPCODE_LTGT;
2851 case UNGE_EXPR:
2852 return COMPCODE_UNGE;
2853 default:
2854 gcc_unreachable ();
2858 /* Convert a compcode bit-based encoding of a comparison operator back
2859 to GCC's enum tree_code representation. This function is the
2860 inverse of comparison_to_compcode. */
2862 static enum tree_code
2863 compcode_to_comparison (enum comparison_code code)
2865 switch (code)
2867 case COMPCODE_LT:
2868 return LT_EXPR;
2869 case COMPCODE_EQ:
2870 return EQ_EXPR;
2871 case COMPCODE_LE:
2872 return LE_EXPR;
2873 case COMPCODE_GT:
2874 return GT_EXPR;
2875 case COMPCODE_NE:
2876 return NE_EXPR;
2877 case COMPCODE_GE:
2878 return GE_EXPR;
2879 case COMPCODE_ORD:
2880 return ORDERED_EXPR;
2881 case COMPCODE_UNORD:
2882 return UNORDERED_EXPR;
2883 case COMPCODE_UNLT:
2884 return UNLT_EXPR;
2885 case COMPCODE_UNEQ:
2886 return UNEQ_EXPR;
2887 case COMPCODE_UNLE:
2888 return UNLE_EXPR;
2889 case COMPCODE_UNGT:
2890 return UNGT_EXPR;
2891 case COMPCODE_LTGT:
2892 return LTGT_EXPR;
2893 case COMPCODE_UNGE:
2894 return UNGE_EXPR;
2895 default:
2896 gcc_unreachable ();
2900 /* Return a tree for the comparison which is the combination of
2901 doing the AND or OR (depending on CODE) of the two operations LCODE
2902 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2903 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2904 if this makes the transformation invalid. */
2906 tree
2907 combine_comparisons (enum tree_code code, enum tree_code lcode,
2908 enum tree_code rcode, tree truth_type,
2909 tree ll_arg, tree lr_arg)
2911 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2912 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2913 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2914 enum comparison_code compcode;
2916 switch (code)
2918 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2919 compcode = lcompcode & rcompcode;
2920 break;
2922 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2923 compcode = lcompcode | rcompcode;
2924 break;
2926 default:
2927 return NULL_TREE;
2930 if (!honor_nans)
2932 /* Eliminate unordered comparisons, as well as LTGT and ORD
2933 which are not used unless the mode has NaNs. */
2934 compcode &= ~COMPCODE_UNORD;
2935 if (compcode == COMPCODE_LTGT)
2936 compcode = COMPCODE_NE;
2937 else if (compcode == COMPCODE_ORD)
2938 compcode = COMPCODE_TRUE;
2940 else if (flag_trapping_math)
2942 /* Check that the original operation and the optimized ones will trap
2943 under the same condition. */
2944 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2945 && (lcompcode != COMPCODE_EQ)
2946 && (lcompcode != COMPCODE_ORD);
2947 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2948 && (rcompcode != COMPCODE_EQ)
2949 && (rcompcode != COMPCODE_ORD);
2950 bool trap = (compcode & COMPCODE_UNORD) == 0
2951 && (compcode != COMPCODE_EQ)
2952 && (compcode != COMPCODE_ORD);
2954 /* In a short-circuited boolean expression the LHS might be
2955 such that the RHS, if evaluated, will never trap. For
2956 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2957 if neither x nor y is NaN. (This is a mixed blessing: for
2958 example, the expression above will never trap, hence
2959 optimizing it to x < y would be invalid). */
2960 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2961 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2962 rtrap = false;
2964 /* If the comparison was short-circuited, and only the RHS
2965 trapped, we may now generate a spurious trap. */
2966 if (rtrap && !ltrap
2967 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2968 return NULL_TREE;
2970 /* If we changed the conditions that cause a trap, we lose. */
2971 if ((ltrap || rtrap) != trap)
2972 return NULL_TREE;
2975 if (compcode == COMPCODE_TRUE)
2976 return constant_boolean_node (true, truth_type);
2977 else if (compcode == COMPCODE_FALSE)
2978 return constant_boolean_node (false, truth_type);
2979 else
2980 return fold_build2 (compcode_to_comparison (compcode),
2981 truth_type, ll_arg, lr_arg);
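
/* Illustrative sketch, not part of the original source: the compcode
   encoding turns combining comparisons on the same operands into bit
   arithmetic, which is exactly what the AND/OR of lcompcode and
   rcompcode above computes.  The hypothetical helper below folds
   (x < y) || (x == y) to x <= y and (x < y) && (x == y) to false.  */

static int
compcode_combine_example (void)
{
  int or_code = COMPCODE_LT | COMPCODE_EQ;	/* == COMPCODE_LE.  */
  int and_code = COMPCODE_LT & COMPCODE_EQ;	/* == COMPCODE_FALSE.  */
  return or_code == COMPCODE_LE
	 && and_code == COMPCODE_FALSE;		/* Always 1.  */
}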
2984 /* Return nonzero if two operands (typically of the same tree node)
2985 are necessarily equal. If either argument has side-effects this
2986 function returns zero. FLAGS modifies behavior as follows:
2988 If OEP_ONLY_CONST is set, only return nonzero for constants.
2989 This function tests whether the operands are indistinguishable;
2990 it does not test whether they are equal using C's == operation.
2991 The distinction is important for IEEE floating point, because
2992 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2993 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2995 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2996 even though it may hold multiple values during a function.
2997 This is because a GCC tree node guarantees that nothing else is
2998 executed between the evaluation of its "operands" (which may often
2999 be evaluated in arbitrary order). Hence if the operands themselves
3000 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3001 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3002 unset means assuming isochronic (or instantaneous) tree equivalence.
3003 Unless comparing arbitrary expression trees, such as from different
3004 statements, this flag can usually be left unset.
3006 If OEP_PURE_SAME is set, then pure functions with identical arguments
3007 are considered the same. It is used when the caller has other ways
3008 to ensure that global memory is unchanged in between. */
3010 int
3011 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3013 /* If either is ERROR_MARK, they aren't equal. */
3014 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3015 return 0;
3017 /* Check equality of integer constants before bailing out due to
3018 precision differences. */
3019 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3020 return tree_int_cst_equal (arg0, arg1);
3022 /* If both types don't have the same signedness, then we can't consider
3023 them equal. We must check this before the STRIP_NOPS calls
3024 because they may change the signedness of the arguments. As pointers
3025 strictly don't have a signedness, require either two pointers or
3026 two non-pointers as well. */
3027 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3028 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3029 return 0;
3031 /* If both types don't have the same precision, then it is not safe
3032 to strip NOPs. */
3033 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3034 return 0;
3036 STRIP_NOPS (arg0);
3037 STRIP_NOPS (arg1);
3039 /* In case both args are comparisons but with different comparison
3040 code, try to swap the comparison operands of one arg to produce
3041 a match and compare that variant. */
3042 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3043 && COMPARISON_CLASS_P (arg0)
3044 && COMPARISON_CLASS_P (arg1))
3046 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3048 if (TREE_CODE (arg0) == swap_code)
3049 return operand_equal_p (TREE_OPERAND (arg0, 0),
3050 TREE_OPERAND (arg1, 1), flags)
3051 && operand_equal_p (TREE_OPERAND (arg0, 1),
3052 TREE_OPERAND (arg1, 0), flags);
3055 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3056 /* This is needed for conversions and for COMPONENT_REF.
3057 Might as well play it safe and always test this. */
3058 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3059 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3060 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3061 return 0;
3063 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3064 We don't care about side effects in that case because the SAVE_EXPR
3065 takes care of that for us. In all other cases, two expressions are
3066 equal if they have no side effects. If we have two identical
3067 expressions with side effects that should be treated the same due
3068 to the only side effects being identical SAVE_EXPR's, that will
3069 be detected in the recursive calls below. */
3070 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3071 && (TREE_CODE (arg0) == SAVE_EXPR
3072 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3073 return 1;
3075 /* Next handle constant cases, those for which we can return 1 even
3076 if ONLY_CONST is set. */
3077 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3078 switch (TREE_CODE (arg0))
3080 case INTEGER_CST:
3081 return tree_int_cst_equal (arg0, arg1);
3083 case FIXED_CST:
3084 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3085 TREE_FIXED_CST (arg1));
3087 case REAL_CST:
3088 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3089 TREE_REAL_CST (arg1)))
3090 return 1;
3093 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3095 /* If we do not distinguish between signed and unsigned zero,
3096 consider them equal. */
3097 if (real_zerop (arg0) && real_zerop (arg1))
3098 return 1;
3100 return 0;
3102 case VECTOR_CST:
3104 tree v1, v2;
3106 v1 = TREE_VECTOR_CST_ELTS (arg0);
3107 v2 = TREE_VECTOR_CST_ELTS (arg1);
3108 while (v1 && v2)
3110 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3111 flags))
3112 return 0;
3113 v1 = TREE_CHAIN (v1);
3114 v2 = TREE_CHAIN (v2);
3117 return v1 == v2;
3120 case COMPLEX_CST:
3121 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3122 flags)
3123 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3124 flags));
3126 case STRING_CST:
3127 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3128 && ! memcmp (TREE_STRING_POINTER (arg0),
3129 TREE_STRING_POINTER (arg1),
3130 TREE_STRING_LENGTH (arg0)));
3132 case ADDR_EXPR:
3133 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3134 0);
3135 default:
3136 break;
3139 if (flags & OEP_ONLY_CONST)
3140 return 0;
3142 /* Define macros to test an operand from arg0 and arg1 for equality and a
3143 variant that allows null and views null as being different from any
3144 non-null value. In the latter case, if either is null, then both
3145 must be; otherwise, do the normal comparison. */
3146 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3147 TREE_OPERAND (arg1, N), flags)
3149 #define OP_SAME_WITH_NULL(N) \
3150 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3151 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3153 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3155 case tcc_unary:
3156 /* Two conversions are equal only if signedness and modes match. */
3157 switch (TREE_CODE (arg0))
3159 CASE_CONVERT:
3160 case FIX_TRUNC_EXPR:
3161 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3162 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3163 return 0;
3164 break;
3165 default:
3166 break;
3169 return OP_SAME (0);
3172 case tcc_comparison:
3173 case tcc_binary:
3174 if (OP_SAME (0) && OP_SAME (1))
3175 return 1;
3177 /* For commutative ops, allow the other order. */
3178 return (commutative_tree_code (TREE_CODE (arg0))
3179 && operand_equal_p (TREE_OPERAND (arg0, 0),
3180 TREE_OPERAND (arg1, 1), flags)
3181 && operand_equal_p (TREE_OPERAND (arg0, 1),
3182 TREE_OPERAND (arg1, 0), flags));
3184 case tcc_reference:
3185 /* If either of the pointer (or reference) expressions we are
3186 dereferencing contain a side effect, these cannot be equal. */
3187 if (TREE_SIDE_EFFECTS (arg0)
3188 || TREE_SIDE_EFFECTS (arg1))
3189 return 0;
3191 switch (TREE_CODE (arg0))
3193 case INDIRECT_REF:
3194 case ALIGN_INDIRECT_REF:
3195 case MISALIGNED_INDIRECT_REF:
3196 case REALPART_EXPR:
3197 case IMAGPART_EXPR:
3198 return OP_SAME (0);
3200 case ARRAY_REF:
3201 case ARRAY_RANGE_REF:
3202 /* Operands 2 and 3 may be null.
3203 Compare the array index by value if it is constant first as we
3204 may have different types but same value here. */
3205 return (OP_SAME (0)
3206 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3207 TREE_OPERAND (arg1, 1))
3208 || OP_SAME (1))
3209 && OP_SAME_WITH_NULL (2)
3210 && OP_SAME_WITH_NULL (3));
3212 case COMPONENT_REF:
3213 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3214 may be NULL when we're called to compare MEM_EXPRs. */
3215 return OP_SAME_WITH_NULL (0)
3216 && OP_SAME (1)
3217 && OP_SAME_WITH_NULL (2);
3219 case BIT_FIELD_REF:
3220 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3222 default:
3223 return 0;
3226 case tcc_expression:
3227 switch (TREE_CODE (arg0))
3229 case ADDR_EXPR:
3230 case TRUTH_NOT_EXPR:
3231 return OP_SAME (0);
3233 case TRUTH_ANDIF_EXPR:
3234 case TRUTH_ORIF_EXPR:
3235 return OP_SAME (0) && OP_SAME (1);
3237 case TRUTH_AND_EXPR:
3238 case TRUTH_OR_EXPR:
3239 case TRUTH_XOR_EXPR:
3240 if (OP_SAME (0) && OP_SAME (1))
3241 return 1;
3243 /* Otherwise take into account this is a commutative operation. */
3244 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3245 TREE_OPERAND (arg1, 1), flags)
3246 && operand_equal_p (TREE_OPERAND (arg0, 1),
3247 TREE_OPERAND (arg1, 0), flags));
3249 case COND_EXPR:
3250 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3252 default:
3253 return 0;
3256 case tcc_vl_exp:
3257 switch (TREE_CODE (arg0))
3259 case CALL_EXPR:
3260 /* If the CALL_EXPRs call different functions, then they
3261 clearly cannot be equal. */
3262 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3263 flags))
3264 return 0;
3267 unsigned int cef = call_expr_flags (arg0);
3268 if (flags & OEP_PURE_SAME)
3269 cef &= ECF_CONST | ECF_PURE;
3270 else
3271 cef &= ECF_CONST;
3272 if (!cef)
3273 return 0;
3276 /* Now see if all the arguments are the same. */
3278 const_call_expr_arg_iterator iter0, iter1;
3279 const_tree a0, a1;
3280 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3281 a1 = first_const_call_expr_arg (arg1, &iter1);
3282 a0 && a1;
3283 a0 = next_const_call_expr_arg (&iter0),
3284 a1 = next_const_call_expr_arg (&iter1))
3285 if (! operand_equal_p (a0, a1, flags))
3286 return 0;
3288 /* If we get here and both argument lists are exhausted
3289 then the CALL_EXPRs are equal. */
3290 return ! (a0 || a1);
3292 default:
3293 return 0;
3296 case tcc_declaration:
3297 /* Consider __builtin_sqrt equal to sqrt. */
3298 return (TREE_CODE (arg0) == FUNCTION_DECL
3299 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3300 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3301 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3303 default:
3304 return 0;
3307 #undef OP_SAME
3308 #undef OP_SAME_WITH_NULL
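
/* Illustrative sketch, not part of the original source: why the REAL_CST
   case above uses REAL_VALUES_IDENTICAL rather than C equality, as the
   comment before operand_equal_p explains.  -0.0 and 0.0 compare equal
   with == yet have different representations; the hypothetical helper
   below demonstrates this on IEEE doubles (memcmp is visible here via
   system.h).  */

static int
signed_zero_example (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz				/* C's == says equal...  */
	 && memcmp (&pz, &nz, sizeof pz) != 0;	/* ...but bits differ.  */
}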
3311 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3312 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3314 When in doubt, return 0. */
3316 static int
3317 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3319 int unsignedp1, unsignedpo;
3320 tree primarg0, primarg1, primother;
3321 unsigned int correct_width;
3323 if (operand_equal_p (arg0, arg1, 0))
3324 return 1;
3326 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3327 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3328 return 0;
3330 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3331 and see if the inner values are the same. This removes any
3332 signedness comparison, which doesn't matter here. */
3333 primarg0 = arg0, primarg1 = arg1;
3334 STRIP_NOPS (primarg0);
3335 STRIP_NOPS (primarg1);
3336 if (operand_equal_p (primarg0, primarg1, 0))
3337 return 1;
3339 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3340 actual comparison operand, ARG0.
3342 First throw away any conversions to wider types
3343 already present in the operands. */
3345 primarg1 = get_narrower (arg1, &unsignedp1);
3346 primother = get_narrower (other, &unsignedpo);
3348 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3349 if (unsignedp1 == unsignedpo
3350 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3351 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3353 tree type = TREE_TYPE (arg0);
3355 /* Make sure shorter operand is extended the right way
3356 to match the longer operand. */
3357 primarg1 = fold_convert (signed_or_unsigned_type_for
3358 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3360 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3361 return 1;
3364 return 0;
3367 /* See if ARG is an expression that is either a comparison or is performing
3368 arithmetic on comparisons. The comparisons must only be comparing
3369 two different values, which will be stored in *CVAL1 and *CVAL2; if
3370 they are nonzero it means that some operands have already been found.
3371 No variables may be used anywhere else in the expression except in the
3372 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3373 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3375 If this is true, return 1. Otherwise, return zero. */
3377 static int
3378 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3380 enum tree_code code = TREE_CODE (arg);
3381 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3383 /* We can handle some of the tcc_expression cases here. */
3384 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3385 tclass = tcc_unary;
3386 else if (tclass == tcc_expression
3387 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3388 || code == COMPOUND_EXPR))
3389 tclass = tcc_binary;
3391 else if (tclass == tcc_expression && code == SAVE_EXPR
3392 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3394 /* If we've already found a CVAL1 or CVAL2, this expression is
3395 too complex to handle. */
3396 if (*cval1 || *cval2)
3397 return 0;
3399 tclass = tcc_unary;
3400 *save_p = 1;
3403 switch (tclass)
3405 case tcc_unary:
3406 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3408 case tcc_binary:
3409 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3410 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3411 cval1, cval2, save_p));
3413 case tcc_constant:
3414 return 1;
3416 case tcc_expression:
3417 if (code == COND_EXPR)
3418 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3419 cval1, cval2, save_p)
3420 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3421 cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3423 cval1, cval2, save_p));
3424 return 0;
3426 case tcc_comparison:
3427 /* First see if we can handle the first operand, then the second. For
3428 the second operand, we know *CVAL1 can't be zero. It must be that
3429 one side of the comparison is each of the values; test for the
3430 case where this isn't true by failing if the two operands
3431 are the same. */
3433 if (operand_equal_p (TREE_OPERAND (arg, 0),
3434 TREE_OPERAND (arg, 1), 0))
3435 return 0;
3437 if (*cval1 == 0)
3438 *cval1 = TREE_OPERAND (arg, 0);
3439 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3441 else if (*cval2 == 0)
3442 *cval2 = TREE_OPERAND (arg, 0);
3443 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3445 else
3446 return 0;
3448 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3450 else if (*cval2 == 0)
3451 *cval2 = TREE_OPERAND (arg, 1);
3452 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3454 else
3455 return 0;
3457 return 1;
3459 default:
3460 return 0;
3464 /* ARG is a tree that is known to contain just arithmetic operations and
3465 comparisons. Evaluate the operations in the tree substituting NEW0 for
3466 any occurrence of OLD0 as an operand of a comparison and likewise for
3467 NEW1 and OLD1. */
3469 static tree
3470 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3472 tree type = TREE_TYPE (arg);
3473 enum tree_code code = TREE_CODE (arg);
3474 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3476 /* We can handle some of the tcc_expression cases here. */
3477 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3478 tclass = tcc_unary;
3479 else if (tclass == tcc_expression
3480 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3481 tclass = tcc_binary;
3483 switch (tclass)
3485 case tcc_unary:
3486 return fold_build1 (code, type,
3487 eval_subst (TREE_OPERAND (arg, 0),
3488 old0, new0, old1, new1));
3490 case tcc_binary:
3491 return fold_build2 (code, type,
3492 eval_subst (TREE_OPERAND (arg, 0),
3493 old0, new0, old1, new1),
3494 eval_subst (TREE_OPERAND (arg, 1),
3495 old0, new0, old1, new1));
3497 case tcc_expression:
3498 switch (code)
3500 case SAVE_EXPR:
3501 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3503 case COMPOUND_EXPR:
3504 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3506 case COND_EXPR:
3507 return fold_build3 (code, type,
3508 eval_subst (TREE_OPERAND (arg, 0),
3509 old0, new0, old1, new1),
3510 eval_subst (TREE_OPERAND (arg, 1),
3511 old0, new0, old1, new1),
3512 eval_subst (TREE_OPERAND (arg, 2),
3513 old0, new0, old1, new1));
3514 default:
3515 break;
3517 /* Fall through - ??? */
3519 case tcc_comparison:
3521 tree arg0 = TREE_OPERAND (arg, 0);
3522 tree arg1 = TREE_OPERAND (arg, 1);
3524 /* We need to check both for exact equality and tree equality. The
3525 former will be true if the operand has a side-effect. In that
3526 case, we know the operand occurred exactly once. */
3528 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3529 arg0 = new0;
3530 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3531 arg0 = new1;
3533 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3534 arg1 = new0;
3535 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3536 arg1 = new1;
3538 return fold_build2 (code, type, arg0, arg1);
3541 default:
3542 return arg;
3546 /* Return a tree for the case when the result of an expression is RESULT
3547 converted to TYPE and OMITTED was previously an operand of the expression
3548 but is now not needed (e.g., we folded OMITTED * 0).
3550 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3551 the conversion of RESULT to TYPE. */
3553 tree
3554 omit_one_operand (tree type, tree result, tree omitted)
3556 tree t = fold_convert (type, result);
3558 /* If the resulting operand is an empty statement, just return the omitted
3559 statement cast to void. */
3560 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3561 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3563 if (TREE_SIDE_EFFECTS (omitted))
3564 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3566 return non_lvalue (t);
3569 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3571 static tree
3572 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3574 tree t = fold_convert (type, result);
3576 /* If the resulting operand is an empty statement, just return the omitted
3577 statement cast to void. */
3578 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3579 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3581 if (TREE_SIDE_EFFECTS (omitted))
3582 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3584 return pedantic_non_lvalue (t);
3587 /* Return a tree for the case when the result of an expression is RESULT
3588 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3589 of the expression but are now not needed.
3591 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3592 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3593 evaluated before OMITTED2. Otherwise, if neither has side effects,
3594 just do the conversion of RESULT to TYPE. */
3596 tree
3597 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3599 tree t = fold_convert (type, result);
3601 if (TREE_SIDE_EFFECTS (omitted2))
3602 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3603 if (TREE_SIDE_EFFECTS (omitted1))
3604 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3606 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3610 /* Return a simplified tree node for the truth-negation of ARG. This
3611 never alters ARG itself. We assume that ARG is an operation that
3612 returns a truth value (0 or 1).
3614 FIXME: one would think we would fold the result, but it causes
3615 problems with the dominator optimizer. */
3617 tree
3618 fold_truth_not_expr (tree arg)
3620 tree type = TREE_TYPE (arg);
3621 enum tree_code code = TREE_CODE (arg);
3623 /* If this is a comparison, we can simply invert it, except for
3624 floating-point non-equality comparisons, in which case we just
3625 enclose a TRUTH_NOT_EXPR around what we have. */
3627 if (TREE_CODE_CLASS (code) == tcc_comparison)
3629 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3630 if (FLOAT_TYPE_P (op_type)
3631 && flag_trapping_math
3632 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3633 && code != NE_EXPR && code != EQ_EXPR)
3634 return NULL_TREE;
3635 else
3637 code = invert_tree_comparison (code,
3638 HONOR_NANS (TYPE_MODE (op_type)));
3639 if (code == ERROR_MARK)
3640 return NULL_TREE;
3641 else
3642 return build2 (code, type,
3643 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3647 switch (code)
3649 case INTEGER_CST:
3650 return constant_boolean_node (integer_zerop (arg), type);
3652 case TRUTH_AND_EXPR:
3653 return build2 (TRUTH_OR_EXPR, type,
3654 invert_truthvalue (TREE_OPERAND (arg, 0)),
3655 invert_truthvalue (TREE_OPERAND (arg, 1)));
3657 case TRUTH_OR_EXPR:
3658 return build2 (TRUTH_AND_EXPR, type,
3659 invert_truthvalue (TREE_OPERAND (arg, 0)),
3660 invert_truthvalue (TREE_OPERAND (arg, 1)));
3662 case TRUTH_XOR_EXPR:
3663 /* Here we can invert either operand. We invert the first operand
3664 unless the second operand is a TRUTH_NOT_EXPR in which case our
3665 result is the XOR of the first operand with the inside of the
3666 negation of the second operand. */
3668 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3669 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3670 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3671 else
3672 return build2 (TRUTH_XOR_EXPR, type,
3673 invert_truthvalue (TREE_OPERAND (arg, 0)),
3674 TREE_OPERAND (arg, 1));
3676 case TRUTH_ANDIF_EXPR:
3677 return build2 (TRUTH_ORIF_EXPR, type,
3678 invert_truthvalue (TREE_OPERAND (arg, 0)),
3679 invert_truthvalue (TREE_OPERAND (arg, 1)));
3681 case TRUTH_ORIF_EXPR:
3682 return build2 (TRUTH_ANDIF_EXPR, type,
3683 invert_truthvalue (TREE_OPERAND (arg, 0)),
3684 invert_truthvalue (TREE_OPERAND (arg, 1)));
3686 case TRUTH_NOT_EXPR:
3687 return TREE_OPERAND (arg, 0);
3689 case COND_EXPR:
3691 tree arg1 = TREE_OPERAND (arg, 1);
3692 tree arg2 = TREE_OPERAND (arg, 2);
3693 /* A COND_EXPR may have a throw as one operand, which
3694 then has void type. Just leave void operands
3695 as they are. */
3696 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3697 VOID_TYPE_P (TREE_TYPE (arg1))
3698 ? arg1 : invert_truthvalue (arg1),
3699 VOID_TYPE_P (TREE_TYPE (arg2))
3700 ? arg2 : invert_truthvalue (arg2));
3703 case COMPOUND_EXPR:
3704 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3705 invert_truthvalue (TREE_OPERAND (arg, 1)));
3707 case NON_LVALUE_EXPR:
3708 return invert_truthvalue (TREE_OPERAND (arg, 0));
3710 case NOP_EXPR:
3711 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3712 return build1 (TRUTH_NOT_EXPR, type, arg);
3714 case CONVERT_EXPR:
3715 case FLOAT_EXPR:
3716 return build1 (TREE_CODE (arg), type,
3717 invert_truthvalue (TREE_OPERAND (arg, 0)));
3719 case BIT_AND_EXPR:
3720 if (!integer_onep (TREE_OPERAND (arg, 1)))
3721 break;
3722 return build2 (EQ_EXPR, type, arg,
3723 build_int_cst (type, 0));
3725 case SAVE_EXPR:
3726 return build1 (TRUTH_NOT_EXPR, type, arg);
3728 case CLEANUP_POINT_EXPR:
3729 return build1 (CLEANUP_POINT_EXPR, type,
3730 invert_truthvalue (TREE_OPERAND (arg, 0)));
3732 default:
3733 break;
3736 return NULL_TREE;
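
/* Illustrative check, not part of the original source: the TRUTH_AND_EXPR
   and TRUTH_OR_EXPR inversions above are De Morgan's laws on truth
   values, which the hypothetical helper below verifies for plain ints.  */

static int
de_morgan_example (int a, int b)
{
  return !(a && b) == (!a || !b)	/* NOT over AND becomes OR.  */
	 && !(a || b) == (!a && !b);	/* NOT over OR becomes AND.  */
}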
3739 /* Return a simplified tree node for the truth-negation of ARG. This
3740 never alters ARG itself. We assume that ARG is an operation that
3741 returns a truth value (0 or 1).
3743 FIXME: one would think we would fold the result, but it causes
3744 problems with the dominator optimizer. */
3746 tree
3747 invert_truthvalue (tree arg)
3749 tree tem;
3751 if (TREE_CODE (arg) == ERROR_MARK)
3752 return arg;
3754 tem = fold_truth_not_expr (arg);
3755 if (!tem)
3756 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3758 return tem;
3761 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3762 operands are another bit-wise operation with a common input. If so,
3763 distribute the bit operations to save an operation and possibly two if
3764 constants are involved. For example, convert
3765 (A | B) & (A | C) into A | (B & C)
3766 Further simplification will occur if B and C are constants.
3768 If this optimization cannot be done, 0 will be returned. */
3770 static tree
3771 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3773 tree common;
3774 tree left, right;
3776 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3777 || TREE_CODE (arg0) == code
3778 || (TREE_CODE (arg0) != BIT_AND_EXPR
3779 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3780 return 0;
3782 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3784 common = TREE_OPERAND (arg0, 0);
3785 left = TREE_OPERAND (arg0, 1);
3786 right = TREE_OPERAND (arg1, 1);
3788 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3790 common = TREE_OPERAND (arg0, 0);
3791 left = TREE_OPERAND (arg0, 1);
3792 right = TREE_OPERAND (arg1, 0);
3794 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3796 common = TREE_OPERAND (arg0, 1);
3797 left = TREE_OPERAND (arg0, 0);
3798 right = TREE_OPERAND (arg1, 1);
3800 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3802 common = TREE_OPERAND (arg0, 1);
3803 left = TREE_OPERAND (arg0, 0);
3804 right = TREE_OPERAND (arg1, 0);
3806 else
3807 return 0;
3809 common = fold_convert (type, common);
3810 left = fold_convert (type, left);
3811 right = fold_convert (type, right);
3812 return fold_build2 (TREE_CODE (arg0), type, common,
3813 fold_build2 (code, type, left, right));
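
/* Illustrative check, not part of the original source: the rewrite above
   relies on the distributive identities (A | B) & (A | C) == A | (B & C)
   and (A & B) | (A & C) == A & (B | C), shown here on unsigned ints by a
   hypothetical helper.  */

static int
distribute_bit_example (unsigned int a, unsigned int b, unsigned int c)
{
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));	/* Always 1.  */
}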
3816 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3817 with code CODE. This optimization is unsafe. */
3818 static tree
3819 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3821 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3822 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3824 /* (A / C) +- (B / C) -> (A +- B) / C. */
3825 if (mul0 == mul1
3826 && operand_equal_p (TREE_OPERAND (arg0, 1),
3827 TREE_OPERAND (arg1, 1), 0))
3828 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3829 fold_build2 (code, type,
3830 TREE_OPERAND (arg0, 0),
3831 TREE_OPERAND (arg1, 0)),
3832 TREE_OPERAND (arg0, 1));
3834 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3835 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3836 TREE_OPERAND (arg1, 0), 0)
3837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3838 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3840 REAL_VALUE_TYPE r0, r1;
3841 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3842 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3843 if (!mul0)
3844 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3845 if (!mul1)
3846 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3847 real_arithmetic (&r0, code, &r0, &r1);
3848 return fold_build2 (MULT_EXPR, type,
3849 TREE_OPERAND (arg0, 0),
3850 build_real (type, r0));
3853 return NULL_TREE;
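
/* Illustrative sketch, not part of the original source: the first rewrite
   above, (A / C) +- (B / C) -> (A +- B) / C, on plain doubles.  It saves
   a division but is unsafe under exact IEEE semantics because the single
   division can round differently from the two separate ones; the helper
   name is hypothetical.  */

static double
distribute_div_example (double a, double b, double c)
{
  return (a + b) / c;	/* Replaces a / c + b / c.  */
}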
3856 /* Subroutine for fold_truthop: decode a field reference.
3858 If EXP is a comparison reference, we return the innermost reference.
3860 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3861 set to the starting bit number.
3863 If the innermost field can be completely contained in a mode-sized
3864 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3866 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3867 otherwise it is not changed.
3869 *PUNSIGNEDP is set to the signedness of the field.
3871 *PMASK is set to the mask used. This is either contained in a
3872 BIT_AND_EXPR or derived from the width of the field.
3874 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3876 Return 0 if this is not a component reference or is one that we can't
3877 do anything with. */
3879 static tree
3880 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3881 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3882 int *punsignedp, int *pvolatilep,
3883 tree *pmask, tree *pand_mask)
3885 tree outer_type = 0;
3886 tree and_mask = 0;
3887 tree mask, inner, offset;
3888 tree unsigned_type;
3889 unsigned int precision;
3891 /* All the optimizations using this function assume integer fields.
3892 There are problems with FP fields since the type_for_size call
3893 below can fail for, e.g., XFmode. */
3894 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3895 return 0;
3897 /* We are interested in the bare arrangement of bits, so strip everything
3898 that doesn't affect the machine mode. However, record the type of the
3899 outermost expression if it may matter below. */
3900 if (CONVERT_EXPR_P (exp)
3901 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3902 outer_type = TREE_TYPE (exp);
3903 STRIP_NOPS (exp);
3905 if (TREE_CODE (exp) == BIT_AND_EXPR)
3907 and_mask = TREE_OPERAND (exp, 1);
3908 exp = TREE_OPERAND (exp, 0);
3909 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3910 if (TREE_CODE (and_mask) != INTEGER_CST)
3911 return 0;
3914 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3915 punsignedp, pvolatilep, false);
3916 if ((inner == exp && and_mask == 0)
3917 || *pbitsize < 0 || offset != 0
3918 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3919 return 0;
3921 /* If the number of bits in the reference is the same as the bitsize of
3922 the outer type, then the outer type gives the signedness. Otherwise
3923 (in case of a small bitfield) the signedness is unchanged. */
3924 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3925 *punsignedp = TYPE_UNSIGNED (outer_type);
3927 /* Compute the mask to access the bitfield. */
3928 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3929 precision = TYPE_PRECISION (unsigned_type);
3931 mask = build_int_cst_type (unsigned_type, -1);
3933 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3934 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3936 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3937 if (and_mask != 0)
3938 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3939 fold_convert (unsigned_type, and_mask), mask);
3941 *pmask = mask;
3942 *pand_mask = and_mask;
3943 return inner;
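/* A standalone sketch of the mask computation above; the helper name
   and the fixed 32-bit precision are illustrative. Shifting all-ones
   left and then logically right by PRECISION - BITSIZE leaves exactly
   the low BITSIZE bits set, for 1 <= BITSIZE <= 32. */
static unsigned
field_mask_sketch (int bitsize)
{
  int precision = 32;              /* TYPE_PRECISION (unsigned_type) */
  unsigned mask = ~0u;             /* build_int_cst_type (..., -1)   */
  mask <<= precision - bitsize;    /* LSHIFT_EXPR                    */
  mask >>= precision - bitsize;    /* RSHIFT_EXPR, logical: unsigned */
  return mask;                     /* bitsize == 3 yields 0x7        */
}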
3946 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3947 represents the sign bit of EXP's type. If EXP represents a sign
3948 or zero extension, also test VAL against the unextended type.
3949 The return value is the (sub)expression whose sign bit is VAL,
3950 or NULL_TREE otherwise. */
3952 static tree
3953 sign_bit_p (tree exp, const_tree val)
3955 unsigned HOST_WIDE_INT mask_lo, lo;
3956 HOST_WIDE_INT mask_hi, hi;
3957 int width;
3958 tree t;
3960 /* Tree EXP must have an integral type. */
3961 t = TREE_TYPE (exp);
3962 if (! INTEGRAL_TYPE_P (t))
3963 return NULL_TREE;
3965 /* Tree VAL must be an integer constant. */
3966 if (TREE_CODE (val) != INTEGER_CST
3967 || TREE_OVERFLOW (val))
3968 return NULL_TREE;
3970 width = TYPE_PRECISION (t);
3971 if (width > HOST_BITS_PER_WIDE_INT)
3973 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3974 lo = 0;
3976 mask_hi = ((unsigned HOST_WIDE_INT) -1
3977 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3978 mask_lo = -1;
3980 else
3982 hi = 0;
3983 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3985 mask_hi = 0;
3986 mask_lo = ((unsigned HOST_WIDE_INT) -1
3987 >> (HOST_BITS_PER_WIDE_INT - width));
3990 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3991 treat VAL as if it were unsigned. */
3992 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3993 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3994 return exp;
3996 /* Handle extension from a narrower type. */
3997 if (TREE_CODE (exp) == NOP_EXPR
3998 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3999 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4001 return NULL_TREE;
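/* A standalone sketch of the test above for widths that fit in one
   64-bit word; the helper name is illustrative. VAL matches the sign
   bit of a WIDTH-bit type iff, masked to that width, it equals
   1 << (WIDTH - 1). */
static int
sign_bit_p_sketch (unsigned long long val, int width)
{
  unsigned long long mask
    = width == 64 ? ~0ULL : (1ULL << width) - 1;
  unsigned long long sign = 1ULL << (width - 1);
  return (val & mask) == sign;     /* e.g. width 8, val 0x80 -> 1 */
}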
4004 /* Subroutine for fold_truthop: determine if an operand is simple enough
4005 to be evaluated unconditionally. */
4007 static int
4008 simple_operand_p (const_tree exp)
4010 /* Strip any conversions that don't change the machine mode. */
4011 STRIP_NOPS (exp);
4013 return (CONSTANT_CLASS_P (exp)
4014 || TREE_CODE (exp) == SSA_NAME
4015 || (DECL_P (exp)
4016 && ! TREE_ADDRESSABLE (exp)
4017 && ! TREE_THIS_VOLATILE (exp)
4018 && ! DECL_NONLOCAL (exp)
4019 /* Don't regard global variables as simple. They may be
4020 allocated in ways unknown to the compiler (shared memory,
4021 #pragma weak, etc). */
4022 && ! TREE_PUBLIC (exp)
4023 && ! DECL_EXTERNAL (exp)
4024 /* Loading a static variable is unduly expensive, but global
4025 registers aren't expensive. */
4026 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4029 /* The following functions are subroutines to fold_range_test and allow it to
4030 try to change a logical combination of comparisons into a range test.
4032 For example, both
4033 X == 2 || X == 3 || X == 4 || X == 5
4035 X >= 2 && X <= 5
4036 are converted to
4037 (unsigned) (X - 2) <= 3
4039 We describe each set of comparisons as being either inside or outside
4040 a range, using a variable named like IN_P, and then describe the
4041 range with a lower and upper bound. If one of the bounds is omitted,
4042 it represents either the highest or lowest value of the type.
4044 In the comments below, we represent a range by two numbers in brackets
4045 preceded by a "+" to designate being inside that range, or a "-" to
4046 designate being outside that range, so the condition can be inverted by
4047 flipping the prefix. An omitted bound is represented by a "-". For
4048 example, "- [-, 10]" means being outside the range starting at the lowest
4049 possible value and ending at 10, in other words, being greater than 10.
4050 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4051 always false.
4053 We set up things so that the missing bounds are handled in a consistent
4054 manner so neither a missing bound nor "true" and "false" need to be
4055 handled using a special case. */
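/* A standalone sketch of the canonical example above; the helper name
   is illustrative. The cast is performed before the subtraction so
   the sketch stays defined even for X near INT_MIN. */
#include <assert.h>

static void
range_test_sketch (int x)
{
  int chain = (x == 2 || x == 3 || x == 4 || x == 5);
  int range = (x >= 2 && x <= 5);
  int check = ((unsigned) x - 2u <= 3u);  /* single unsigned compare */
  assert (chain == range && range == check);
}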
4057 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4058 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4059 and UPPER1_P are nonzero if the respective argument is an upper bound
4060 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4061 must be specified for a comparison. ARG1 will be converted to ARG0's
4062 type if both are specified. */
4064 static tree
4065 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4066 tree arg1, int upper1_p)
4068 tree tem;
4069 int result;
4070 int sgn0, sgn1;
4072 /* If neither arg represents infinity, do the normal operation.
4073 Else, if not a comparison, return infinity. Else handle the special
4074 comparison rules. Note that most of the cases below won't occur, but
4075 are handled for consistency. */
4077 if (arg0 != 0 && arg1 != 0)
4079 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4080 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4081 STRIP_NOPS (tem);
4082 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4085 if (TREE_CODE_CLASS (code) != tcc_comparison)
4086 return 0;
4088 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4089 for neither. In real maths, we cannot assume open ended ranges are
4090 the same. But, this is computer arithmetic, where numbers are finite.
4091 We can therefore make the transformation of any unbounded range with
4092 the value Z, Z being greater than any representable number. This permits
4093 us to treat unbounded ranges as equal. */
4094 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4095 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4096 switch (code)
4098 case EQ_EXPR:
4099 result = sgn0 == sgn1;
4100 break;
4101 case NE_EXPR:
4102 result = sgn0 != sgn1;
4103 break;
4104 case LT_EXPR:
4105 result = sgn0 < sgn1;
4106 break;
4107 case LE_EXPR:
4108 result = sgn0 <= sgn1;
4109 break;
4110 case GT_EXPR:
4111 result = sgn0 > sgn1;
4112 break;
4113 case GE_EXPR:
4114 result = sgn0 >= sgn1;
4115 break;
4116 default:
4117 gcc_unreachable ();
4120 return constant_boolean_node (result, type);
4123 /* Given EXP, a logical expression, set the range it is testing into
4124 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4125 actually being tested. *PLOW and *PHIGH will be made of the same
4126 type as the returned expression. If EXP is not a comparison, we
4127 will most likely not be returning a useful value and range. Set
4128 *STRICT_OVERFLOW_P to true if the return value is only valid
4129 because signed overflow is undefined; otherwise, do not change
4130 *STRICT_OVERFLOW_P. */
4132 static tree
4133 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4134 bool *strict_overflow_p)
4136 enum tree_code code;
4137 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4138 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4139 int in_p, n_in_p;
4140 tree low, high, n_low, n_high;
4142 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4143 and see if we can refine the range. Some of the cases below may not
4144 happen, but it doesn't seem worth worrying about this. We "continue"
4145 the outer loop when we've changed something; otherwise we "break"
4146 the switch, which will "break" the while. */
4148 in_p = 0;
4149 low = high = build_int_cst (TREE_TYPE (exp), 0);
4151 while (1)
4153 code = TREE_CODE (exp);
4154 exp_type = TREE_TYPE (exp);
4156 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4158 if (TREE_OPERAND_LENGTH (exp) > 0)
4159 arg0 = TREE_OPERAND (exp, 0);
4160 if (TREE_CODE_CLASS (code) == tcc_comparison
4161 || TREE_CODE_CLASS (code) == tcc_unary
4162 || TREE_CODE_CLASS (code) == tcc_binary)
4163 arg0_type = TREE_TYPE (arg0);
4164 if (TREE_CODE_CLASS (code) == tcc_binary
4165 || TREE_CODE_CLASS (code) == tcc_comparison
4166 || (TREE_CODE_CLASS (code) == tcc_expression
4167 && TREE_OPERAND_LENGTH (exp) > 1))
4168 arg1 = TREE_OPERAND (exp, 1);
4171 switch (code)
4173 case TRUTH_NOT_EXPR:
4174 in_p = ! in_p, exp = arg0;
4175 continue;
4177 case EQ_EXPR: case NE_EXPR:
4178 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4179 /* We can only do something if the range is testing for zero
4180 and if the second operand is an integer constant. Note that
4181 saying something is "in" the range we make is done by
4182 complementing IN_P since it will be set in the initial case of
4183 being not equal to zero; "out" is leaving it alone. */
4184 if (low == 0 || high == 0
4185 || ! integer_zerop (low) || ! integer_zerop (high)
4186 || TREE_CODE (arg1) != INTEGER_CST)
4187 break;
4189 switch (code)
4191 case NE_EXPR: /* - [c, c] */
4192 low = high = arg1;
4193 break;
4194 case EQ_EXPR: /* + [c, c] */
4195 in_p = ! in_p, low = high = arg1;
4196 break;
4197 case GT_EXPR: /* - [-, c] */
4198 low = 0, high = arg1;
4199 break;
4200 case GE_EXPR: /* + [c, -] */
4201 in_p = ! in_p, low = arg1, high = 0;
4202 break;
4203 case LT_EXPR: /* - [c, -] */
4204 low = arg1, high = 0;
4205 break;
4206 case LE_EXPR: /* + [-, c] */
4207 in_p = ! in_p, low = 0, high = arg1;
4208 break;
4209 default:
4210 gcc_unreachable ();
4213 /* If this is an unsigned comparison, we also know that EXP is
4214 greater than or equal to zero. We base the range tests we make
4215 on that fact, so we record it here so we can parse existing
4216 range tests. We test arg0_type since often the return type
4217 of, e.g. EQ_EXPR, is boolean. */
4218 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4220 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4221 in_p, low, high, 1,
4222 build_int_cst (arg0_type, 0),
4223 NULL_TREE))
4224 break;
4226 in_p = n_in_p, low = n_low, high = n_high;
4228 /* If the high bound is missing, but we have a nonzero low
4229 bound, reverse the range so it goes from zero to the low bound
4230 minus 1. */
4231 if (high == 0 && low && ! integer_zerop (low))
4233 in_p = ! in_p;
4234 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4235 integer_one_node, 0);
4236 low = build_int_cst (arg0_type, 0);
4240 exp = arg0;
4241 continue;
4243 case NEGATE_EXPR:
4244 /* (-x) IN [a,b] -> x in [-b, -a] */
4245 n_low = range_binop (MINUS_EXPR, exp_type,
4246 build_int_cst (exp_type, 0),
4247 0, high, 1);
4248 n_high = range_binop (MINUS_EXPR, exp_type,
4249 build_int_cst (exp_type, 0),
4250 0, low, 0);
4251 low = n_low, high = n_high;
4252 exp = arg0;
4253 continue;
4255 case BIT_NOT_EXPR:
4256 /* ~ X -> -X - 1 */
4257 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4258 build_int_cst (exp_type, 1));
4259 continue;
4261 case PLUS_EXPR: case MINUS_EXPR:
4262 if (TREE_CODE (arg1) != INTEGER_CST)
4263 break;
4265 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4266 move a constant to the other side. */
4267 if (!TYPE_UNSIGNED (arg0_type)
4268 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4269 break;
4271 /* If EXP is signed, any overflow in the computation is undefined,
4272 so we don't worry about it so long as our computations on
4273 the bounds don't overflow. For unsigned, overflow is defined
4274 and this is exactly the right thing. */
4275 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4276 arg0_type, low, 0, arg1, 0);
4277 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4278 arg0_type, high, 1, arg1, 0);
4279 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4280 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4281 break;
4283 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4284 *strict_overflow_p = true;
4286 /* Check for an unsigned range which has wrapped around the maximum
4287 value thus making n_high < n_low, and normalize it. */
4288 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4290 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4291 integer_one_node, 0);
4292 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4293 integer_one_node, 0);
4295 /* If the range is of the form +/- [ x+1, x ], we won't
4296 be able to normalize it. But then, it represents the
4297 whole range or the empty set, so make it
4298 +/- [ -, - ]. */
4299 if (tree_int_cst_equal (n_low, low)
4300 && tree_int_cst_equal (n_high, high))
4301 low = high = 0;
4302 else
4303 in_p = ! in_p;
4305 else
4306 low = n_low, high = n_high;
4308 exp = arg0;
4309 continue;
4311 CASE_CONVERT: case NON_LVALUE_EXPR:
4312 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4313 break;
4315 if (! INTEGRAL_TYPE_P (arg0_type)
4316 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4317 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4318 break;
4320 n_low = low, n_high = high;
4322 if (n_low != 0)
4323 n_low = fold_convert (arg0_type, n_low);
4325 if (n_high != 0)
4326 n_high = fold_convert (arg0_type, n_high);
4329 /* If we're converting arg0 from an unsigned type to exp's
4330 signed type, we will be doing the comparison as unsigned.
4331 The tests above have already verified that LOW and HIGH
4332 are both positive.
4334 So we have to ensure that we will handle large unsigned
4335 values the same way that the current signed bounds treat
4336 negative values. */
4338 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4340 tree high_positive;
4341 tree equiv_type;
4342 /* For fixed-point modes, we need to pass the saturating flag
4343 as the 2nd parameter. */
4344 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4345 equiv_type = lang_hooks.types.type_for_mode
4346 (TYPE_MODE (arg0_type),
4347 TYPE_SATURATING (arg0_type));
4348 else
4349 equiv_type = lang_hooks.types.type_for_mode
4350 (TYPE_MODE (arg0_type), 1);
4352 /* A range without an upper bound is, naturally, unbounded.
4353 Since convert would have cropped a very large value, use
4354 the max value for the destination type. */
4355 high_positive
4356 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4357 : TYPE_MAX_VALUE (arg0_type);
4359 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4360 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4361 fold_convert (arg0_type,
4362 high_positive),
4363 build_int_cst (arg0_type, 1));
4365 /* If the low bound is specified, "and" the range with the
4366 range for which the original unsigned value will be
4367 positive. */
4368 if (low != 0)
4370 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4371 1, n_low, n_high, 1,
4372 fold_convert (arg0_type,
4373 integer_zero_node),
4374 high_positive))
4375 break;
4377 in_p = (n_in_p == in_p);
4379 else
4381 /* Otherwise, "or" the range with the range of the input
4382 that will be interpreted as negative. */
4383 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4384 0, n_low, n_high, 1,
4385 fold_convert (arg0_type,
4386 integer_zero_node),
4387 high_positive))
4388 break;
4390 in_p = (in_p != n_in_p);
4394 exp = arg0;
4395 low = n_low, high = n_high;
4396 continue;
4398 default:
4399 break;
4402 break;
4405 /* If EXP is a constant, we can evaluate whether this is true or false. */
4406 if (TREE_CODE (exp) == INTEGER_CST)
4408 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4409 exp, 0, low, 0))
4410 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4411 exp, 1, high, 1)));
4412 low = high = 0;
4413 exp = 0;
4416 *pin_p = in_p, *plow = low, *phigh = high;
4417 return exp;
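/* A standalone sketch of two of the steps above; the helper name and
   the magnitude guard (which keeps the sketch free of signed
   overflow) are illustrative. */
#include <assert.h>

static void
make_range_sketch (int x)
{
  if (x > -1000000 && x < 1000000)
    {
      /* PLUS_EXPR: move the constant to the bounds, so x + 5 <= 10
	 becomes the range x in [-, 5]. */
      assert ((x + 5 <= 10) == (x <= 5));
      /* NEGATE_EXPR: (-x) in [-, 10] becomes x in [-10, -]. */
      assert ((-x <= 10) == (x >= -10));
    }
}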
4420 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4421 type, TYPE, return an expression to test if EXP is in (or out of, depending
4422 on IN_P) the range. Return 0 if the test couldn't be created. */
4424 static tree
4425 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4427 tree etype = TREE_TYPE (exp);
4428 tree value;
4430 #ifdef HAVE_canonicalize_funcptr_for_compare
4431 /* Disable this optimization for function pointer expressions
4432 on targets that require function pointer canonicalization. */
4433 if (HAVE_canonicalize_funcptr_for_compare
4434 && TREE_CODE (etype) == POINTER_TYPE
4435 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4436 return NULL_TREE;
4437 #endif
4439 if (! in_p)
4441 value = build_range_check (type, exp, 1, low, high);
4442 if (value != 0)
4443 return invert_truthvalue (value);
4445 return 0;
4448 if (low == 0 && high == 0)
4449 return build_int_cst (type, 1);
4451 if (low == 0)
4452 return fold_build2 (LE_EXPR, type, exp,
4453 fold_convert (etype, high));
4455 if (high == 0)
4456 return fold_build2 (GE_EXPR, type, exp,
4457 fold_convert (etype, low));
4459 if (operand_equal_p (low, high, 0))
4460 return fold_build2 (EQ_EXPR, type, exp,
4461 fold_convert (etype, low));
4463 if (integer_zerop (low))
4465 if (! TYPE_UNSIGNED (etype))
4467 etype = unsigned_type_for (etype);
4468 high = fold_convert (etype, high);
4469 exp = fold_convert (etype, exp);
4471 return build_range_check (type, exp, 1, 0, high);
4474 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4475 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4477 unsigned HOST_WIDE_INT lo;
4478 HOST_WIDE_INT hi;
4479 int prec;
4481 prec = TYPE_PRECISION (etype);
4482 if (prec <= HOST_BITS_PER_WIDE_INT)
4484 hi = 0;
4485 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4487 else
4489 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4490 lo = (unsigned HOST_WIDE_INT) -1;
4493 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4495 if (TYPE_UNSIGNED (etype))
4497 tree signed_etype = signed_type_for (etype);
4498 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4499 etype
4500 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4501 else
4502 etype = signed_etype;
4503 exp = fold_convert (etype, exp);
4505 return fold_build2 (GT_EXPR, type, exp,
4506 build_int_cst (etype, 0));
4510 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4511 This requires wrap-around arithmetic for the type of the expression. */
4512 switch (TREE_CODE (etype))
4514 case INTEGER_TYPE:
4515 /* There is no requirement that LOW be within the range of ETYPE
4516 if the latter is a subtype. It must, however, be within the base
4517 type of ETYPE. So be sure we do the subtraction in that type. */
4518 if (TREE_TYPE (etype))
4519 etype = TREE_TYPE (etype);
4520 break;
4522 case ENUMERAL_TYPE:
4523 case BOOLEAN_TYPE:
4524 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4525 TYPE_UNSIGNED (etype));
4526 break;
4528 default:
4529 break;
4532 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4533 if (TREE_CODE (etype) == INTEGER_TYPE
4534 && !TYPE_OVERFLOW_WRAPS (etype))
4536 tree utype, minv, maxv;
4538 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4539 for the type in question, as we rely on this here. */
4540 utype = unsigned_type_for (etype);
4541 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4542 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4543 integer_one_node, 1);
4544 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4546 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4547 minv, 1, maxv, 1)))
4548 etype = utype;
4549 else
4550 return 0;
4553 high = fold_convert (etype, high);
4554 low = fold_convert (etype, low);
4555 exp = fold_convert (etype, exp);
4557 value = const_binop (MINUS_EXPR, high, low, 0);
4560 if (POINTER_TYPE_P (etype))
4562 if (value != 0 && !TREE_OVERFLOW (value))
4564 low = fold_convert (sizetype, low);
4565 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4566 return build_range_check (type,
4567 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4568 1, build_int_cst (etype, 0), value);
4570 return 0;
4573 if (value != 0 && !TREE_OVERFLOW (value))
4574 return build_range_check (type,
4575 fold_build2 (MINUS_EXPR, etype, exp, low),
4576 1, build_int_cst (etype, 0), value);
4578 return 0;
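/* A standalone sketch of the final rewrite above; the helper name is
   illustrative. With wrap-around (unsigned) arithmetic, the two-sided
   test LOW <= EXP && EXP <= HIGH collapses into the single compare
   EXP - LOW <= HIGH - LOW. */
#include <assert.h>

static void
build_range_check_sketch (unsigned exp, unsigned low, unsigned high)
{
  if (low <= high)
    assert ((exp >= low && exp <= high) == (exp - low <= high - low));
}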
4581 /* Return the predecessor of VAL in its type, handling the infinite case. */
4583 static tree
4584 range_predecessor (tree val)
4586 tree type = TREE_TYPE (val);
4588 if (INTEGRAL_TYPE_P (type)
4589 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4590 return 0;
4591 else
4592 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4595 /* Return the successor of VAL in its type, handling the infinite case. */
4597 static tree
4598 range_successor (tree val)
4600 tree type = TREE_TYPE (val);
4602 if (INTEGRAL_TYPE_P (type)
4603 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4604 return 0;
4605 else
4606 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4609 /* Given two ranges, see if we can merge them into one. Return 1 if we
4610 can, 0 if we can't. Set the output range into the specified parameters. */
4612 static int
4613 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4614 tree high0, int in1_p, tree low1, tree high1)
4616 int no_overlap;
4617 int subset;
4618 int temp;
4619 tree tem;
4620 int in_p;
4621 tree low, high;
4622 int lowequal = ((low0 == 0 && low1 == 0)
4623 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4624 low0, 0, low1, 0)));
4625 int highequal = ((high0 == 0 && high1 == 0)
4626 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4627 high0, 1, high1, 1)));
4629 /* Make range 0 be the range that starts first, or ends last if they
4630 start at the same value. Swap them if it isn't. */
4631 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4632 low0, 0, low1, 0))
4633 || (lowequal
4634 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4635 high1, 1, high0, 1))))
4637 temp = in0_p, in0_p = in1_p, in1_p = temp;
4638 tem = low0, low0 = low1, low1 = tem;
4639 tem = high0, high0 = high1, high1 = tem;
4642 /* Now flag two cases, whether the ranges are disjoint or whether the
4643 second range is totally subsumed in the first. Note that the tests
4644 below are simplified by the ones above. */
4645 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4646 high0, 1, low1, 0));
4647 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4648 high1, 1, high0, 1));
4650 /* We now have four cases, depending on whether we are including or
4651 excluding the two ranges. */
4652 if (in0_p && in1_p)
4654 /* If they don't overlap, the result is false. If the second range
4655 is a subset it is the result. Otherwise, the range is from the start
4656 of the second to the end of the first. */
4657 if (no_overlap)
4658 in_p = 0, low = high = 0;
4659 else if (subset)
4660 in_p = 1, low = low1, high = high1;
4661 else
4662 in_p = 1, low = low1, high = high0;
4665 else if (in0_p && ! in1_p)
4667 /* If they don't overlap, the result is the first range. If they are
4668 equal, the result is false. If the second range is a subset of the
4669 first, and the ranges begin at the same place, we go from just after
4670 the end of the second range to the end of the first. If the second
4671 range is not a subset of the first, or if it is a subset and both
4672 ranges end at the same place, the range starts at the start of the
4673 first range and ends just before the second range.
4674 Otherwise, we can't describe this as a single range. */
4675 if (no_overlap)
4676 in_p = 1, low = low0, high = high0;
4677 else if (lowequal && highequal)
4678 in_p = 0, low = high = 0;
4679 else if (subset && lowequal)
4681 low = range_successor (high1);
4682 high = high0;
4683 in_p = 1;
4684 if (low == 0)
4686 /* We are in the weird situation where high0 > high1 but
4687 high1 has no successor. Punt. */
4688 return 0;
4691 else if (! subset || highequal)
4693 low = low0;
4694 high = range_predecessor (low1);
4695 in_p = 1;
4696 if (high == 0)
4698 /* low0 < low1 but low1 has no predecessor. Punt. */
4699 return 0;
4702 else
4703 return 0;
4706 else if (! in0_p && in1_p)
4708 /* If they don't overlap, the result is the second range. If the second
4709 is a subset of the first, the result is false. Otherwise,
4710 the range starts just after the first range and ends at the
4711 end of the second. */
4712 if (no_overlap)
4713 in_p = 1, low = low1, high = high1;
4714 else if (subset || highequal)
4715 in_p = 0, low = high = 0;
4716 else
4718 low = range_successor (high0);
4719 high = high1;
4720 in_p = 1;
4721 if (low == 0)
4723 /* high1 > high0 but high0 has no successor. Punt. */
4724 return 0;
4729 else
4731 /* The case where we are excluding both ranges. Here the complex case
4732 is if they don't overlap. In that case, the only time we have a
4733 range is if they are adjacent. If the second is a subset of the
4734 first, the result is the first. Otherwise, the range to exclude
4735 starts at the beginning of the first range and ends at the end of the
4736 second. */
4737 if (no_overlap)
4739 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4740 range_successor (high0),
4741 1, low1, 0)))
4742 in_p = 0, low = low0, high = high1;
4743 else
4745 /* Canonicalize - [min, x] into - [-, x]. */
4746 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4747 switch (TREE_CODE (TREE_TYPE (low0)))
4749 case ENUMERAL_TYPE:
4750 if (TYPE_PRECISION (TREE_TYPE (low0))
4751 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4752 break;
4753 /* FALLTHROUGH */
4754 case INTEGER_TYPE:
4755 if (tree_int_cst_equal (low0,
4756 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4757 low0 = 0;
4758 break;
4759 case POINTER_TYPE:
4760 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4761 && integer_zerop (low0))
4762 low0 = 0;
4763 break;
4764 default:
4765 break;
4768 /* Canonicalize - [x, max] into - [x, -]. */
4769 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4770 switch (TREE_CODE (TREE_TYPE (high1)))
4772 case ENUMERAL_TYPE:
4773 if (TYPE_PRECISION (TREE_TYPE (high1))
4774 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4775 break;
4776 /* FALLTHROUGH */
4777 case INTEGER_TYPE:
4778 if (tree_int_cst_equal (high1,
4779 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4780 high1 = 0;
4781 break;
4782 case POINTER_TYPE:
4783 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4784 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4785 high1, 1,
4786 integer_one_node, 1)))
4787 high1 = 0;
4788 break;
4789 default:
4790 break;
4793 /* The ranges might also be adjacent between the maximum and
4794 minimum values of the given type. For
4795 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4796 return + [x + 1, y - 1]. */
4797 if (low0 == 0 && high1 == 0)
4799 low = range_successor (high0);
4800 high = range_predecessor (low1);
4801 if (low == 0 || high == 0)
4802 return 0;
4804 in_p = 1;
4806 else
4807 return 0;
4810 else if (subset)
4811 in_p = 0, low = low0, high = high0;
4812 else
4813 in_p = 0, low = low0, high = high1;
4816 *pin_p = in_p, *plow = low, *phigh = high;
4817 return 1;
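/* A standalone sketch of the in/in case above; the helper name and
   the particular bounds are illustrative. ANDing two overlapping
   "in" ranges yields the later start and the earlier end. */
#include <assert.h>

static void
merge_ranges_sketch (int x)
{
  /* + [2, 9] merged with + [5, 12] gives + [5, 9]. */
  assert (((x >= 2 && x <= 9) && (x >= 5 && x <= 12))
	  == (x >= 5 && x <= 9));
}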
4821 /* Subroutine of fold, looking inside expressions of the form
4822 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4823 of the COND_EXPR. This function is being used also to optimize
4824 A op B ? C : A, by reversing the comparison first.
4826 Return a folded expression whose code is not a COND_EXPR
4827 anymore, or NULL_TREE if no folding opportunity is found. */
4829 static tree
4830 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4832 enum tree_code comp_code = TREE_CODE (arg0);
4833 tree arg00 = TREE_OPERAND (arg0, 0);
4834 tree arg01 = TREE_OPERAND (arg0, 1);
4835 tree arg1_type = TREE_TYPE (arg1);
4836 tree tem;
4838 STRIP_NOPS (arg1);
4839 STRIP_NOPS (arg2);
4841 /* If we have A op 0 ? A : -A, consider applying the following
4842 transformations:
4844 A == 0? A : -A same as -A
4845 A != 0? A : -A same as A
4846 A >= 0? A : -A same as abs (A)
4847 A > 0? A : -A same as abs (A)
4848 A <= 0? A : -A same as -abs (A)
4849 A < 0? A : -A same as -abs (A)
4851 None of these transformations work for modes with signed
4852 zeros. If A is +/-0, the first two transformations will
4853 change the sign of the result (from +0 to -0, or vice
4854 versa). The last four will fix the sign of the result,
4855 even though the original expressions could be positive or
4856 negative, depending on the sign of A.
4858 Note that all these transformations are correct if A is
4859 NaN, since the two alternatives (A and -A) are also NaNs. */
4860 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4861 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4862 ? real_zerop (arg01)
4863 : integer_zerop (arg01))
4864 && ((TREE_CODE (arg2) == NEGATE_EXPR
4865 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4866 /* In the case that A is of the form X-Y, '-A' (arg2) may
4867 have already been folded to Y-X, check for that. */
4868 || (TREE_CODE (arg1) == MINUS_EXPR
4869 && TREE_CODE (arg2) == MINUS_EXPR
4870 && operand_equal_p (TREE_OPERAND (arg1, 0),
4871 TREE_OPERAND (arg2, 1), 0)
4872 && operand_equal_p (TREE_OPERAND (arg1, 1),
4873 TREE_OPERAND (arg2, 0), 0))))
4874 switch (comp_code)
4876 case EQ_EXPR:
4877 case UNEQ_EXPR:
4878 tem = fold_convert (arg1_type, arg1);
4879 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4880 case NE_EXPR:
4881 case LTGT_EXPR:
4882 return pedantic_non_lvalue (fold_convert (type, arg1));
4883 case UNGE_EXPR:
4884 case UNGT_EXPR:
4885 if (flag_trapping_math)
4886 break;
4887 /* Fall through. */
4888 case GE_EXPR:
4889 case GT_EXPR:
4890 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4891 arg1 = fold_convert (signed_type_for
4892 (TREE_TYPE (arg1)), arg1);
4893 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4894 return pedantic_non_lvalue (fold_convert (type, tem));
4895 case UNLE_EXPR:
4896 case UNLT_EXPR:
4897 if (flag_trapping_math)
4898 break;
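/* Fall through. */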
4899 case LE_EXPR:
4900 case LT_EXPR:
4901 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4902 arg1 = fold_convert (signed_type_for
4903 (TREE_TYPE (arg1)), arg1);
4904 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4905 return negate_expr (fold_convert (type, tem));
4906 default:
4907 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4908 break;
4911 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4912 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4913 both transformations are correct when A is NaN: A != 0
4914 is then true, and A == 0 is false. */
4916 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4917 && integer_zerop (arg01) && integer_zerop (arg2))
4919 if (comp_code == NE_EXPR)
4920 return pedantic_non_lvalue (fold_convert (type, arg1));
4921 else if (comp_code == EQ_EXPR)
4922 return build_int_cst (type, 0);
4925 /* Try some transformations of A op B ? A : B.
4927 A == B? A : B same as B
4928 A != B? A : B same as A
4929 A >= B? A : B same as max (A, B)
4930 A > B? A : B same as max (B, A)
4931 A <= B? A : B same as min (A, B)
4932 A < B? A : B same as min (B, A)
4934 As above, these transformations don't work in the presence
4935 of signed zeros. For example, if A and B are zeros of
4936 opposite sign, the first two transformations will change
4937 the sign of the result. In the last four, the original
4938 expressions give different results for (A=+0, B=-0) and
4939 (A=-0, B=+0), but the transformed expressions do not.
4941 The first two transformations are correct if either A or B
4942 is a NaN. In the first transformation, the condition will
4943 be false, and B will indeed be chosen. In the case of the
4944 second transformation, the condition A != B will be true,
4945 and A will be chosen.
4947 The conversions to max() and min() are not correct if B is
4948 a number and A is not. The conditions in the original
4949 expressions will be false, so all four give B. The min()
4950 and max() versions would give a NaN instead. */
4951 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4952 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4953 /* Avoid these transformations if the COND_EXPR may be used
4954 as an lvalue in the C++ front-end. PR c++/19199. */
4955 && (in_gimple_form
4956 || (strcmp (lang_hooks.name, "GNU C++") != 0
4957 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4958 || ! maybe_lvalue_p (arg1)
4959 || ! maybe_lvalue_p (arg2)))
4961 tree comp_op0 = arg00;
4962 tree comp_op1 = arg01;
4963 tree comp_type = TREE_TYPE (comp_op0);
4965 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4966 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4968 comp_type = type;
4969 comp_op0 = arg1;
4970 comp_op1 = arg2;
4973 switch (comp_code)
4975 case EQ_EXPR:
4976 return pedantic_non_lvalue (fold_convert (type, arg2));
4977 case NE_EXPR:
4978 return pedantic_non_lvalue (fold_convert (type, arg1));
4979 case LE_EXPR:
4980 case LT_EXPR:
4981 case UNLE_EXPR:
4982 case UNLT_EXPR:
4983 /* In C++ a ?: expression can be an lvalue, so put the
4984 operand which will be used if they are equal first
4985 so that we can convert this back to the
4986 corresponding COND_EXPR. */
4987 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4989 comp_op0 = fold_convert (comp_type, comp_op0);
4990 comp_op1 = fold_convert (comp_type, comp_op1);
4991 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4992 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4993 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4994 return pedantic_non_lvalue (fold_convert (type, tem));
4996 break;
4997 case GE_EXPR:
4998 case GT_EXPR:
4999 case UNGE_EXPR:
5000 case UNGT_EXPR:
5001 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5003 comp_op0 = fold_convert (comp_type, comp_op0);
5004 comp_op1 = fold_convert (comp_type, comp_op1);
5005 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5006 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5007 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5008 return pedantic_non_lvalue (fold_convert (type, tem));
5010 break;
5011 case UNEQ_EXPR:
5012 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5013 return pedantic_non_lvalue (fold_convert (type, arg2));
5014 break;
5015 case LTGT_EXPR:
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5017 return pedantic_non_lvalue (fold_convert (type, arg1));
5018 break;
5019 default:
5020 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5021 break;
5025 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5026 we might still be able to simplify this. For example,
5027 if C1 is one less or one more than C2, this might have started
5028 out as a MIN or MAX and been transformed by this function.
5029 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5031 if (INTEGRAL_TYPE_P (type)
5032 && TREE_CODE (arg01) == INTEGER_CST
5033 && TREE_CODE (arg2) == INTEGER_CST)
5034 switch (comp_code)
5036 case EQ_EXPR:
5037 /* We can replace A with C1 in this case. */
5038 arg1 = fold_convert (type, arg01);
5039 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5041 case LT_EXPR:
5042 /* If C1 is C2 + 1, this is min(A, C2). */
5043 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5044 OEP_ONLY_CONST)
5045 && operand_equal_p (arg01,
5046 const_binop (PLUS_EXPR, arg2,
5047 build_int_cst (type, 1), 0),
5048 OEP_ONLY_CONST))
5049 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5050 type,
5051 fold_convert (type, arg1),
5052 arg2));
5053 break;
5055 case LE_EXPR:
5056 /* If C1 is C2 - 1, this is min(A, C2). */
5057 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5058 OEP_ONLY_CONST)
5059 && operand_equal_p (arg01,
5060 const_binop (MINUS_EXPR, arg2,
5061 build_int_cst (type, 1), 0),
5062 OEP_ONLY_CONST))
5063 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5064 type,
5065 fold_convert (type, arg1),
5066 arg2));
5067 break;
5069 case GT_EXPR:
5070 /* If C1 is C2 - 1, this is max(A, C2). */
5071 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5072 OEP_ONLY_CONST)
5073 && operand_equal_p (arg01,
5074 const_binop (MINUS_EXPR, arg2,
5075 build_int_cst (type, 1), 0),
5076 OEP_ONLY_CONST))
5077 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5078 type,
5079 fold_convert (type, arg1),
5080 arg2));
5081 break;
5083 case GE_EXPR:
5084 /* If C1 is C2 + 1, this is max(A, C2). */
5085 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5086 OEP_ONLY_CONST)
5087 && operand_equal_p (arg01,
5088 const_binop (PLUS_EXPR, arg2,
5089 build_int_cst (type, 1), 0),
5090 OEP_ONLY_CONST))
5091 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5092 type,
5093 fold_convert (type, arg1),
5094 arg2));
5095 break;
5096 case NE_EXPR:
5097 break;
5098 default:
5099 gcc_unreachable ();
5102 return NULL_TREE;
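/* A standalone sketch of two of the rewrites above, using integers so
   the signed-zero and NaN caveats in the comments do not apply; the
   helper name and the INT_MIN guard are illustrative. */
#include <assert.h>
#include <limits.h>
#include <stdlib.h>

static void
cond_expr_with_comparison_sketch (int a, int b)
{
  /* A >= 0 ? A : -A is abs (A); INT_MIN is excluded so that -A
     stays defined. */
  if (a != INT_MIN)
    assert ((a >= 0 ? a : -a) == abs (a));
  /* A < B ? A : B and A <= B ? A : B both compute min (A, B); the
     rewrite is free to pick either operand when they are equal. */
  assert ((a < b ? a : b) == (a <= b ? a : b));
}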
5107 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5108 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5109 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5110 false) >= 2)
5111 #endif
5113 /* EXP is some logical combination of boolean tests. See if we can
5114 merge it into some range test. Return the new tree if so. */
5116 static tree
5117 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5119 int or_op = (code == TRUTH_ORIF_EXPR
5120 || code == TRUTH_OR_EXPR);
5121 int in0_p, in1_p, in_p;
5122 tree low0, low1, low, high0, high1, high;
5123 bool strict_overflow_p = false;
5124 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5125 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5126 tree tem;
5127 const char * const warnmsg = G_("assuming signed overflow does not occur "
5128 "when simplifying range test");
5130 /* If this is an OR operation, invert both sides; we will invert
5131 again at the end. */
5132 if (or_op)
5133 in0_p = ! in0_p, in1_p = ! in1_p;
5135 /* If both expressions are the same, if we can merge the ranges, and we
5136 can build the range test, return it or it inverted. If one of the
5137 ranges is always true or always false, consider it to be the same
5138 expression as the other. */
5139 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5140 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5141 in1_p, low1, high1)
5142 && 0 != (tem = (build_range_check (type,
5143 lhs != 0 ? lhs
5144 : rhs != 0 ? rhs : integer_zero_node,
5145 in_p, low, high))))
5147 if (strict_overflow_p)
5148 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5149 return or_op ? invert_truthvalue (tem) : tem;
5152 /* On machines where branches are expensive, if this is a
5153 short-circuited branch and the underlying object on both sides
5154 is the same, make a non-short-circuit operation. */
5155 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5156 && lhs != 0 && rhs != 0
5157 && (code == TRUTH_ANDIF_EXPR
5158 || code == TRUTH_ORIF_EXPR)
5159 && operand_equal_p (lhs, rhs, 0))
5161 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5162 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5163 which cases we can't do this. */
5164 if (simple_operand_p (lhs))
5165 return build2 (code == TRUTH_ANDIF_EXPR
5166 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5167 type, op0, op1);
5169 else if (lang_hooks.decls.global_bindings_p () == 0
5170 && ! CONTAINS_PLACEHOLDER_P (lhs))
5172 tree common = save_expr (lhs);
5174 if (0 != (lhs = build_range_check (type, common,
5175 or_op ? ! in0_p : in0_p,
5176 low0, high0))
5177 && (0 != (rhs = build_range_check (type, common,
5178 or_op ? ! in1_p : in1_p,
5179 low1, high1))))
5181 if (strict_overflow_p)
5182 fold_overflow_warning (warnmsg,
5183 WARN_STRICT_OVERFLOW_COMPARISON);
5184 return build2 (code == TRUTH_ANDIF_EXPR
5185 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5186 type, lhs, rhs);
5191 return 0;
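/* A standalone sketch of the non-short-circuit rewrite above; the
   helper name and the particular tests are illustrative. For 0/1
   truth values of side-effect-free operands, the short-circuit and
   bitwise forms agree. */
#include <assert.h>

static void
non_short_circuit_sketch (int x)
{
  int lhs = (x > 0);               /* simple operand: no side effects */
  int rhs = (x < 10);
  assert ((lhs && rhs) == (lhs & rhs));   /* TRUTH_ANDIF -> TRUTH_AND */
  assert ((lhs || rhs) == (lhs | rhs));   /* TRUTH_ORIF  -> TRUTH_OR  */
}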
5194 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5195 bit value. Arrange things so the extra bits will be set to zero if and
5196 only if C is sign-extended to its full width. If MASK is nonzero,
5197 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5199 static tree
5200 unextend (tree c, int p, int unsignedp, tree mask)
5202 tree type = TREE_TYPE (c);
5203 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5204 tree temp;
5206 if (p == modesize || unsignedp)
5207 return c;
5209 /* We work by getting just the sign bit into the low-order bit, then
5210 into the high-order bit, then sign-extend. We then XOR that value
5211 with C. */
5212 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5213 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5215 /* We must use a signed type in order to get an arithmetic right shift.
5216 However, we must also avoid introducing accidental overflows, so that
5217 a subsequent call to integer_zerop will work. Hence we must
5218 do the type conversion here. At this point, the constant is either
5219 zero or one, and the conversion to a signed type can never overflow.
5220 We could get an overflow if this conversion is done anywhere else. */
5221 if (TYPE_UNSIGNED (type))
5222 temp = fold_convert (signed_type_for (type), temp);
5224 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5225 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5226 if (mask != 0)
5227 temp = const_binop (BIT_AND_EXPR, temp,
5228 fold_convert (TREE_TYPE (c), mask), 0);
5229 /* If necessary, convert the type back to match the type of C. */
5230 if (TYPE_UNSIGNED (type))
5231 temp = fold_convert (type, temp);
5233 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
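/* A compact standalone sketch of the effect above; the helper name is
   illustrative, and the xor/subtract trick used here is a folklore
   equivalent of the shift sequence in unextend. It sign-extends a
   P-bit value C held zero-extended in a 32-bit word, assuming a
   two's-complement target. */
static int
sign_extend_sketch (unsigned c, int p)
{
  unsigned sign = 1u << (p - 1);    /* the field's sign bit          */
  return (int) ((c ^ sign) - sign); /* copies it into the high bits  */
}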
5236 /* Find ways of folding logical expressions of LHS and RHS:
5237 Try to merge two comparisons to the same innermost item.
5238 Look for range tests like "ch >= '0' && ch <= '9'".
5239 Look for combinations of simple terms on machines with expensive branches
5240 and evaluate the RHS unconditionally.
5242 For example, if we have p->a == 2 && p->b == 4 and we can make an
5243 object large enough to span both A and B, we can do this with a comparison
5244 against the object ANDed with the a mask.
5246 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5247 operations to do this with one comparison.
5249 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5250 function and the one above.
5252 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5253 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5255 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5256 two operands.
5258 We return the simplified tree or 0 if no optimization is possible. */
5260 static tree
5261 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5263 /* If this is the "or" of two comparisons, we can do something if
5264 the comparisons are NE_EXPR. If this is the "and", we can do something
5265 if the comparisons are EQ_EXPR. I.e.,
5266 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5268 WANTED_CODE is this operation code. For single bit fields, we can
5269 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5270 comparison for one-bit fields. */
5272 enum tree_code wanted_code;
5273 enum tree_code lcode, rcode;
5274 tree ll_arg, lr_arg, rl_arg, rr_arg;
5275 tree ll_inner, lr_inner, rl_inner, rr_inner;
5276 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5277 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5278 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5279 HOST_WIDE_INT lnbitsize, lnbitpos;
5280 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5281 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5282 enum machine_mode lnmode;
5283 tree ll_mask, lr_mask, rl_mask, rr_mask;
5284 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5285 tree l_const, r_const;
5286 tree lntype, result;
5287 int first_bit, end_bit;
5288 int volatilep;
5289 tree orig_lhs = lhs, orig_rhs = rhs;
5290 enum tree_code orig_code = code;
5292 /* Start by getting the comparison codes. Fail if anything is volatile.
5293 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5294 it were surrounded with a NE_EXPR. */
5296 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5297 return 0;
5299 lcode = TREE_CODE (lhs);
5300 rcode = TREE_CODE (rhs);
5302 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5304 lhs = build2 (NE_EXPR, truth_type, lhs,
5305 build_int_cst (TREE_TYPE (lhs), 0));
5306 lcode = NE_EXPR;
5309 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5311 rhs = build2 (NE_EXPR, truth_type, rhs,
5312 build_int_cst (TREE_TYPE (rhs), 0));
5313 rcode = NE_EXPR;
5316 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5317 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5318 return 0;
5320 ll_arg = TREE_OPERAND (lhs, 0);
5321 lr_arg = TREE_OPERAND (lhs, 1);
5322 rl_arg = TREE_OPERAND (rhs, 0);
5323 rr_arg = TREE_OPERAND (rhs, 1);
5325 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5326 if (simple_operand_p (ll_arg)
5327 && simple_operand_p (lr_arg))
5329 tree result;
5330 if (operand_equal_p (ll_arg, rl_arg, 0)
5331 && operand_equal_p (lr_arg, rr_arg, 0))
5333 result = combine_comparisons (code, lcode, rcode,
5334 truth_type, ll_arg, lr_arg);
5335 if (result)
5336 return result;
5338 else if (operand_equal_p (ll_arg, rr_arg, 0)
5339 && operand_equal_p (lr_arg, rl_arg, 0))
5341 result = combine_comparisons (code, lcode,
5342 swap_tree_comparison (rcode),
5343 truth_type, ll_arg, lr_arg);
5344 if (result)
5345 return result;
5349 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5350 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5352 /* If the RHS can be evaluated unconditionally and its operands are
5353 simple, it wins to evaluate the RHS unconditionally on machines
5354 with expensive branches. In this case, this isn't a comparison
5355 that can be merged. Avoid doing this if the RHS is a floating-point
5356 comparison since those can trap. */
5358 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5359 false) >= 2
5360 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5361 && simple_operand_p (rl_arg)
5362 && simple_operand_p (rr_arg))
5364 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5365 if (code == TRUTH_OR_EXPR
5366 && lcode == NE_EXPR && integer_zerop (lr_arg)
5367 && rcode == NE_EXPR && integer_zerop (rr_arg)
5368 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5369 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5370 return build2 (NE_EXPR, truth_type,
5371 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5372 ll_arg, rl_arg),
5373 build_int_cst (TREE_TYPE (ll_arg), 0));
5375 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5376 if (code == TRUTH_AND_EXPR
5377 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5378 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5379 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5380 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5381 return build2 (EQ_EXPR, truth_type,
5382 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5383 ll_arg, rl_arg),
5384 build_int_cst (TREE_TYPE (ll_arg), 0));
5386 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5388 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5389 return build2 (code, truth_type, lhs, rhs);
5390 return NULL_TREE;
5394 /* See if the comparisons can be merged. Then get all the parameters for
5395 each side. */
5397 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5398 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5399 return 0;
5401 volatilep = 0;
5402 ll_inner = decode_field_reference (ll_arg,
5403 &ll_bitsize, &ll_bitpos, &ll_mode,
5404 &ll_unsignedp, &volatilep, &ll_mask,
5405 &ll_and_mask);
5406 lr_inner = decode_field_reference (lr_arg,
5407 &lr_bitsize, &lr_bitpos, &lr_mode,
5408 &lr_unsignedp, &volatilep, &lr_mask,
5409 &lr_and_mask);
5410 rl_inner = decode_field_reference (rl_arg,
5411 &rl_bitsize, &rl_bitpos, &rl_mode,
5412 &rl_unsignedp, &volatilep, &rl_mask,
5413 &rl_and_mask);
5414 rr_inner = decode_field_reference (rr_arg,
5415 &rr_bitsize, &rr_bitpos, &rr_mode,
5416 &rr_unsignedp, &volatilep, &rr_mask,
5417 &rr_and_mask);
5419 /* The inner operation on the lhs of each comparison must be the
5420 same if we are to be able to do anything.
5421 Then see if we have constants. If not, the same must be true for
5422 the rhs's. */
5423 if (volatilep || ll_inner == 0 || rl_inner == 0
5424 || ! operand_equal_p (ll_inner, rl_inner, 0))
5425 return 0;
5427 if (TREE_CODE (lr_arg) == INTEGER_CST
5428 && TREE_CODE (rr_arg) == INTEGER_CST)
5429 l_const = lr_arg, r_const = rr_arg;
5430 else if (lr_inner == 0 || rr_inner == 0
5431 || ! operand_equal_p (lr_inner, rr_inner, 0))
5432 return 0;
5433 else
5434 l_const = r_const = 0;
5436 /* If either comparison code is not correct for our logical operation,
5437 fail. However, we can convert a one-bit comparison against zero into
5438 the opposite comparison against that bit being set in the field. */
5440 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5441 if (lcode != wanted_code)
5443 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5445 /* Make the left operand unsigned, since we are only interested
5446 in the value of one bit. Otherwise we are doing the wrong
5447 thing below. */
5448 ll_unsignedp = 1;
5449 l_const = ll_mask;
5451 else
5452 return 0;
5455 /* This is analogous to the code for l_const above. */
5456 if (rcode != wanted_code)
5458 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5460 rl_unsignedp = 1;
5461 r_const = rl_mask;
5463 else
5464 return 0;
5467 /* See if we can find a mode that contains both fields being compared on
5468 the left. If we can't, fail. Otherwise, update all constants and masks
5469 to be relative to a field of that size. */
5470 first_bit = MIN (ll_bitpos, rl_bitpos);
5471 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5472 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5473 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5474 volatilep);
5475 if (lnmode == VOIDmode)
5476 return 0;
5478 lnbitsize = GET_MODE_BITSIZE (lnmode);
5479 lnbitpos = first_bit & ~ (lnbitsize - 1);
5480 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5481 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5483 if (BYTES_BIG_ENDIAN)
5485 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5486 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5489 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5490 size_int (xll_bitpos), 0);
5491 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5492 size_int (xrl_bitpos), 0);
5494 if (l_const)
5496 l_const = fold_convert (lntype, l_const);
5497 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5498 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5499 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5500 fold_build1 (BIT_NOT_EXPR,
5501 lntype, ll_mask),
5502 0)))
5504 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5506 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5509 if (r_const)
5511 r_const = fold_convert (lntype, r_const);
5512 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5513 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5514 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5515 fold_build1 (BIT_NOT_EXPR,
5516 lntype, rl_mask),
5517 0)))
5519 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5521 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5525 /* Handle the case of comparisons with constants. If there is something in
5526 common between the masks, those bits of the constants must be the same.
5527 If not, the condition is always false. Test for this to avoid generating
5528 incorrect code below. */
5529 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5530 if (! integer_zerop (result)
5531 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5532 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5534 if (wanted_code == NE_EXPR)
5536 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5537 return constant_boolean_node (true, truth_type);
5539 else
5541 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5542 return constant_boolean_node (false, truth_type);
5546 return NULL_TREE;
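/* A standalone sketch of the unconditional-evaluation rewrites above;
   the helper name is illustrative. */
#include <assert.h>

static void
fold_truthop_sketch (unsigned a, unsigned b)
{
  /* (a != 0) || (b != 0) -> (a | b) != 0 */
  assert (((a != 0) || (b != 0)) == ((a | b) != 0));
  /* (a == 0) && (b == 0) -> (a | b) == 0 */
  assert (((a == 0) && (b == 0)) == ((a | b) == 0));
}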
5549 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5550 constant. */
5552 static tree
5553 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5555 tree arg0 = op0;
5556 enum tree_code op_code;
5557 tree comp_const;
5558 tree minmax_const;
5559 int consts_equal, consts_lt;
5560 tree inner;
5562 STRIP_SIGN_NOPS (arg0);
5564 op_code = TREE_CODE (arg0);
5565 minmax_const = TREE_OPERAND (arg0, 1);
5566 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5567 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5568 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5569 inner = TREE_OPERAND (arg0, 0);
5571 /* If something does not permit us to optimize, return the original tree. */
5572 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5573 || TREE_CODE (comp_const) != INTEGER_CST
5574 || TREE_OVERFLOW (comp_const)
5575 || TREE_CODE (minmax_const) != INTEGER_CST
5576 || TREE_OVERFLOW (minmax_const))
5577 return NULL_TREE;
5579 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5580 and GT_EXPR, doing the rest with recursive calls using logical
5581 simplifications. */
5582 switch (code)
5584 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5586 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5587 type, op0, op1);
5588 if (tem)
5589 return invert_truthvalue (tem);
5590 return NULL_TREE;
5593 case GE_EXPR:
5594 return
5595 fold_build2 (TRUTH_ORIF_EXPR, type,
5596 optimize_minmax_comparison
5597 (EQ_EXPR, type, arg0, comp_const),
5598 optimize_minmax_comparison
5599 (GT_EXPR, type, arg0, comp_const));
5601 case EQ_EXPR:
5602 if (op_code == MAX_EXPR && consts_equal)
5603 /* MAX (X, 0) == 0 -> X <= 0 */
5604 return fold_build2 (LE_EXPR, type, inner, comp_const);
5606 else if (op_code == MAX_EXPR && consts_lt)
5607 /* MAX (X, 0) == 5 -> X == 5 */
5608 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5610 else if (op_code == MAX_EXPR)
5611 /* MAX (X, 0) == -1 -> false */
5612 return omit_one_operand (type, integer_zero_node, inner);
5614 else if (consts_equal)
5615 /* MIN (X, 0) == 0 -> X >= 0 */
5616 return fold_build2 (GE_EXPR, type, inner, comp_const);
5618 else if (consts_lt)
5619 /* MIN (X, 0) == 5 -> false */
5620 return omit_one_operand (type, integer_zero_node, inner);
5622 else
5623 /* MIN (X, 0) == -1 -> X == -1 */
5624 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5626 case GT_EXPR:
5627 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5628 /* MAX (X, 0) > 0 -> X > 0
5629 MAX (X, 0) > 5 -> X > 5 */
5630 return fold_build2 (GT_EXPR, type, inner, comp_const);
5632 else if (op_code == MAX_EXPR)
5633 /* MAX (X, 0) > -1 -> true */
5634 return omit_one_operand (type, integer_one_node, inner);
5636 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5637 /* MIN (X, 0) > 0 -> false
5638 MIN (X, 0) > 5 -> false */
5639 return omit_one_operand (type, integer_zero_node, inner);
5641 else
5642 /* MIN (X, 0) > -1 -> X > -1 */
5643 return fold_build2 (GT_EXPR, type, inner, comp_const);
5645 default:
5646 return NULL_TREE;
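/* A minimal standalone sketch, assuming a plain C99 host: it spot-checks
   the EQ_EXPR and GT_EXPR folds above using the constant 0 from the
   comments.  imax and imin are local stand-ins for MAX_EXPR/MIN_EXPR,
   not GCC functions.  */
#include <assert.h>

static int imax (int a, int b) { return a > b ? a : b; }
static int imin (int a, int b) { return a < b ? a : b; }

int
main (void)
{
  int x;
  for (x = -8; x <= 8; x++)
    {
      assert ((imax (x, 0) == 0) == (x <= 0));   /* MAX (X, 0) == 0 -> X <= 0 */
      assert ((imax (x, 0) == 5) == (x == 5));   /* MAX (X, 0) == 5 -> X == 5 */
      assert (imax (x, 0) != -1);                /* MAX (X, 0) == -1 -> false */
      assert ((imin (x, 0) == 0) == (x >= 0));   /* MIN (X, 0) == 0 -> X >= 0 */
      assert ((imin (x, 0) > -1) == (x > -1));   /* MIN (X, 0) > -1 -> X > -1 */
    }
  return 0;
}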
5650 /* T is an integer expression that is being multiplied, divided, or taken a
5651 modulus (CODE says which and what kind of divide or modulus) by a
5652 constant C. See if we can eliminate that operation by folding it with
5653 other operations already in T. WIDE_TYPE, if non-null, is a type that
5654 should be used for the computation if wider than our type.
5656 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5657 (X * 2) + (Y * 4). We must, however, be assured that either the original
5658 expression would not overflow or that overflow is undefined for the type
5659 in the language in question.
5661 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5662 the machine has a multiply-accumulate insn or that this is part of an
5663 addressing calculation.
5665 If we return a non-null expression, it is an equivalent form of the
5666 original computation, but need not be in the original type.
5668 We set *STRICT_OVERFLOW_P to true if the return value depends on
5669 signed overflow being undefined. Otherwise we do not change
5670 *STRICT_OVERFLOW_P. */
5672 static tree
5673 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5674 bool *strict_overflow_p)
5676 /* To avoid exponential search depth, refuse to allow recursion past
5677 three levels. Beyond that (1) it's highly unlikely that we'll find
5678 something interesting and (2) we've probably processed it before
5679 when we built the inner expression. */
5681 static int depth;
5682 tree ret;
5684 if (depth > 3)
5685 return NULL;
5687 depth++;
5688 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5689 depth--;
5691 return ret;
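/* The worked example from the comment above, as a standalone C99 check
   on a range where nothing overflows: dividing (X * 8) + (Y * 16) by 4
   gives the same result as (X * 2) + (Y * 4).  */
#include <assert.h>

int
main (void)
{
  int x, y;
  for (x = -5; x <= 5; x++)
    for (y = -5; y <= 5; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}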
5694 static tree
5695 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5696 bool *strict_overflow_p)
5698 tree type = TREE_TYPE (t);
5699 enum tree_code tcode = TREE_CODE (t);
5700 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5701 > GET_MODE_SIZE (TYPE_MODE (type)))
5702 ? wide_type : type);
5703 tree t1, t2;
5704 int same_p = tcode == code;
5705 tree op0 = NULL_TREE, op1 = NULL_TREE;
5706 bool sub_strict_overflow_p;
5708 /* Don't deal with constants of zero here; they confuse the code below. */
5709 if (integer_zerop (c))
5710 return NULL_TREE;
5712 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5713 op0 = TREE_OPERAND (t, 0);
5715 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5716 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5718 /* Note that we need not handle conditional operations here since fold
5719 already handles those cases. So just do arithmetic here. */
5720 switch (tcode)
5722 case INTEGER_CST:
5723 /* For a constant, we can always simplify if we are a multiply
5724 or (for divide and modulus) if it is a multiple of our constant. */
5725 if (code == MULT_EXPR
5726 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5727 return const_binop (code, fold_convert (ctype, t),
5728 fold_convert (ctype, c), 0);
5729 break;
5731 CASE_CONVERT: case NON_LVALUE_EXPR:
5732 /* If op0 is an expression ... */
5733 if ((COMPARISON_CLASS_P (op0)
5734 || UNARY_CLASS_P (op0)
5735 || BINARY_CLASS_P (op0)
5736 || VL_EXP_CLASS_P (op0)
5737 || EXPRESSION_CLASS_P (op0))
5738 /* ... and has wrapping overflow, and its type is smaller
5739 than ctype, then we cannot pass through as widening. */
5740 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5741 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5742 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5743 && (TYPE_PRECISION (ctype)
5744 > TYPE_PRECISION (TREE_TYPE (op0))))
5745 /* ... or this is a truncation (t is narrower than op0),
5746 then we cannot pass through this narrowing. */
5747 || (TYPE_PRECISION (type)
5748 < TYPE_PRECISION (TREE_TYPE (op0)))
5749 /* ... or signedness changes for division or modulus,
5750 then we cannot pass through this conversion. */
5751 || (code != MULT_EXPR
5752 && (TYPE_UNSIGNED (ctype)
5753 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5754 /* ... or has undefined overflow while the type it is converted
5755 to has not, in which case we cannot do the operation in the inner type
5756 as that would introduce undefined overflow. */
5757 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5758 && !TYPE_OVERFLOW_UNDEFINED (type))))
5759 break;
5761 /* Pass the constant down and see if we can make a simplification. If
5762 we can, replace this expression with the inner simplification for
5763 possible later conversion to our or some other type. */
5764 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5765 && TREE_CODE (t2) == INTEGER_CST
5766 && !TREE_OVERFLOW (t2)
5767 && (0 != (t1 = extract_muldiv (op0, t2, code,
5768 code == MULT_EXPR
5769 ? ctype : NULL_TREE,
5770 strict_overflow_p))))
5771 return t1;
5772 break;
5774 case ABS_EXPR:
5775 /* If widening the type changes it from signed to unsigned, then we
5776 must avoid building ABS_EXPR itself as unsigned. */
5777 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5779 tree cstype = (*signed_type_for) (ctype);
5780 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5781 != 0)
5783 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5784 return fold_convert (ctype, t1);
5786 break;
5788 /* If the constant is negative, we cannot simplify this. */
5789 if (tree_int_cst_sgn (c) == -1)
5790 break;
5791 /* FALLTHROUGH */
5792 case NEGATE_EXPR:
5793 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5794 != 0)
5795 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5796 break;
5798 case MIN_EXPR: case MAX_EXPR:
5799 /* If widening the type changes the signedness, then we can't perform
5800 this optimization as that changes the result. */
5801 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5802 break;
5804 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5805 sub_strict_overflow_p = false;
5806 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5807 &sub_strict_overflow_p)) != 0
5808 && (t2 = extract_muldiv (op1, c, code, wide_type,
5809 &sub_strict_overflow_p)) != 0)
5811 if (tree_int_cst_sgn (c) < 0)
5812 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5813 if (sub_strict_overflow_p)
5814 *strict_overflow_p = true;
5815 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5816 fold_convert (ctype, t2));
5818 break;
5820 case LSHIFT_EXPR: case RSHIFT_EXPR:
5821 /* If the second operand is constant, this is a multiplication
5822 or floor division, by a power of two, so we can treat it that
5823 way unless the multiplier or divisor overflows. Signed
5824 left-shift overflow is implementation-defined rather than
5825 undefined in C90, so do not convert signed left shift into
5826 multiplication. */
5827 if (TREE_CODE (op1) == INTEGER_CST
5828 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5829 /* const_binop may not detect overflow correctly,
5830 so check for it explicitly here. */
5831 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5832 && TREE_INT_CST_HIGH (op1) == 0
5833 && 0 != (t1 = fold_convert (ctype,
5834 const_binop (LSHIFT_EXPR,
5835 size_one_node,
5836 op1, 0)))
5837 && !TREE_OVERFLOW (t1))
5838 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5839 ? MULT_EXPR : FLOOR_DIV_EXPR,
5840 ctype, fold_convert (ctype, op0), t1),
5841 c, code, wide_type, strict_overflow_p);
5842 break;
5844 case PLUS_EXPR: case MINUS_EXPR:
5845 /* See if we can eliminate the operation on both sides. If we can, we
5846 can return a new PLUS or MINUS. If we can't, the only remaining
5847 cases where we can do anything are if the second operand is a
5848 constant. */
5849 sub_strict_overflow_p = false;
5850 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5851 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5852 if (t1 != 0 && t2 != 0
5853 && (code == MULT_EXPR
5854 /* If not multiplication, we can only do this if both operands
5855 are divisible by c. */
5856 || (multiple_of_p (ctype, op0, c)
5857 && multiple_of_p (ctype, op1, c))))
5859 if (sub_strict_overflow_p)
5860 *strict_overflow_p = true;
5861 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5862 fold_convert (ctype, t2));
5865 /* If this was a subtraction, negate OP1 and set it to be an addition.
5866 This simplifies the logic below. */
5867 if (tcode == MINUS_EXPR)
5868 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5870 if (TREE_CODE (op1) != INTEGER_CST)
5871 break;
5873 /* If either OP1 or C are negative, this optimization is not safe for
5874 some of the division and remainder types while for others we need
5875 to change the code. */
5876 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5878 if (code == CEIL_DIV_EXPR)
5879 code = FLOOR_DIV_EXPR;
5880 else if (code == FLOOR_DIV_EXPR)
5881 code = CEIL_DIV_EXPR;
5882 else if (code != MULT_EXPR
5883 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5884 break;
5887 /* If it's a multiply or a division/modulus operation of a multiple
5888 of our constant, do the operation and verify it doesn't overflow. */
5889 if (code == MULT_EXPR
5890 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5892 op1 = const_binop (code, fold_convert (ctype, op1),
5893 fold_convert (ctype, c), 0);
5894 /* We allow the constant to overflow with wrapping semantics. */
5895 if (op1 == 0
5896 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5897 break;
5899 else
5900 break;
5902 /* If we have an unsigned type that is not a sizetype, we cannot widen
5903 the operation since it will change the result if the original
5904 computation overflowed. */
5905 if (TYPE_UNSIGNED (ctype)
5906 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5907 && ctype != type)
5908 break;
5910 /* If we were able to eliminate our operation from the first side,
5911 apply our operation to the second side and reform the PLUS. */
5912 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5913 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5915 /* The last case is if we are a multiply. In that case, we can
5916 apply the distributive law to commute the multiply and addition
5917 if the multiplication of the constants doesn't overflow. */
5918 if (code == MULT_EXPR)
5919 return fold_build2 (tcode, ctype,
5920 fold_build2 (code, ctype,
5921 fold_convert (ctype, op0),
5922 fold_convert (ctype, c)),
5923 op1);
5925 break;
5927 case MULT_EXPR:
5928 /* We have a special case here if we are doing something like
5929 (C * 8) % 4 since we know that's zero. */
5930 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5931 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5932 /* If the multiplication can overflow we cannot optimize this.
5933 ??? Until we can properly mark individual operations as
5934 not overflowing we need to treat sizetype specially here, as
5935 stor-layout relies on this optimization to make
5936 DECL_FIELD_BIT_OFFSET always a constant. */
5937 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5938 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5939 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5940 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5941 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5943 *strict_overflow_p = true;
5944 return omit_one_operand (type, integer_zero_node, op0);
5947 /* ... fall through ... */
5949 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5950 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5954 if (same_p
5955 && (t1 = extract_muldiv (op0, c, code, wide_type,
5956 strict_overflow_p)) != 0)
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5958 fold_convert (ctype, op1));
5959 else if (tcode == MULT_EXPR && code == MULT_EXPR
5960 && (t1 = extract_muldiv (op1, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5963 fold_convert (ctype, t1));
5964 else if (TREE_CODE (op1) != INTEGER_CST)
5965 return 0;
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5969 if (tcode == code
5970 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5971 fold_convert (ctype, c), 1))
5972 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5973 TREE_INT_CST_HIGH (t1),
5974 (TYPE_UNSIGNED (ctype)
5975 && tcode != MULT_EXPR) ? -1 : 1,
5976 TREE_OVERFLOW (t1)))
5977 && !TREE_OVERFLOW (t1))
5978 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5980 /* If these operations "cancel" each other, we have the main
5981 optimizations of this pass, which occur when either constant is a
5982 multiple of the other, in which case we replace this with an
5983 operation of either CODE or TCODE.
5985 If we have an unsigned type that is not a sizetype, we cannot do
5986 this since it will change the result if the original computation
5987 overflowed. */
5988 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5989 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5990 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5991 || (tcode == MULT_EXPR
5992 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5993 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5994 && code != MULT_EXPR)))
5996 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5999 *strict_overflow_p = true;
6000 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6001 fold_convert (ctype,
6002 const_binop (TRUNC_DIV_EXPR,
6003 op1, c, 0)));
6005 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6007 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6008 *strict_overflow_p = true;
6009 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6010 fold_convert (ctype,
6011 const_binop (TRUNC_DIV_EXPR,
6012 c, op1, 0)));
6015 break;
6017 default:
6018 break;
6021 return 0;
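/* The MULT_EXPR special case above, checked standalone (plain C99, with
   operands small enough that nothing overflows): (X * 8) % 4 is always
   zero because the multiplier is a multiple of the modulus.  */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    assert ((x * 8) % 4 == 0);
  return 0;
}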
6024 /* Return a node which has the indicated constant VALUE (either 0 or
6025 1), and is of the indicated TYPE. */
6027 tree
6028 constant_boolean_node (int value, tree type)
6030 if (type == integer_type_node)
6031 return value ? integer_one_node : integer_zero_node;
6032 else if (type == boolean_type_node)
6033 return value ? boolean_true_node : boolean_false_node;
6034 else
6035 return build_int_cst (type, value);
6039 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6040 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6041 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6042 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6043 COND is the first argument to CODE; otherwise (as in the example
6044 given here), it is the second argument. TYPE is the type of the
6045 original expression. Return NULL_TREE if no simplification is
6046 possible. */
6048 static tree
6049 fold_binary_op_with_conditional_arg (enum tree_code code,
6050 tree type, tree op0, tree op1,
6051 tree cond, tree arg, int cond_first_p)
6053 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6054 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6055 tree test, true_value, false_value;
6056 tree lhs = NULL_TREE;
6057 tree rhs = NULL_TREE;
6059 /* This transformation is only worthwhile if we don't have to wrap
6060 arg in a SAVE_EXPR, and the operation can be simplified on at least
6061 one of the branches once it's pushed inside the COND_EXPR. */
6062 if (!TREE_CONSTANT (arg))
6063 return NULL_TREE;
6065 if (TREE_CODE (cond) == COND_EXPR)
6067 test = TREE_OPERAND (cond, 0);
6068 true_value = TREE_OPERAND (cond, 1);
6069 false_value = TREE_OPERAND (cond, 2);
6070 /* If this operand throws an exception, then it does not make
6071 sense to try to perform a logical or arithmetic operation
6072 involving it. */
6073 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6074 lhs = true_value;
6075 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6076 rhs = false_value;
6078 else
6080 tree testtype = TREE_TYPE (cond);
6081 test = cond;
6082 true_value = constant_boolean_node (true, testtype);
6083 false_value = constant_boolean_node (false, testtype);
6086 arg = fold_convert (arg_type, arg);
6087 if (lhs == 0)
6089 true_value = fold_convert (cond_type, true_value);
6090 if (cond_first_p)
6091 lhs = fold_build2 (code, type, true_value, arg);
6092 else
6093 lhs = fold_build2 (code, type, arg, true_value);
6095 if (rhs == 0)
6097 false_value = fold_convert (cond_type, false_value);
6098 if (cond_first_p)
6099 rhs = fold_build2 (code, type, false_value, arg);
6100 else
6101 rhs = fold_build2 (code, type, arg, false_value);
6104 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6105 return fold_convert (type, test);
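/* The distribution performed above, restated as a standalone C99 check
   with arbitrary sample values for a, x and y: pushing the `+' inside
   the conditional evaluates to the same result on both arms.  */
#include <assert.h>

int
main (void)
{
  int a = 7, x = 3, y = -4, b;
  for (b = 0; b <= 1; b++)
    assert ((a + (b ? x : y)) == (b ? a + x : a + y));
  assert ((a + (x < y)) == ((x < y) ? a + 1 : a + 0));
  return 0;
}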
6109 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6111 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6112 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6113 ADDEND is the same as X.
6115 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6116 and finite. The problematic cases are when X is zero, and its mode
6117 has signed zeros. In the case of rounding towards -infinity,
6118 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6119 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6121 bool
6122 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6124 if (!real_zerop (addend))
6125 return false;
6127 /* Don't allow the fold with -fsignaling-nans. */
6128 if (HONOR_SNANS (TYPE_MODE (type)))
6129 return false;
6131 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6132 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6133 return true;
6135 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6136 if (TREE_CODE (addend) == REAL_CST
6137 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6138 negate = !negate;
6140 /* The mode has signed zeros, and we have to honor their sign.
6141 In this situation, there is only one case we can return true for.
6142 X - 0 is the same as X unless rounding towards -infinity is
6143 supported. */
6144 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
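/* Why the signed-zero cases matter, shown standalone.  This sketch
   assumes an IEEE-754 host running in the default round-to-nearest
   mode: X + 0.0 can flip the sign of a zero, while X - 0.0 cannot.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (signbit (x));
  assert (!signbit (x + 0.0));  /* -0 + 0 is +0, so X + 0 is not always X.  */
  assert (signbit (x - 0.0));   /* -0 - 0 stays -0 in round-to-nearest.  */
  return 0;
}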
6147 /* Subroutine of fold() that checks comparisons of built-in math
6148 functions against real constants.
6150 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6151 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6152 is the type of the result and ARG0 and ARG1 are the operands of the
6153 comparison. ARG1 must be a TREE_REAL_CST.
6155 The function returns the constant folded tree if a simplification
6156 can be made, and NULL_TREE otherwise. */
6158 static tree
6159 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6160 tree type, tree arg0, tree arg1)
6162 REAL_VALUE_TYPE c;
6164 if (BUILTIN_SQRT_P (fcode))
6166 tree arg = CALL_EXPR_ARG (arg0, 0);
6167 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6169 c = TREE_REAL_CST (arg1);
6170 if (REAL_VALUE_NEGATIVE (c))
6172 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6173 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6174 return omit_one_operand (type, integer_zero_node, arg);
6176 /* sqrt(x) > y is always true, if y is negative and we
6177 don't care about NaNs, i.e. negative values of x. */
6178 if (code == NE_EXPR || !HONOR_NANS (mode))
6179 return omit_one_operand (type, integer_one_node, arg);
6181 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6182 return fold_build2 (GE_EXPR, type, arg,
6183 build_real (TREE_TYPE (arg), dconst0));
6185 else if (code == GT_EXPR || code == GE_EXPR)
6187 REAL_VALUE_TYPE c2;
6189 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6190 real_convert (&c2, mode, &c2);
6192 if (REAL_VALUE_ISINF (c2))
6194 /* sqrt(x) > y is x == +Inf, when y is very large. */
6195 if (HONOR_INFINITIES (mode))
6196 return fold_build2 (EQ_EXPR, type, arg,
6197 build_real (TREE_TYPE (arg), c2));
6199 /* sqrt(x) > y is always false, when y is very large
6200 and we don't care about infinities. */
6201 return omit_one_operand (type, integer_zero_node, arg);
6204 /* sqrt(x) > c is the same as x > c*c. */
6205 return fold_build2 (code, type, arg,
6206 build_real (TREE_TYPE (arg), c2));
6208 else if (code == LT_EXPR || code == LE_EXPR)
6210 REAL_VALUE_TYPE c2;
6212 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6213 real_convert (&c2, mode, &c2);
6215 if (REAL_VALUE_ISINF (c2))
6217 /* sqrt(x) < y is always true, when y is a very large
6218 value and we don't care about NaNs or Infinities. */
6219 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6220 return omit_one_operand (type, integer_one_node, arg);
6222 /* sqrt(x) < y is x != +Inf when y is very large and we
6223 don't care about NaNs. */
6224 if (! HONOR_NANS (mode))
6225 return fold_build2 (NE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), c2));
6228 /* sqrt(x) < y is x >= 0 when y is very large and we
6229 don't care about Infinities. */
6230 if (! HONOR_INFINITIES (mode))
6231 return fold_build2 (GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg), dconst0));
6234 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6235 if (lang_hooks.decls.global_bindings_p () != 0
6236 || CONTAINS_PLACEHOLDER_P (arg))
6237 return NULL_TREE;
6239 arg = save_expr (arg);
6240 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6241 fold_build2 (GE_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg),
6243 dconst0)),
6244 fold_build2 (NE_EXPR, type, arg,
6245 build_real (TREE_TYPE (arg),
6246 c2)));
6249 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6250 if (! HONOR_NANS (mode))
6251 return fold_build2 (code, type, arg,
6252 build_real (TREE_TYPE (arg), c2));
6254 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6255 if (lang_hooks.decls.global_bindings_p () == 0
6256 && ! CONTAINS_PLACEHOLDER_P (arg))
6258 arg = save_expr (arg);
6259 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6260 fold_build2 (GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 dconst0)),
6263 fold_build2 (code, type, arg,
6264 build_real (TREE_TYPE (arg),
6265 c2)));
6270 return NULL_TREE;
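/* The central sqrt fold, spot-checked standalone for a nonnegative
   constant and nonnegative x (so no NaNs arise).  Assumes an IEEE-754
   double with a correctly rounded sqrt: sqrt(x) > c matches x > c*c.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double c = 3.0, x;
  for (x = 0.0; x <= 20.0; x += 0.25)
    assert ((sqrt (x) > c) == (x > c * c));
  return 0;
}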
6273 /* Subroutine of fold() that optimizes comparisons against Infinities,
6274 either +Inf or -Inf.
6276 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6277 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6278 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6280 The function returns the constant folded tree if a simplification
6281 can be made, and NULL_TREE otherwise. */
6283 static tree
6284 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6286 enum machine_mode mode;
6287 REAL_VALUE_TYPE max;
6288 tree temp;
6289 bool neg;
6291 mode = TYPE_MODE (TREE_TYPE (arg0));
6293 /* For negative infinity swap the sense of the comparison. */
6294 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6295 if (neg)
6296 code = swap_tree_comparison (code);
6298 switch (code)
6300 case GT_EXPR:
6301 /* x > +Inf is always false, if we ignore sNaNs. */
6302 if (HONOR_SNANS (mode))
6303 return NULL_TREE;
6304 return omit_one_operand (type, integer_zero_node, arg0);
6306 case LE_EXPR:
6307 /* x <= +Inf is always true, if we don't care about NaNs. */
6308 if (! HONOR_NANS (mode))
6309 return omit_one_operand (type, integer_one_node, arg0);
6311 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6312 if (lang_hooks.decls.global_bindings_p () == 0
6313 && ! CONTAINS_PLACEHOLDER_P (arg0))
6315 arg0 = save_expr (arg0);
6316 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6318 break;
6320 case EQ_EXPR:
6321 case GE_EXPR:
6322 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6323 real_maxval (&max, neg, mode);
6324 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6325 arg0, build_real (TREE_TYPE (arg0), max));
6327 case LT_EXPR:
6328 /* x < +Inf is always equal to x <= DBL_MAX. */
6329 real_maxval (&max, neg, mode);
6330 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6333 case NE_EXPR:
6334 /* x != +Inf is always equal to !(x > DBL_MAX). */
6335 real_maxval (&max, neg, mode);
6336 if (! HONOR_NANS (mode))
6337 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6338 arg0, build_real (TREE_TYPE (arg0), max));
6340 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6341 arg0, build_real (TREE_TYPE (arg0), max));
6342 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6344 default:
6345 break;
6348 return NULL_TREE;
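/* The DBL_MAX trick above, spot-checked standalone on IEEE-754 doubles
   (NaN excluded, since the code only relies on these identities for
   non-NaN operands): x < +Inf matches x <= DBL_MAX, and x >= +Inf
   matches x > DBL_MAX.  */
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -INFINITY, -1.0, 0.0, DBL_MAX, INFINITY };
  int i;
  for (i = 0; i < 5; i++)
    {
      assert ((vals[i] < INFINITY) == (vals[i] <= DBL_MAX));
      assert ((vals[i] >= INFINITY) == (vals[i] > DBL_MAX));
    }
  return 0;
}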
6351 /* Subroutine of fold() that optimizes comparisons of a division by
6352 a nonzero integer constant against an integer constant, i.e.
6353 X/C1 op C2.
6355 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6356 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6357 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6359 The function returns the constant folded tree if a simplification
6360 can be made, and NULL_TREE otherwise. */
6362 static tree
6363 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6365 tree prod, tmp, hi, lo;
6366 tree arg00 = TREE_OPERAND (arg0, 0);
6367 tree arg01 = TREE_OPERAND (arg0, 1);
6368 unsigned HOST_WIDE_INT lpart;
6369 HOST_WIDE_INT hpart;
6370 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6371 bool neg_overflow;
6372 int overflow;
6374 /* We have to do this the hard way to detect unsigned overflow.
6375 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6376 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6377 TREE_INT_CST_HIGH (arg01),
6378 TREE_INT_CST_LOW (arg1),
6379 TREE_INT_CST_HIGH (arg1),
6380 &lpart, &hpart, unsigned_p);
6381 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6382 -1, overflow);
6383 neg_overflow = false;
6385 if (unsigned_p)
6387 tmp = int_const_binop (MINUS_EXPR, arg01,
6388 build_int_cst (TREE_TYPE (arg01), 1), 0);
6389 lo = prod;
6391 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6392 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6393 TREE_INT_CST_HIGH (prod),
6394 TREE_INT_CST_LOW (tmp),
6395 TREE_INT_CST_HIGH (tmp),
6396 &lpart, &hpart, unsigned_p);
6397 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6398 -1, overflow | TREE_OVERFLOW (prod));
6400 else if (tree_int_cst_sgn (arg01) >= 0)
6402 tmp = int_const_binop (MINUS_EXPR, arg01,
6403 build_int_cst (TREE_TYPE (arg01), 1), 0);
6404 switch (tree_int_cst_sgn (arg1))
6406 case -1:
6407 neg_overflow = true;
6408 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6409 hi = prod;
6410 break;
6412 case 0:
6413 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 hi = tmp;
6415 break;
6417 case 1:
6418 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6419 lo = prod;
6420 break;
6422 default:
6423 gcc_unreachable ();
6426 else
6428 /* A negative divisor reverses the relational operators. */
6429 code = swap_tree_comparison (code);
6431 tmp = int_const_binop (PLUS_EXPR, arg01,
6432 build_int_cst (TREE_TYPE (arg01), 1), 0);
6433 switch (tree_int_cst_sgn (arg1))
6435 case -1:
6436 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6437 lo = prod;
6438 break;
6440 case 0:
6441 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6442 lo = tmp;
6443 break;
6445 case 1:
6446 neg_overflow = true;
6447 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6448 hi = prod;
6449 break;
6451 default:
6452 gcc_unreachable ();
6456 switch (code)
6458 case EQ_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand (type, integer_zero_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2 (GE_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2 (LE_EXPR, type, arg00, hi);
6465 return build_range_check (type, arg00, 1, lo, hi);
6467 case NE_EXPR:
6468 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6469 return omit_one_operand (type, integer_one_node, arg00);
6470 if (TREE_OVERFLOW (hi))
6471 return fold_build2 (LT_EXPR, type, arg00, lo);
6472 if (TREE_OVERFLOW (lo))
6473 return fold_build2 (GT_EXPR, type, arg00, hi);
6474 return build_range_check (type, arg00, 0, lo, hi);
6476 case LT_EXPR:
6477 if (TREE_OVERFLOW (lo))
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand (type, tmp, arg00);
6482 return fold_build2 (LT_EXPR, type, arg00, lo);
6484 case LE_EXPR:
6485 if (TREE_OVERFLOW (hi))
6487 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6488 return omit_one_operand (type, tmp, arg00);
6490 return fold_build2 (LE_EXPR, type, arg00, hi);
6492 case GT_EXPR:
6493 if (TREE_OVERFLOW (hi))
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand (type, tmp, arg00);
6498 return fold_build2 (GT_EXPR, type, arg00, hi);
6500 case GE_EXPR:
6501 if (TREE_OVERFLOW (lo))
6503 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6504 return omit_one_operand (type, tmp, arg00);
6506 return fold_build2 (GE_EXPR, type, arg00, lo);
6508 default:
6509 break;
6512 return NULL_TREE;
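/* The range check that fold_div_compare builds, verified standalone for
   the sample X/4 == 3 with C99's truncating division: it holds exactly
   on [12, 15].  */
#include <assert.h>

int
main (void)
{
  int x;
  for (x = -64; x <= 64; x++)
    assert ((x / 4 == 3) == (x >= 12 && x <= 15));
  return 0;
}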
6516 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6517 equality/inequality test, then return a simplified form of the test
6518 using a sign test.  Otherwise return NULL.  TYPE is the desired
6519 result type. */
6521 static tree
6522 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6523 tree result_type)
6525 /* If this is testing a single bit, we can optimize the test. */
6526 if ((code == NE_EXPR || code == EQ_EXPR)
6527 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6528 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6530 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6531 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6532 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6534 if (arg00 != NULL_TREE
6535 /* This is only a win if casting to a signed type is cheap,
6536 i.e. when arg00's type is not a partial mode. */
6537 && TYPE_PRECISION (TREE_TYPE (arg00))
6538 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6540 tree stype = signed_type_for (TREE_TYPE (arg00));
6541 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6542 result_type, fold_convert (stype, arg00),
6543 build_int_cst (stype, 0));
6547 return NULL_TREE;
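/* The sign test produced above, checked standalone under the usual
   assumptions (32-bit two's complement int): masking with the sign bit
   and testing for nonzero is a signed comparison against zero.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t vals[] = { INT32_MIN, -7, -1, 0, 1, 42, INT32_MAX };
  int i;
  for (i = 0; i < 7; i++)
    {
      uint32_t bits = (uint32_t) vals[i];
      assert (((bits & 0x80000000u) != 0) == (vals[i] < 0));   /* (A & C) != 0 -> A < 0 */
      assert (((bits & 0x80000000u) == 0) == (vals[i] >= 0));  /* (A & C) == 0 -> A >= 0 */
    }
  return 0;
}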
6550 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6551 equality/inequality test, then return a simplified form of
6552 the test using shifts and logical operations. Otherwise return
6553 NULL. TYPE is the desired result type. */
6555 tree
6556 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6557 tree result_type)
6559 /* If this is testing a single bit, we can optimize the test. */
6560 if ((code == NE_EXPR || code == EQ_EXPR)
6561 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6562 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6564 tree inner = TREE_OPERAND (arg0, 0);
6565 tree type = TREE_TYPE (arg0);
6566 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6567 enum machine_mode operand_mode = TYPE_MODE (type);
6568 int ops_unsigned;
6569 tree signed_type, unsigned_type, intermediate_type;
6570 tree tem, one;
6572 /* First, see if we can fold the single bit test into a sign-bit
6573 test. */
6574 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6575 result_type);
6576 if (tem)
6577 return tem;
6579 /* Otherwise we have (A & C) != 0 where C is a single bit,
6580 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6581 Similarly for (A & C) == 0. */
6583 /* If INNER is a right shift of a constant and it plus BITNUM does
6584 not overflow, adjust BITNUM and INNER. */
6585 if (TREE_CODE (inner) == RSHIFT_EXPR
6586 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6587 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6588 && bitnum < TYPE_PRECISION (type)
6589 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6590 bitnum - TYPE_PRECISION (type)))
6592 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6593 inner = TREE_OPERAND (inner, 0);
6596 /* If we are going to be able to omit the AND below, we must do our
6597 operations as unsigned. If we must use the AND, we have a choice.
6598 Normally unsigned is faster, but for some machines signed is. */
6599 #ifdef LOAD_EXTEND_OP
6600 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6601 && !flag_syntax_only) ? 0 : 1;
6602 #else
6603 ops_unsigned = 1;
6604 #endif
6606 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6607 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6608 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6609 inner = fold_convert (intermediate_type, inner);
6611 if (bitnum != 0)
6612 inner = build2 (RSHIFT_EXPR, intermediate_type,
6613 inner, size_int (bitnum));
6615 one = build_int_cst (intermediate_type, 1);
6617 if (code == EQ_EXPR)
6618 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6620 /* Put the AND last so it can combine with more things. */
6621 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6623 /* Make sure to return the proper type. */
6624 inner = fold_convert (result_type, inner);
6626 return inner;
6628 return NULL_TREE;
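/* The shift-and-mask form built above, checked standalone in plain C99
   on unsigned operands: (A & (1 << n)) != 0 becomes (A >> n) & 1, and
   the EQ_EXPR variant adds the XOR with 1.  */
#include <assert.h>

int
main (void)
{
  unsigned a, n;
  for (a = 0; a < 64; a++)
    for (n = 0; n < 6; n++)
      {
        assert (((a & (1u << n)) != 0) == ((a >> n) & 1u));
        assert (((a & (1u << n)) == 0) == (((a >> n) ^ 1u) & 1u));
      }
  return 0;
}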
6631 /* Check whether we are allowed to reorder operands arg0 and arg1,
6632 such that the evaluation of arg1 occurs before arg0. */
6634 static bool
6635 reorder_operands_p (const_tree arg0, const_tree arg1)
6637 if (! flag_evaluation_order)
6638 return true;
6639 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6640 return true;
6641 return ! TREE_SIDE_EFFECTS (arg0)
6642 && ! TREE_SIDE_EFFECTS (arg1);
6645 /* Test whether it is preferable to swap two operands, ARG0 and
6646 ARG1, for example because ARG0 is an integer constant and ARG1
6647 isn't. If REORDER is true, only recommend swapping if we can
6648 evaluate the operands in reverse order. */
6650 bool
6651 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6653 STRIP_SIGN_NOPS (arg0);
6654 STRIP_SIGN_NOPS (arg1);
6656 if (TREE_CODE (arg1) == INTEGER_CST)
6657 return 0;
6658 if (TREE_CODE (arg0) == INTEGER_CST)
6659 return 1;
6661 if (TREE_CODE (arg1) == REAL_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == REAL_CST)
6664 return 1;
6666 if (TREE_CODE (arg1) == FIXED_CST)
6667 return 0;
6668 if (TREE_CODE (arg0) == FIXED_CST)
6669 return 1;
6671 if (TREE_CODE (arg1) == COMPLEX_CST)
6672 return 0;
6673 if (TREE_CODE (arg0) == COMPLEX_CST)
6674 return 1;
6676 if (TREE_CONSTANT (arg1))
6677 return 0;
6678 if (TREE_CONSTANT (arg0))
6679 return 1;
6681 if (optimize_function_for_size_p (cfun))
6682 return 0;
6684 if (reorder && flag_evaluation_order
6685 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6686 return 0;
6688 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6689 for commutative and comparison operators. Ensuring a canonical
6690 form allows the optimizers to find additional redundancies without
6691 having to explicitly check for both orderings. */
6692 if (TREE_CODE (arg0) == SSA_NAME
6693 && TREE_CODE (arg1) == SSA_NAME
6694 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6695 return 1;
6697 /* Put SSA_NAMEs last. */
6698 if (TREE_CODE (arg1) == SSA_NAME)
6699 return 0;
6700 if (TREE_CODE (arg0) == SSA_NAME)
6701 return 1;
6703 /* Put variables last. */
6704 if (DECL_P (arg1))
6705 return 0;
6706 if (DECL_P (arg0))
6707 return 1;
6709 return 0;
6712 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6713 ARG0 is extended to a wider type. */
6715 static tree
6716 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6718 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6719 tree arg1_unw;
6720 tree shorter_type, outer_type;
6721 tree min, max;
6722 bool above, below;
6724 if (arg0_unw == arg0)
6725 return NULL_TREE;
6726 shorter_type = TREE_TYPE (arg0_unw);
6728 #ifdef HAVE_canonicalize_funcptr_for_compare
6729 /* Disable this optimization if we're casting a function pointer
6730 type on targets that require function pointer canonicalization. */
6731 if (HAVE_canonicalize_funcptr_for_compare
6732 && TREE_CODE (shorter_type) == POINTER_TYPE
6733 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6734 return NULL_TREE;
6735 #endif
6737 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6738 return NULL_TREE;
6740 arg1_unw = get_unwidened (arg1, NULL_TREE);
6742 /* If possible, express the comparison in the shorter mode. */
6743 if ((code == EQ_EXPR || code == NE_EXPR
6744 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6745 && (TREE_TYPE (arg1_unw) == shorter_type
6746 || ((TYPE_PRECISION (shorter_type)
6747 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6748 && (TYPE_UNSIGNED (shorter_type)
6749 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6750 || (TREE_CODE (arg1_unw) == INTEGER_CST
6751 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6752 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6753 && int_fits_type_p (arg1_unw, shorter_type))))
6754 return fold_build2 (code, type, arg0_unw,
6755 fold_convert (shorter_type, arg1_unw));
6757 if (TREE_CODE (arg1_unw) != INTEGER_CST
6758 || TREE_CODE (shorter_type) != INTEGER_TYPE
6759 || !int_fits_type_p (arg1_unw, shorter_type))
6760 return NULL_TREE;
6762 /* If we are comparing with an integer that does not fit into the range
6763 of the shorter type, the result is known. */
6764 outer_type = TREE_TYPE (arg1_unw);
6765 min = lower_bound_in_type (outer_type, shorter_type);
6766 max = upper_bound_in_type (outer_type, shorter_type);
6768 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6769 max, arg1_unw));
6770 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6771 arg1_unw, min));
6773 switch (code)
6775 case EQ_EXPR:
6776 if (above || below)
6777 return omit_one_operand (type, integer_zero_node, arg0);
6778 break;
6780 case NE_EXPR:
6781 if (above || below)
6782 return omit_one_operand (type, integer_one_node, arg0);
6783 break;
6785 case LT_EXPR:
6786 case LE_EXPR:
6787 if (above)
6788 return omit_one_operand (type, integer_one_node, arg0);
6789 else if (below)
6790 return omit_one_operand (type, integer_zero_node, arg0);
6792 case GT_EXPR:
6793 case GE_EXPR:
6794 if (above)
6795 return omit_one_operand (type, integer_zero_node, arg0);
6796 else if (below)
6797 return omit_one_operand (type, integer_one_node, arg0);
6799 default:
6800 break;
6803 return NULL_TREE;
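/* The known-result case above, spot-checked standalone with an 8-bit
   signed char widened to int: no widened value can reach a constant
   outside [-128, 127], so the comparison folds to a constant.  */
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int c;
  for (c = SCHAR_MIN; c <= SCHAR_MAX; c++)
    {
      signed char x = (signed char) c;
      assert (((int) x == 300) == 0);  /* EQ_EXPR: always false.  */
      assert (((int) x < 300) == 1);   /* LT_EXPR: always true.  */
    }
  return 0;
}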
6806 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6807 ARG0 just the signedness is changed. */
6809 static tree
6810 fold_sign_changed_comparison (enum tree_code code, tree type,
6811 tree arg0, tree arg1)
6813 tree arg0_inner;
6814 tree inner_type, outer_type;
6816 if (!CONVERT_EXPR_P (arg0))
6817 return NULL_TREE;
6819 outer_type = TREE_TYPE (arg0);
6820 arg0_inner = TREE_OPERAND (arg0, 0);
6821 inner_type = TREE_TYPE (arg0_inner);
6823 #ifdef HAVE_canonicalize_funcptr_for_compare
6824 /* Disable this optimization if we're casting a function pointer
6825 type on targets that require function pointer canonicalization. */
6826 if (HAVE_canonicalize_funcptr_for_compare
6827 && TREE_CODE (inner_type) == POINTER_TYPE
6828 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6829 return NULL_TREE;
6830 #endif
6832 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6833 return NULL_TREE;
6835 /* If the conversion is from an integral subtype to its basetype
6836 leave it alone. */
6837 if (TREE_TYPE (inner_type) == outer_type)
6838 return NULL_TREE;
6840 if (TREE_CODE (arg1) != INTEGER_CST
6841 && !(CONVERT_EXPR_P (arg1)
6842 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6843 return NULL_TREE;
6845 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6846 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6847 && code != NE_EXPR
6848 && code != EQ_EXPR)
6849 return NULL_TREE;
6851 if (TREE_CODE (arg1) == INTEGER_CST)
6852 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6853 TREE_INT_CST_HIGH (arg1), 0,
6854 TREE_OVERFLOW (arg1));
6855 else
6856 arg1 = fold_convert (inner_type, arg1);
6858 return fold_build2 (code, type, arg0_inner, arg1);
6861 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6862 step of the array. Reconstructs s and delta in the case of s * delta
6863 being an integer constant (and thus already folded).
6864 ADDR is the address.  OP1 is the multiplicative expression.
6865 If the function succeeds, the new address expression is returned. Otherwise
6866 NULL_TREE is returned. */
6868 static tree
6869 try_move_mult_to_index (tree addr, tree op1)
6871 tree s, delta, step;
6872 tree ref = TREE_OPERAND (addr, 0), pref;
6873 tree ret, pos;
6874 tree itype;
6875 bool mdim = false;
6877 /* Strip the nops that might be added when converting op1 to sizetype. */
6878 STRIP_NOPS (op1);
6880 /* Canonicalize op1 into a possibly non-constant delta
6881 and an INTEGER_CST s. */
6882 if (TREE_CODE (op1) == MULT_EXPR)
6884 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6886 STRIP_NOPS (arg0);
6887 STRIP_NOPS (arg1);
6889 if (TREE_CODE (arg0) == INTEGER_CST)
6891 s = arg0;
6892 delta = arg1;
6894 else if (TREE_CODE (arg1) == INTEGER_CST)
6896 s = arg1;
6897 delta = arg0;
6899 else
6900 return NULL_TREE;
6902 else if (TREE_CODE (op1) == INTEGER_CST)
6904 delta = op1;
6905 s = NULL_TREE;
6907 else
6909 /* Treat op1 as delta * 1. */
6910 delta = op1;
6911 s = integer_one_node;
6914 for (;; ref = TREE_OPERAND (ref, 0))
6916 if (TREE_CODE (ref) == ARRAY_REF)
6918 /* Remember if this was a multi-dimensional array. */
6919 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6920 mdim = true;
6922 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6923 if (! itype)
6924 continue;
6926 step = array_ref_element_size (ref);
6927 if (TREE_CODE (step) != INTEGER_CST)
6928 continue;
6930 if (s)
6932 if (! tree_int_cst_equal (step, s))
6933 continue;
6935 else
6937 /* Check whether delta is a multiple of step. */
6938 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6939 if (! tmp)
6940 continue;
6941 delta = tmp;
6944 /* Only fold here if we can verify we do not overflow one
6945 dimension of a multi-dimensional array. */
6946 if (mdim)
6948 tree tmp;
6950 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6951 || !INTEGRAL_TYPE_P (itype)
6952 || !TYPE_MAX_VALUE (itype)
6953 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6954 continue;
6956 tmp = fold_binary (PLUS_EXPR, itype,
6957 fold_convert (itype,
6958 TREE_OPERAND (ref, 1)),
6959 fold_convert (itype, delta));
6960 if (!tmp
6961 || TREE_CODE (tmp) != INTEGER_CST
6962 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6963 continue;
6966 break;
6968 else
6969 mdim = false;
6971 if (!handled_component_p (ref))
6972 return NULL_TREE;
6975 /* We found a suitable array reference.  So copy everything up to it,
6976 and replace the index. */
6978 pref = TREE_OPERAND (addr, 0);
6979 ret = copy_node (pref);
6980 pos = ret;
6982 while (pref != ref)
6984 pref = TREE_OPERAND (pref, 0);
6985 TREE_OPERAND (pos, 0) = copy_node (pref);
6986 pos = TREE_OPERAND (pos, 0);
6989 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6990 fold_convert (itype,
6991 TREE_OPERAND (pos, 1)),
6992 fold_convert (itype, delta));
6994 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
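/* The address identity behind this fold, demonstrated standalone:
   stepping &a[idx] by delta * (element size) bytes lands exactly on
   &a[idx + delta].  Plain C99 with a toy array.  */
#include <assert.h>

int
main (void)
{
  int a[10];
  char *p = (char *) &a[2];
  assert ((int *) (p + 3 * sizeof (a[0])) == &a[2 + 3]);
  return 0;
}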
6998 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6999 means A >= Y && A != MAX, but in this case we know that
7000 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7002 static tree
7003 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7005 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7007 if (TREE_CODE (bound) == LT_EXPR)
7008 a = TREE_OPERAND (bound, 0);
7009 else if (TREE_CODE (bound) == GT_EXPR)
7010 a = TREE_OPERAND (bound, 1);
7011 else
7012 return NULL_TREE;
7014 typea = TREE_TYPE (a);
7015 if (!INTEGRAL_TYPE_P (typea)
7016 && !POINTER_TYPE_P (typea))
7017 return NULL_TREE;
7019 if (TREE_CODE (ineq) == LT_EXPR)
7021 a1 = TREE_OPERAND (ineq, 1);
7022 y = TREE_OPERAND (ineq, 0);
7024 else if (TREE_CODE (ineq) == GT_EXPR)
7026 a1 = TREE_OPERAND (ineq, 0);
7027 y = TREE_OPERAND (ineq, 1);
7029 else
7030 return NULL_TREE;
7032 if (TREE_TYPE (a1) != typea)
7033 return NULL_TREE;
7035 if (POINTER_TYPE_P (typea))
7037 /* Convert the pointer types into integer before taking the difference. */
7038 tree ta = fold_convert (ssizetype, a);
7039 tree ta1 = fold_convert (ssizetype, a1);
7040 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7042 else
7043 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7045 if (!diff || !integer_onep (diff))
7046 return NULL_TREE;
7048 return fold_build2 (GE_EXPR, type, a, y);
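/* The integer identity used above, verified standalone on a range where
   A + 1 cannot overflow (exactly what the bound A < X guarantees):
   A + 1 > Y is the same as A >= Y.  */
#include <assert.h>

int
main (void)
{
  int a, y;
  for (a = -10; a <= 10; a++)
    for (y = -10; y <= 10; y++)
      assert ((a + 1 > y) == (a >= y));
  return 0;
}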
7051 /* Fold a sum or difference of at least one multiplication.
7052 Returns the folded tree or NULL if no simplification could be made. */
7054 static tree
7055 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7057 tree arg00, arg01, arg10, arg11;
7058 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7060 /* (A * C) +- (B * C) -> (A+-B) * C.
7061 (A * C) +- A -> A * (C+-1).
7062 We are most concerned about the case where C is a constant,
7063 but other combinations show up during loop reduction. Since
7064 it is not difficult, try all four possibilities. */
7066 if (TREE_CODE (arg0) == MULT_EXPR)
7068 arg00 = TREE_OPERAND (arg0, 0);
7069 arg01 = TREE_OPERAND (arg0, 1);
7071 else if (TREE_CODE (arg0) == INTEGER_CST)
7073 arg00 = build_one_cst (type);
7074 arg01 = arg0;
7076 else
7078 /* We cannot generate constant 1 for fract. */
7079 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7080 return NULL_TREE;
7081 arg00 = arg0;
7082 arg01 = build_one_cst (type);
7084 if (TREE_CODE (arg1) == MULT_EXPR)
7086 arg10 = TREE_OPERAND (arg1, 0);
7087 arg11 = TREE_OPERAND (arg1, 1);
7089 else if (TREE_CODE (arg1) == INTEGER_CST)
7091 arg10 = build_one_cst (type);
7092 arg11 = arg1;
7094 else
7096 /* We cannot generate constant 1 for fract. */
7097 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7098 return NULL_TREE;
7099 arg10 = arg1;
7100 arg11 = build_one_cst (type);
7102 same = NULL_TREE;
7104 if (operand_equal_p (arg01, arg11, 0))
7105 same = arg01, alt0 = arg00, alt1 = arg10;
7106 else if (operand_equal_p (arg00, arg10, 0))
7107 same = arg00, alt0 = arg01, alt1 = arg11;
7108 else if (operand_equal_p (arg00, arg11, 0))
7109 same = arg00, alt0 = arg01, alt1 = arg10;
7110 else if (operand_equal_p (arg01, arg10, 0))
7111 same = arg01, alt0 = arg00, alt1 = arg11;
7113 /* No identical multiplicands; see if we can find a common
7114 power-of-two factor in non-power-of-two multiplies. This
7115 can help in multi-dimensional array access. */
7116 else if (host_integerp (arg01, 0)
7117 && host_integerp (arg11, 0))
7119 HOST_WIDE_INT int01, int11, tmp;
7120 bool swap = false;
7121 tree maybe_same;
7122 int01 = TREE_INT_CST_LOW (arg01);
7123 int11 = TREE_INT_CST_LOW (arg11);
7125 /* Move min of absolute values to int11. */
7126 if ((int01 >= 0 ? int01 : -int01)
7127 < (int11 >= 0 ? int11 : -int11))
7129 tmp = int01, int01 = int11, int11 = tmp;
7130 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7131 maybe_same = arg01;
7132 swap = true;
7134 else
7135 maybe_same = arg11;
7137 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7139 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7140 build_int_cst (TREE_TYPE (arg00),
7141 int01 / int11));
7142 alt1 = arg10;
7143 same = maybe_same;
7144 if (swap)
7145 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7149 if (same)
7150 return fold_build2 (MULT_EXPR, type,
7151 fold_build2 (code, type,
7152 fold_convert (type, alt0),
7153 fold_convert (type, alt1)),
7154 fold_convert (type, same));
7156 return NULL_TREE;
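/* Both factoring cases above, spot-checked standalone on an
   overflow-free range: identical multiplicands factor directly, and a
   power-of-two coefficient that divides the other can be split out
   (here 24 = 3 * 8).  */
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = -6; a <= 6; a++)
    for (b = -6; b <= 6; b++)
      {
        assert (a * 7 + b * 7 == (a + b) * 7);       /* (A * C) + (B * C) -> (A + B) * C */
        assert (a * 24 + b * 8 == (a * 3 + b) * 8);  /* common power-of-two factor */
      }
  return 0;
}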
7159 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7160 specified by EXPR into the buffer PTR of length LEN bytes.
7161 Return the number of bytes placed in the buffer, or zero
7162 upon failure. */
7164 static int
7165 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7167 tree type = TREE_TYPE (expr);
7168 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7169 int byte, offset, word, words;
7170 unsigned char value;
7172 if (total_bytes > len)
7173 return 0;
7174 words = total_bytes / UNITS_PER_WORD;
7176 for (byte = 0; byte < total_bytes; byte++)
7178 int bitpos = byte * BITS_PER_UNIT;
7179 if (bitpos < HOST_BITS_PER_WIDE_INT)
7180 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7181 else
7182 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7183 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7185 if (total_bytes > UNITS_PER_WORD)
7187 word = byte / UNITS_PER_WORD;
7188 if (WORDS_BIG_ENDIAN)
7189 word = (words - 1) - word;
7190 offset = word * UNITS_PER_WORD;
7191 if (BYTES_BIG_ENDIAN)
7192 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7193 else
7194 offset += byte % UNITS_PER_WORD;
7196 else
7197 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7198 ptr[offset] = value;
7200 return total_bytes;
7204 /* Subroutine of native_encode_expr. Encode the REAL_CST
7205 specified by EXPR into the buffer PTR of length LEN bytes.
7206 Return the number of bytes placed in the buffer, or zero
7207 upon failure. */
7209 static int
7210 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7212 tree type = TREE_TYPE (expr);
7213 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7214 int byte, offset, word, words, bitpos;
7215 unsigned char value;
7217 /* There are always 32 bits in each long, no matter the size of
7218 the host's long.  We handle floating point representations with
7219 up to 192 bits. */
7220 long tmp[6];
7222 if (total_bytes > len)
7223 return 0;
7224 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7226 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7228 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7229 bitpos += BITS_PER_UNIT)
7231 byte = (bitpos / BITS_PER_UNIT) & 3;
7232 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7234 if (UNITS_PER_WORD < 4)
7236 word = byte / UNITS_PER_WORD;
7237 if (WORDS_BIG_ENDIAN)
7238 word = (words - 1) - word;
7239 offset = word * UNITS_PER_WORD;
7240 if (BYTES_BIG_ENDIAN)
7241 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7242 else
7243 offset += byte % UNITS_PER_WORD;
7245 else
7246 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7247 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7249 return total_bytes;
7252 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7253 specified by EXPR into the buffer PTR of length LEN bytes.
7254 Return the number of bytes placed in the buffer, or zero
7255 upon failure. */
7257 static int
7258 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7260 int rsize, isize;
7261 tree part;
7263 part = TREE_REALPART (expr);
7264 rsize = native_encode_expr (part, ptr, len);
7265 if (rsize == 0)
7266 return 0;
7267 part = TREE_IMAGPART (expr);
7268 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7269 if (isize != rsize)
7270 return 0;
7271 return rsize + isize;
7275 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7276 specified by EXPR into the buffer PTR of length LEN bytes.
7277 Return the number of bytes placed in the buffer, or zero
7278 upon failure. */
7280 static int
7281 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7283 int i, size, offset, count;
7284 tree itype, elem, elements;
7286 offset = 0;
7287 elements = TREE_VECTOR_CST_ELTS (expr);
7288 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7289 itype = TREE_TYPE (TREE_TYPE (expr));
7290 size = GET_MODE_SIZE (TYPE_MODE (itype));
7291 for (i = 0; i < count; i++)
7293 if (elements)
7295 elem = TREE_VALUE (elements);
7296 elements = TREE_CHAIN (elements);
7298 else
7299 elem = NULL_TREE;
7301 if (elem)
7303 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7304 return 0;
7306 else
7308 if (offset + size > len)
7309 return 0;
7310 memset (ptr+offset, 0, size);
7312 offset += size;
7314 return offset;
7318 /* Subroutine of native_encode_expr. Encode the STRING_CST
7319 specified by EXPR into the buffer PTR of length LEN bytes.
7320 Return the number of bytes placed in the buffer, or zero
7321 upon failure. */
7323 static int
7324 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7326 tree type = TREE_TYPE (expr);
7327 HOST_WIDE_INT total_bytes;
7329 if (TREE_CODE (type) != ARRAY_TYPE
7330 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7331 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7332 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7333 return 0;
7334 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7335 if (total_bytes > len)
7336 return 0;
7337 if (TREE_STRING_LENGTH (expr) < total_bytes)
7339 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7340 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7341 total_bytes - TREE_STRING_LENGTH (expr));
7343 else
7344 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7345 return total_bytes;
7349 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7350 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7351 buffer PTR of length LEN bytes. Return the number of bytes
7352 placed in the buffer, or zero upon failure. */
7354 int
7355 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7357 switch (TREE_CODE (expr))
7359 case INTEGER_CST:
7360 return native_encode_int (expr, ptr, len);
7362 case REAL_CST:
7363 return native_encode_real (expr, ptr, len);
7365 case COMPLEX_CST:
7366 return native_encode_complex (expr, ptr, len);
7368 case VECTOR_CST:
7369 return native_encode_vector (expr, ptr, len);
7371 case STRING_CST:
7372 return native_encode_string (expr, ptr, len);
7374 default:
7375 return 0;
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7384 static tree
7385 native_interpret_int (tree type, const unsigned char *ptr, int len)
7387 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7388 int byte, offset, word, words;
7389 unsigned char value;
7390 unsigned HOST_WIDE_INT lo = 0;
7391 HOST_WIDE_INT hi = 0;
7393 if (total_bytes > len)
7394 return NULL_TREE;
7395 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7396 return NULL_TREE;
7397 words = total_bytes / UNITS_PER_WORD;
7399 for (byte = 0; byte < total_bytes; byte++)
7401 int bitpos = byte * BITS_PER_UNIT;
7402 if (total_bytes > UNITS_PER_WORD)
7404 word = byte / UNITS_PER_WORD;
7405 if (WORDS_BIG_ENDIAN)
7406 word = (words - 1) - word;
7407 offset = word * UNITS_PER_WORD;
7408 if (BYTES_BIG_ENDIAN)
7409 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7410 else
7411 offset += byte % UNITS_PER_WORD;
7413 else
7414 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7415 value = ptr[offset];
7417 if (bitpos < HOST_BITS_PER_WIDE_INT)
7418 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7419 else
7420 hi |= (unsigned HOST_WIDE_INT) value
7421 << (bitpos - HOST_BITS_PER_WIDE_INT);
7424 return build_int_cst_wide_type (type, lo, hi);
7428 /* Subroutine of native_interpret_expr. Interpret the contents of
7429 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7430 If the buffer cannot be interpreted, return NULL_TREE. */
7432 static tree
7433 native_interpret_real (tree type, const unsigned char *ptr, int len)
7435 enum machine_mode mode = TYPE_MODE (type);
7436 int total_bytes = GET_MODE_SIZE (mode);
7437 int byte, offset, word, words, bitpos;
7438 unsigned char value;
7439 /* There are always 32 bits in each long, no matter the size of
7440 the host's long.  We handle floating point representations with
7441 up to 192 bits. */
7442 REAL_VALUE_TYPE r;
7443 long tmp[6];
7445 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7446 if (total_bytes > len || total_bytes > 24)
7447 return NULL_TREE;
7448 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7450 memset (tmp, 0, sizeof (tmp));
7451 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7452 bitpos += BITS_PER_UNIT)
7454 byte = (bitpos / BITS_PER_UNIT) & 3;
7455 if (UNITS_PER_WORD < 4)
7457 word = byte / UNITS_PER_WORD;
7458 if (WORDS_BIG_ENDIAN)
7459 word = (words - 1) - word;
7460 offset = word * UNITS_PER_WORD;
7461 if (BYTES_BIG_ENDIAN)
7462 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7463 else
7464 offset += byte % UNITS_PER_WORD;
7466 else
7467 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7468 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7470 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7473 real_from_target (&r, tmp, mode);
7474 return build_real (type, r);
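/* Illustrative example (assuming a host with UNITS_PER_WORD >= 4 and an
   IEEE single-precision target): the 4-byte little-endian buffer
   { 0x00, 0x00, 0x80, 0x3f } is packed into tmp[0] == 0x3f800000, from
   which real_from_target reconstructs the REAL_CST 1.0.  */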
7478 /* Subroutine of native_interpret_expr. Interpret the contents of
7479 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7480 If the buffer cannot be interpreted, return NULL_TREE. */
7482 static tree
7483 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7485 tree etype, rpart, ipart;
7486 int size;
7488 etype = TREE_TYPE (type);
7489 size = GET_MODE_SIZE (TYPE_MODE (etype));
7490 if (size * 2 > len)
7491 return NULL_TREE;
7492 rpart = native_interpret_expr (etype, ptr, size);
7493 if (!rpart)
7494 return NULL_TREE;
7495 ipart = native_interpret_expr (etype, ptr+size, size);
7496 if (!ipart)
7497 return NULL_TREE;
7498 return build_complex (type, rpart, ipart);
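/* The buffer layout assumed here is the real part followed by the
   imaginary part, each occupying GET_MODE_SIZE bytes of the element
   mode; e.g. an 8-byte complex float is decoded as 4 bytes of real
   part at PTR and 4 bytes of imaginary part at PTR + 4.  */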
7502 /* Subroutine of native_interpret_expr. Interpret the contents of
7503 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7504 If the buffer cannot be interpreted, return NULL_TREE. */
7506 static tree
7507 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7509 tree etype, elem, elements;
7510 int i, size, count;
7512 etype = TREE_TYPE (type);
7513 size = GET_MODE_SIZE (TYPE_MODE (etype));
7514 count = TYPE_VECTOR_SUBPARTS (type);
7515 if (size * count > len)
7516 return NULL_TREE;
7518 elements = NULL_TREE;
7519 for (i = count - 1; i >= 0; i--)
7521 elem = native_interpret_expr (etype, ptr+(i*size), size);
7522 if (!elem)
7523 return NULL_TREE;
7524 elements = tree_cons (NULL_TREE, elem, elements);
7526 return build_vector (type, elements);
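/* Note: the loop walks the elements backwards so that consing onto
   ELEMENTS leaves the TREE_LIST in ascending element order for
   build_vector.  E.g. a four-element vector of 4-byte ints is decoded
   as four independent INTEGER_CSTs at offsets 0, 4, 8 and 12.  */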
7530 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7531 the buffer PTR of length LEN as a constant of type TYPE. For
7532 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7533 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7534 return NULL_TREE. */
7536 tree
7537 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7539 switch (TREE_CODE (type))
7541 case INTEGER_TYPE:
7542 case ENUMERAL_TYPE:
7543 case BOOLEAN_TYPE:
7544 return native_interpret_int (type, ptr, len);
7546 case REAL_TYPE:
7547 return native_interpret_real (type, ptr, len);
7549 case COMPLEX_TYPE:
7550 return native_interpret_complex (type, ptr, len);
7552 case VECTOR_TYPE:
7553 return native_interpret_vector (type, ptr, len);
7555 default:
7556 return NULL_TREE;
7561 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7562 TYPE at compile-time. If we're unable to perform the conversion
7563 return NULL_TREE. */
7565 static tree
7566 fold_view_convert_expr (tree type, tree expr)
7568 /* We support up to 512-bit values (for V8DFmode). */
7569 unsigned char buffer[64];
7570 int len;
7572 /* Check that the host and target are sane. */
7573 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7574 return NULL_TREE;
7576 len = native_encode_expr (expr, buffer, sizeof (buffer));
7577 if (len == 0)
7578 return NULL_TREE;
7580 return native_interpret_expr (type, buffer, len);
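/* Illustrative example: VIEW_CONVERT_EXPR<int>(1.0f) round-trips the
   float through its target byte image, so on an IEEE target it folds to
   the INTEGER_CST 0x3f800000 (1065353216).  If either direction fails
   (an unsupported tree code, or a mode too wide for the buffer),
   NULL_TREE is returned and the expression is left alone.  */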
7583 /* Build an expression for the address of T. Folds away INDIRECT_REF
7584 to avoid confusing the gimplify process. When IN_FOLD is true
7585 avoid modifications of T. */
7587 static tree
7588 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7590 /* The size of the object is not relevant when talking about its address. */
7591 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7592 t = TREE_OPERAND (t, 0);
7594   /* Note: this does not apply to ALIGN_INDIRECT_REF.  */
7595 if (TREE_CODE (t) == INDIRECT_REF
7596 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7598 t = TREE_OPERAND (t, 0);
7600 if (TREE_TYPE (t) != ptrtype)
7601 t = build1 (NOP_EXPR, ptrtype, t);
7603 else if (!in_fold)
7605 tree base = t;
7607 while (handled_component_p (base))
7608 base = TREE_OPERAND (base, 0);
7610 if (DECL_P (base))
7611 TREE_ADDRESSABLE (base) = 1;
7613 t = build1 (ADDR_EXPR, ptrtype, t);
7615 else
7616 t = build1 (ADDR_EXPR, ptrtype, t);
7618 return t;
7621 /* Build an expression for the address of T with type PTRTYPE. This
7622 function modifies the input parameter 'T' by sometimes setting the
7623 TREE_ADDRESSABLE flag. */
7625 tree
7626 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7628 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7631 /* Build an expression for the address of T. This function modifies
7632 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7633 flag. When called from fold functions, use fold_addr_expr instead. */
7635 tree
7636 build_fold_addr_expr (tree t)
7638 return build_fold_addr_expr_with_type_1 (t,
7639 build_pointer_type (TREE_TYPE (t)),
7640 false);
7643 /* Same as build_fold_addr_expr, builds an expression for the address
7644 of T, but avoids touching the input node 't'. Fold functions
7645 should use this version. */
7647 static tree
7648 fold_addr_expr (tree t)
7650 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7652 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7655 /* Fold a unary expression of code CODE and type TYPE with operand
7656 OP0. Return the folded expression if folding is successful.
7657 Otherwise, return NULL_TREE. */
7659 tree
7660 fold_unary (enum tree_code code, tree type, tree op0)
7662 tree tem;
7663 tree arg0;
7664 enum tree_code_class kind = TREE_CODE_CLASS (code);
7666 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7667 && TREE_CODE_LENGTH (code) == 1);
7669 arg0 = op0;
7670 if (arg0)
7672 if (CONVERT_EXPR_CODE_P (code)
7673 || code == FLOAT_EXPR || code == ABS_EXPR)
7675 /* Don't use STRIP_NOPS, because signedness of argument type
7676 matters. */
7677 STRIP_SIGN_NOPS (arg0);
7679 else
7681 /* Strip any conversions that don't change the mode. This
7682 is safe for every expression, except for a comparison
7683 expression because its signedness is derived from its
7684 operands.
7686 Note that this is done as an internal manipulation within
7687 the constant folder, in order to find the simplest
7688 representation of the arguments so that their form can be
7689 	 studied.  In any case, the appropriate type conversions
7690 should be put back in the tree that will get out of the
7691 constant folder. */
7692 STRIP_NOPS (arg0);
7696 if (TREE_CODE_CLASS (code) == tcc_unary)
7698 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7699 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7700 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7701 else if (TREE_CODE (arg0) == COND_EXPR)
7703 tree arg01 = TREE_OPERAND (arg0, 1);
7704 tree arg02 = TREE_OPERAND (arg0, 2);
7705 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7706 arg01 = fold_build1 (code, type, arg01);
7707 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7708 arg02 = fold_build1 (code, type, arg02);
7709 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7710 arg01, arg02);
7712       /* If this was a conversion, and all we did was to move it
7713 	 inside the COND_EXPR, bring it back out.  But leave it if
7714 it is a conversion from integer to integer and the
7715 result precision is no wider than a word since such a
7716 conversion is cheap and may be optimized away by combine,
7717 while it couldn't if it were outside the COND_EXPR. Then return
7718 so we don't get into an infinite recursion loop taking the
7719 conversion out and then back in. */
7721 if ((CONVERT_EXPR_CODE_P (code)
7722 || code == NON_LVALUE_EXPR)
7723 && TREE_CODE (tem) == COND_EXPR
7724 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7725 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7726 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7727 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7728 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7729 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7730 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7731 && (INTEGRAL_TYPE_P
7732 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7733 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7734 || flag_syntax_only))
7735 tem = build1 (code, type,
7736 build3 (COND_EXPR,
7737 TREE_TYPE (TREE_OPERAND
7738 (TREE_OPERAND (tem, 1), 0)),
7739 TREE_OPERAND (tem, 0),
7740 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7741 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7742 return tem;
7744 else if (COMPARISON_CLASS_P (arg0))
7746 if (TREE_CODE (type) == BOOLEAN_TYPE)
7748 arg0 = copy_node (arg0);
7749 TREE_TYPE (arg0) = type;
7750 return arg0;
7752 else if (TREE_CODE (type) != INTEGER_TYPE)
7753 return fold_build3 (COND_EXPR, type, arg0,
7754 fold_build1 (code, type,
7755 integer_one_node),
7756 fold_build1 (code, type,
7757 integer_zero_node));
7761 switch (code)
7763 case PAREN_EXPR:
7764 /* Re-association barriers around constants and other re-association
7765 barriers can be removed. */
7766 if (CONSTANT_CLASS_P (op0)
7767 || TREE_CODE (op0) == PAREN_EXPR)
7768 return fold_convert (type, op0);
7769 return NULL_TREE;
7771 CASE_CONVERT:
7772 case FLOAT_EXPR:
7773 case FIX_TRUNC_EXPR:
7774 if (TREE_TYPE (op0) == type)
7775 return op0;
7777 /* If we have (type) (a CMP b) and type is an integral type, return
7778 new expression involving the new type. */
7779 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7780 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7781 TREE_OPERAND (op0, 1));
7783 /* Handle cases of two conversions in a row. */
7784 if (CONVERT_EXPR_P (op0))
7786 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7787 tree inter_type = TREE_TYPE (op0);
7788 int inside_int = INTEGRAL_TYPE_P (inside_type);
7789 int inside_ptr = POINTER_TYPE_P (inside_type);
7790 int inside_float = FLOAT_TYPE_P (inside_type);
7791 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7792 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7793 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7794 int inter_int = INTEGRAL_TYPE_P (inter_type);
7795 int inter_ptr = POINTER_TYPE_P (inter_type);
7796 int inter_float = FLOAT_TYPE_P (inter_type);
7797 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7798 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7799 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7800 int final_int = INTEGRAL_TYPE_P (type);
7801 int final_ptr = POINTER_TYPE_P (type);
7802 int final_float = FLOAT_TYPE_P (type);
7803 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7804 unsigned int final_prec = TYPE_PRECISION (type);
7805 int final_unsignedp = TYPE_UNSIGNED (type);
7807 /* In addition to the cases of two conversions in a row
7808 handled below, if we are converting something to its own
7809 type via an object of identical or wider precision, neither
7810 conversion is needed. */
7811 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7812 && (((inter_int || inter_ptr) && final_int)
7813 || (inter_float && final_float))
7814 && inter_prec >= final_prec)
7815 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7817 /* Likewise, if the intermediate and final types are either both
7818 float or both integer, we don't need the middle conversion if
7819 it is wider than the final type and doesn't change the signedness
7820 (for integers). Avoid this if the final type is a pointer
7821 since then we sometimes need the inner conversion. Likewise if
7822 the outer has a precision not equal to the size of its mode. */
7823 if (((inter_int && inside_int)
7824 || (inter_float && inside_float)
7825 || (inter_vec && inside_vec))
7826 && inter_prec >= inside_prec
7827 && (inter_float || inter_vec
7828 || inter_unsignedp == inside_unsignedp)
7829 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7830 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7831 && ! final_ptr
7832 && (! final_vec || inter_prec == inside_prec))
7833 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7835 /* If we have a sign-extension of a zero-extended value, we can
7836 replace that by a single zero-extension. */
7837 if (inside_int && inter_int && final_int
7838 && inside_prec < inter_prec && inter_prec < final_prec
7839 && inside_unsignedp && !inter_unsignedp)
7840 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7842 /* Two conversions in a row are not needed unless:
7843 - some conversion is floating-point (overstrict for now), or
7844 - some conversion is a vector (overstrict for now), or
7845 - the intermediate type is narrower than both initial and
7846 final, or
7847 - the intermediate type and innermost type differ in signedness,
7848 and the outermost type is wider than the intermediate, or
7849 - the initial type is a pointer type and the precisions of the
7850 intermediate and final types differ, or
7851 - the final type is a pointer type and the precisions of the
7852 initial and intermediate types differ. */
7853 if (! inside_float && ! inter_float && ! final_float
7854 && ! inside_vec && ! inter_vec && ! final_vec
7855 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7856 && ! (inside_int && inter_int
7857 && inter_unsignedp != inside_unsignedp
7858 && inter_prec < final_prec)
7859 && ((inter_unsignedp && inter_prec > inside_prec)
7860 == (final_unsignedp && final_prec > inter_prec))
7861 && ! (inside_ptr && inter_prec != final_prec)
7862 && ! (final_ptr && inside_prec != inter_prec)
7863 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7864 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7865 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7868 /* Handle (T *)&A.B.C for A being of type T and B and C
7869 living at offset zero. This occurs frequently in
7870 C++ upcasting and then accessing the base. */
7871 if (TREE_CODE (op0) == ADDR_EXPR
7872 && POINTER_TYPE_P (type)
7873 && handled_component_p (TREE_OPERAND (op0, 0)))
7875 HOST_WIDE_INT bitsize, bitpos;
7876 tree offset;
7877 enum machine_mode mode;
7878 int unsignedp, volatilep;
7879 tree base = TREE_OPERAND (op0, 0);
7880 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7881 &mode, &unsignedp, &volatilep, false);
7882 /* If the reference was to a (constant) zero offset, we can use
7883 the address of the base if it has the same base type
7884 as the result type. */
7885 if (! offset && bitpos == 0
7886 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7887 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7888 return fold_convert (type, fold_addr_expr (base));
7891 if (TREE_CODE (op0) == MODIFY_EXPR
7892 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7893 /* Detect assigning a bitfield. */
7894 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7895 && DECL_BIT_FIELD
7896 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7898 /* Don't leave an assignment inside a conversion
7899 unless assigning a bitfield. */
7900 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7901 /* First do the assignment, then return converted constant. */
7902 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7903 TREE_NO_WARNING (tem) = 1;
7904 TREE_USED (tem) = 1;
7905 return tem;
7908 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7909 	 constant (if x has signed type, the sign bit cannot be set
7910 in c). This folds extension into the BIT_AND_EXPR.
7911 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7912 very likely don't have maximal range for their precision and this
7913 transformation effectively doesn't preserve non-maximal ranges. */
7914 if (TREE_CODE (type) == INTEGER_TYPE
7915 && TREE_CODE (op0) == BIT_AND_EXPR
7916 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
7917 /* Not if the conversion is to the sub-type. */
7918 && TREE_TYPE (type) != TREE_TYPE (op0))
7920 tree and = op0;
7921 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7922 int change = 0;
7924 if (TYPE_UNSIGNED (TREE_TYPE (and))
7925 || (TYPE_PRECISION (type)
7926 <= TYPE_PRECISION (TREE_TYPE (and))))
7927 change = 1;
7928 else if (TYPE_PRECISION (TREE_TYPE (and1))
7929 <= HOST_BITS_PER_WIDE_INT
7930 && host_integerp (and1, 1))
7932 unsigned HOST_WIDE_INT cst;
7934 cst = tree_low_cst (and1, 1);
7935 cst &= (HOST_WIDE_INT) -1
7936 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7937 change = (cst == 0);
7938 #ifdef LOAD_EXTEND_OP
7939 if (change
7940 && !flag_syntax_only
7941 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7942 == ZERO_EXTEND))
7944 tree uns = unsigned_type_for (TREE_TYPE (and0));
7945 and0 = fold_convert (uns, and0);
7946 and1 = fold_convert (uns, and1);
7948 #endif
7950 if (change)
7952 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7953 TREE_INT_CST_HIGH (and1), 0,
7954 TREE_OVERFLOW (and1));
7955 return fold_build2 (BIT_AND_EXPR, type,
7956 fold_convert (type, and0), tem);
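/* Illustrative example: with unsigned int x, (long)(x & 0xff) becomes
   (long)x & 0xff; the AND is performed in the wider type after both
   operands are converted, which lets the extension combine with the
   surrounding arithmetic.  */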
7960 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7961 when one of the new casts will fold away. Conservatively we assume
7962 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7963 if (POINTER_TYPE_P (type)
7964 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7965 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7966 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7967 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7969 tree arg00 = TREE_OPERAND (arg0, 0);
7970 tree arg01 = TREE_OPERAND (arg0, 1);
7972 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7973 fold_convert (sizetype, arg01));
7976 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7977 of the same precision, and X is an integer type not narrower than
7978 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7979 if (INTEGRAL_TYPE_P (type)
7980 && TREE_CODE (op0) == BIT_NOT_EXPR
7981 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7982 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7983 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7985 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7986 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7987 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7988 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7991 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7992 type of X and Y (integer types only). */
7993 if (INTEGRAL_TYPE_P (type)
7994 && TREE_CODE (op0) == MULT_EXPR
7995 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7996 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7998 /* Be careful not to introduce new overflows. */
7999 tree mult_type;
8000 if (TYPE_OVERFLOW_WRAPS (type))
8001 mult_type = type;
8002 else
8003 mult_type = unsigned_type_for (type);
8005 tem = fold_build2 (MULT_EXPR, mult_type,
8006 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
8007 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
8008 return fold_convert (type, tem);
8011 tem = fold_convert_const (code, type, op0);
8012 return tem ? tem : NULL_TREE;
8014 case FIXED_CONVERT_EXPR:
8015 tem = fold_convert_const (code, type, arg0);
8016 return tem ? tem : NULL_TREE;
8018 case VIEW_CONVERT_EXPR:
8019 if (TREE_TYPE (op0) == type)
8020 return op0;
8021 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8022 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8024     /* For integral or pointer conversions that do not change the
8025        precision, use a NOP_EXPR instead.  */
8026 if ((INTEGRAL_TYPE_P (type)
8027 || POINTER_TYPE_P (type))
8028 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8029 || POINTER_TYPE_P (TREE_TYPE (op0)))
8030 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8031 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8032 a sub-type to its base type as generated by the Ada FE. */
8033 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8034 && TREE_TYPE (TREE_TYPE (op0))))
8035 return fold_convert (type, op0);
8037 /* Strip inner integral conversions that do not change the precision. */
8038 if (CONVERT_EXPR_P (op0)
8039 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8040 || POINTER_TYPE_P (TREE_TYPE (op0)))
8041 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8042 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8043 && (TYPE_PRECISION (TREE_TYPE (op0))
8044 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8045 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8047 return fold_view_convert_expr (type, op0);
8049 case NEGATE_EXPR:
8050 tem = fold_negate_expr (arg0);
8051 if (tem)
8052 return fold_convert (type, tem);
8053 return NULL_TREE;
8055 case ABS_EXPR:
8056 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8057 return fold_abs_const (arg0, type);
8058 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8059 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8060 /* Convert fabs((double)float) into (double)fabsf(float). */
8061 else if (TREE_CODE (arg0) == NOP_EXPR
8062 && TREE_CODE (type) == REAL_TYPE)
8064 tree targ0 = strip_float_extensions (arg0);
8065 if (targ0 != arg0)
8066 return fold_convert (type, fold_build1 (ABS_EXPR,
8067 TREE_TYPE (targ0),
8068 targ0));
8070 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8071 else if (TREE_CODE (arg0) == ABS_EXPR)
8072 return arg0;
8073 else if (tree_expr_nonnegative_p (arg0))
8074 return arg0;
8076 /* Strip sign ops from argument. */
8077 if (TREE_CODE (type) == REAL_TYPE)
8079 tem = fold_strip_sign_ops (arg0);
8080 if (tem)
8081 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8083 return NULL_TREE;
8085 case CONJ_EXPR:
8086 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8087 return fold_convert (type, arg0);
8088 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8090 tree itype = TREE_TYPE (type);
8091 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8092 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8093 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8095 if (TREE_CODE (arg0) == COMPLEX_CST)
8097 tree itype = TREE_TYPE (type);
8098 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8099 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8100 return build_complex (type, rpart, negate_expr (ipart));
8102 if (TREE_CODE (arg0) == CONJ_EXPR)
8103 return fold_convert (type, TREE_OPERAND (arg0, 0));
8104 return NULL_TREE;
8106 case BIT_NOT_EXPR:
8107 if (TREE_CODE (arg0) == INTEGER_CST)
8108 return fold_not_const (arg0, type);
8109 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8110 return fold_convert (type, TREE_OPERAND (arg0, 0));
8111 /* Convert ~ (-A) to A - 1. */
8112 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8113 return fold_build2 (MINUS_EXPR, type,
8114 fold_convert (type, TREE_OPERAND (arg0, 0)),
8115 build_int_cst (type, 1));
8116 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8117 else if (INTEGRAL_TYPE_P (type)
8118 && ((TREE_CODE (arg0) == MINUS_EXPR
8119 && integer_onep (TREE_OPERAND (arg0, 1)))
8120 || (TREE_CODE (arg0) == PLUS_EXPR
8121 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8122 return fold_build1 (NEGATE_EXPR, type,
8123 fold_convert (type, TREE_OPERAND (arg0, 0)));
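/* Both of the folds above follow from the two's complement identity
   ~X == -X - 1: substituting X = -A gives ~(-A) == A - 1, and
   substituting X = A - 1 gives ~(A - 1) == -A.  */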
8124 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8125 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8126 && (tem = fold_unary (BIT_NOT_EXPR, type,
8127 fold_convert (type,
8128 TREE_OPERAND (arg0, 0)))))
8129 return fold_build2 (BIT_XOR_EXPR, type, tem,
8130 fold_convert (type, TREE_OPERAND (arg0, 1)));
8131 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8132 && (tem = fold_unary (BIT_NOT_EXPR, type,
8133 fold_convert (type,
8134 TREE_OPERAND (arg0, 1)))))
8135 return fold_build2 (BIT_XOR_EXPR, type,
8136 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8137 /* Perform BIT_NOT_EXPR on each element individually. */
8138 else if (TREE_CODE (arg0) == VECTOR_CST)
8140 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8141 int count = TYPE_VECTOR_SUBPARTS (type), i;
8143 for (i = 0; i < count; i++)
8145 if (elements)
8147 elem = TREE_VALUE (elements);
8148 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8149 if (elem == NULL_TREE)
8150 break;
8151 elements = TREE_CHAIN (elements);
8153 else
8154 elem = build_int_cst (TREE_TYPE (type), -1);
8155 list = tree_cons (NULL_TREE, elem, list);
8157 if (i == count)
8158 return build_vector (type, nreverse (list));
8161 return NULL_TREE;
8163 case TRUTH_NOT_EXPR:
8164 /* The argument to invert_truthvalue must have Boolean type. */
8165 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8166 arg0 = fold_convert (boolean_type_node, arg0);
8168 /* Note that the operand of this must be an int
8169 and its values must be 0 or 1.
8170 ("true" is a fixed value perhaps depending on the language,
8171 but we don't handle values other than 1 correctly yet.) */
8172 tem = fold_truth_not_expr (arg0);
8173 if (!tem)
8174 return NULL_TREE;
8175 return fold_convert (type, tem);
8177 case REALPART_EXPR:
8178 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8179 return fold_convert (type, arg0);
8180 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8181 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8182 TREE_OPERAND (arg0, 1));
8183 if (TREE_CODE (arg0) == COMPLEX_CST)
8184 return fold_convert (type, TREE_REALPART (arg0));
8185 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8187 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8188 tem = fold_build2 (TREE_CODE (arg0), itype,
8189 fold_build1 (REALPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 0)),
8191 fold_build1 (REALPART_EXPR, itype,
8192 TREE_OPERAND (arg0, 1)));
8193 return fold_convert (type, tem);
8195 if (TREE_CODE (arg0) == CONJ_EXPR)
8197 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8198 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8199 return fold_convert (type, tem);
8201 if (TREE_CODE (arg0) == CALL_EXPR)
8203 tree fn = get_callee_fndecl (arg0);
8204 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8205 switch (DECL_FUNCTION_CODE (fn))
8207 CASE_FLT_FN (BUILT_IN_CEXPI):
8208 fn = mathfn_built_in (type, BUILT_IN_COS);
8209 if (fn)
8210 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8211 break;
8213 default:
8214 break;
8217 return NULL_TREE;
8219 case IMAGPART_EXPR:
8220 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8221 return fold_convert (type, integer_zero_node);
8222 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8223 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8224 TREE_OPERAND (arg0, 0));
8225 if (TREE_CODE (arg0) == COMPLEX_CST)
8226 return fold_convert (type, TREE_IMAGPART (arg0));
8227 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8229 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8230 tem = fold_build2 (TREE_CODE (arg0), itype,
8231 fold_build1 (IMAGPART_EXPR, itype,
8232 TREE_OPERAND (arg0, 0)),
8233 fold_build1 (IMAGPART_EXPR, itype,
8234 TREE_OPERAND (arg0, 1)));
8235 return fold_convert (type, tem);
8237 if (TREE_CODE (arg0) == CONJ_EXPR)
8239 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8240 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8241 return fold_convert (type, negate_expr (tem));
8243 if (TREE_CODE (arg0) == CALL_EXPR)
8245 tree fn = get_callee_fndecl (arg0);
8246 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8247 switch (DECL_FUNCTION_CODE (fn))
8249 CASE_FLT_FN (BUILT_IN_CEXPI):
8250 fn = mathfn_built_in (type, BUILT_IN_SIN);
8251 if (fn)
8252 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8253 break;
8255 default:
8256 break;
8259 return NULL_TREE;
8261 default:
8262 return NULL_TREE;
8263 } /* switch (code) */
8266 /* Fold a binary expression of code CODE and type TYPE with operands
8267 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8268 Return the folded expression if folding is successful. Otherwise,
8269 return NULL_TREE. */
8271 static tree
8272 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8274 enum tree_code compl_code;
8276 if (code == MIN_EXPR)
8277 compl_code = MAX_EXPR;
8278 else if (code == MAX_EXPR)
8279 compl_code = MIN_EXPR;
8280 else
8281 gcc_unreachable ();
8283 /* MIN (MAX (a, b), b) == b. */
8284 if (TREE_CODE (op0) == compl_code
8285 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8286 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8288 /* MIN (MAX (b, a), b) == b. */
8289 if (TREE_CODE (op0) == compl_code
8290 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8291 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8292 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8294 /* MIN (a, MAX (a, b)) == a. */
8295 if (TREE_CODE (op1) == compl_code
8296 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8297 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8298 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8300 /* MIN (a, MAX (b, a)) == a. */
8301 if (TREE_CODE (op1) == compl_code
8302 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8303 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8304 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8306 return NULL_TREE;
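/* These identities hold because MAX (a, b) >= b and MIN (a, b) <= a for
   all a and b, so e.g. MIN (MAX (a, b), b) can never select a value
   other than b; omit_one_operand still preserves any side effects of
   the discarded operand.  */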
8309 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8310 by changing CODE to reduce the magnitude of constants involved in
8311 ARG0 of the comparison.
8312 Returns a canonicalized comparison tree if a simplification was
8313 possible, otherwise returns NULL_TREE.
8314 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8315 valid if signed overflow is undefined. */
8317 static tree
8318 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8319 tree arg0, tree arg1,
8320 bool *strict_overflow_p)
8322 enum tree_code code0 = TREE_CODE (arg0);
8323 tree t, cst0 = NULL_TREE;
8324 int sgn0;
8325 bool swap = false;
8327 /* Match A +- CST code arg1 and CST code arg1. We can change the
8328 first form only if overflow is undefined. */
8329 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8330 /* In principle pointers also have undefined overflow behavior,
8331 but that causes problems elsewhere. */
8332 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8333 && (code0 == MINUS_EXPR
8334 || code0 == PLUS_EXPR)
8335 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8336 || code0 == INTEGER_CST))
8337 return NULL_TREE;
8339 /* Identify the constant in arg0 and its sign. */
8340 if (code0 == INTEGER_CST)
8341 cst0 = arg0;
8342 else
8343 cst0 = TREE_OPERAND (arg0, 1);
8344 sgn0 = tree_int_cst_sgn (cst0);
8346 /* Overflowed constants and zero will cause problems. */
8347 if (integer_zerop (cst0)
8348 || TREE_OVERFLOW (cst0))
8349 return NULL_TREE;
8351 /* See if we can reduce the magnitude of the constant in
8352 arg0 by changing the comparison code. */
8353 if (code0 == INTEGER_CST)
8355 /* CST <= arg1 -> CST-1 < arg1. */
8356 if (code == LE_EXPR && sgn0 == 1)
8357 code = LT_EXPR;
8358 /* -CST < arg1 -> -CST-1 <= arg1. */
8359 else if (code == LT_EXPR && sgn0 == -1)
8360 code = LE_EXPR;
8361 /* CST > arg1 -> CST-1 >= arg1. */
8362 else if (code == GT_EXPR && sgn0 == 1)
8363 code = GE_EXPR;
8364 /* -CST >= arg1 -> -CST-1 > arg1. */
8365 else if (code == GE_EXPR && sgn0 == -1)
8366 code = GT_EXPR;
8367 else
8368 return NULL_TREE;
8369 /* arg1 code' CST' might be more canonical. */
8370 swap = true;
8372 else
8374 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8375 if (code == LT_EXPR
8376 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8377 code = LE_EXPR;
8378 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8379 else if (code == GT_EXPR
8380 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8381 code = GE_EXPR;
8382 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8383 else if (code == LE_EXPR
8384 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8385 code = LT_EXPR;
8386 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8387 else if (code == GE_EXPR
8388 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8389 code = GT_EXPR;
8390 else
8391 return NULL_TREE;
8392 *strict_overflow_p = true;
8395 /* Now build the constant reduced in magnitude. But not if that
8396      would produce one outside of its type's range.  */
8397 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8398 && ((sgn0 == 1
8399 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8400 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8401 || (sgn0 == -1
8402 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8403 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8404 /* We cannot swap the comparison here as that would cause us to
8405 endlessly recurse. */
8406 return NULL_TREE;
8408 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8409 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8410 if (code0 != INTEGER_CST)
8411 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8413   /* If swapping might yield a more canonical form, do so.  */
8414 if (swap)
8415 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8416 else
8417 return fold_build2 (code, type, t, arg1);
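/* Worked instance (illustrative): for signed X, "X - 5 < Y" is
   rewritten to "X - 4 <= Y", shrinking the constant's magnitude by one.
   This is only valid because signed overflow is assumed to be
   undefined, which is why *STRICT_OVERFLOW_P is set above.  */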
8420 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8421 overflow further. Try to decrease the magnitude of constants involved
8422 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8423 and put sole constants at the second argument position.
8424 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8426 static tree
8427 maybe_canonicalize_comparison (enum tree_code code, tree type,
8428 tree arg0, tree arg1)
8430 tree t;
8431 bool strict_overflow_p;
8432 const char * const warnmsg = G_("assuming signed overflow does not occur "
8433 "when reducing constant in comparison");
8435 /* Try canonicalization by simplifying arg0. */
8436 strict_overflow_p = false;
8437 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8438 &strict_overflow_p);
8439 if (t)
8441 if (strict_overflow_p)
8442 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8443 return t;
8446 /* Try canonicalization by simplifying arg1 using the swapped
8447 comparison. */
8448 code = swap_tree_comparison (code);
8449 strict_overflow_p = false;
8450 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8451 &strict_overflow_p);
8452 if (t && strict_overflow_p)
8453 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8454 return t;
8457 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8458 space. This is used to avoid issuing overflow warnings for
8459    expressions like &p->x which cannot wrap.  */
8461 static bool
8462 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8464 unsigned HOST_WIDE_INT offset_low, total_low;
8465 HOST_WIDE_INT size, offset_high, total_high;
8467 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8468 return true;
8470 if (bitpos < 0)
8471 return true;
8473 if (offset == NULL_TREE)
8475 offset_low = 0;
8476 offset_high = 0;
8478 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8479 return true;
8480 else
8482 offset_low = TREE_INT_CST_LOW (offset);
8483 offset_high = TREE_INT_CST_HIGH (offset);
8486 if (add_double_with_sign (offset_low, offset_high,
8487 bitpos / BITS_PER_UNIT, 0,
8488 &total_low, &total_high,
8489 true))
8490 return true;
8492 if (total_high != 0)
8493 return true;
8495 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8496 if (size <= 0)
8497 return true;
8499 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8500 array. */
8501 if (TREE_CODE (base) == ADDR_EXPR)
8503 HOST_WIDE_INT base_size;
8505 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8506 if (base_size > 0 && size < base_size)
8507 size = base_size;
8510 return total_low > (unsigned HOST_WIDE_INT) size;
8513 /* Subroutine of fold_binary. This routine performs all of the
8514 transformations that are common to the equality/inequality
8515 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8516    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8517    fold_binary itself should go through fold_binary.  Fold a comparison with
8518 tree code CODE and type TYPE with operands OP0 and OP1. Return
8519 the folded comparison or NULL_TREE. */
8521 static tree
8522 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8524 tree arg0, arg1, tem;
8526 arg0 = op0;
8527 arg1 = op1;
8529 STRIP_SIGN_NOPS (arg0);
8530 STRIP_SIGN_NOPS (arg1);
8532 tem = fold_relational_const (code, type, arg0, arg1);
8533 if (tem != NULL_TREE)
8534 return tem;
8536 /* If one arg is a real or integer constant, put it last. */
8537 if (tree_swap_operands_p (arg0, arg1, true))
8538 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8540 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8541 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8542 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8543 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8544 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8545 && (TREE_CODE (arg1) == INTEGER_CST
8546 && !TREE_OVERFLOW (arg1)))
8548 tree const1 = TREE_OPERAND (arg0, 1);
8549 tree const2 = arg1;
8550 tree variable = TREE_OPERAND (arg0, 0);
8551 tree lhs;
8552 int lhs_add;
8553 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8555 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8556 TREE_TYPE (arg1), const2, const1);
8558 /* If the constant operation overflowed this can be
8559 simplified as a comparison against INT_MAX/INT_MIN. */
8560 if (TREE_CODE (lhs) == INTEGER_CST
8561 && TREE_OVERFLOW (lhs))
8563 int const1_sgn = tree_int_cst_sgn (const1);
8564 enum tree_code code2 = code;
8566 /* Get the sign of the constant on the lhs if the
8567 operation were VARIABLE + CONST1. */
8568 if (TREE_CODE (arg0) == MINUS_EXPR)
8569 const1_sgn = -const1_sgn;
8571 /* The sign of the constant determines if we overflowed
8572 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8573 Canonicalize to the INT_MIN overflow by swapping the comparison
8574 if necessary. */
8575 if (const1_sgn == -1)
8576 code2 = swap_tree_comparison (code);
8578 /* We now can look at the canonicalized case
8579 VARIABLE + 1 CODE2 INT_MIN
8580 and decide on the result. */
8581 if (code2 == LT_EXPR
8582 || code2 == LE_EXPR
8583 || code2 == EQ_EXPR)
8584 return omit_one_operand (type, boolean_false_node, variable);
8585 else if (code2 == NE_EXPR
8586 || code2 == GE_EXPR
8587 || code2 == GT_EXPR)
8588 return omit_one_operand (type, boolean_true_node, variable);
8591 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8592 && (TREE_CODE (lhs) != INTEGER_CST
8593 || !TREE_OVERFLOW (lhs)))
8595 fold_overflow_warning (("assuming signed overflow does not occur "
8596 "when changing X +- C1 cmp C2 to "
8597 "X cmp C1 +- C2"),
8598 WARN_STRICT_OVERFLOW_COMPARISON);
8599 return fold_build2 (code, type, variable, lhs);
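/* Illustrative example: "X + 3 < 7" becomes "X < 4" by folding 7 - 3
   into a single constant.  If the new constant instead overflows
   (e.g. "X - 1 < INT_MAX"), the comparison degenerates into one
   against INT_MIN/INT_MAX and is decided outright above.  */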
8603 /* For comparisons of pointers we can decompose it to a compile time
8604 comparison of the base objects and the offsets into the object.
8605 This requires at least one operand being an ADDR_EXPR or a
8606 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8607 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8608 && (TREE_CODE (arg0) == ADDR_EXPR
8609 || TREE_CODE (arg1) == ADDR_EXPR
8610 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8611 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8613 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8614 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8615 enum machine_mode mode;
8616 int volatilep, unsignedp;
8617 bool indirect_base0 = false, indirect_base1 = false;
8619 /* Get base and offset for the access. Strip ADDR_EXPR for
8620 get_inner_reference, but put it back by stripping INDIRECT_REF
8621 off the base object if possible. indirect_baseN will be true
8622 if baseN is not an address but refers to the object itself. */
8623 base0 = arg0;
8624 if (TREE_CODE (arg0) == ADDR_EXPR)
8626 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8627 &bitsize, &bitpos0, &offset0, &mode,
8628 &unsignedp, &volatilep, false);
8629 if (TREE_CODE (base0) == INDIRECT_REF)
8630 base0 = TREE_OPERAND (base0, 0);
8631 else
8632 indirect_base0 = true;
8634 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8636 base0 = TREE_OPERAND (arg0, 0);
8637 offset0 = TREE_OPERAND (arg0, 1);
8640 base1 = arg1;
8641 if (TREE_CODE (arg1) == ADDR_EXPR)
8643 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8644 &bitsize, &bitpos1, &offset1, &mode,
8645 &unsignedp, &volatilep, false);
8646 if (TREE_CODE (base1) == INDIRECT_REF)
8647 base1 = TREE_OPERAND (base1, 0);
8648 else
8649 indirect_base1 = true;
8651 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8653 base1 = TREE_OPERAND (arg1, 0);
8654 offset1 = TREE_OPERAND (arg1, 1);
8657 /* If we have equivalent bases we might be able to simplify. */
8658 if (indirect_base0 == indirect_base1
8659 && operand_equal_p (base0, base1, 0))
8661 /* We can fold this expression to a constant if the non-constant
8662 offset parts are equal. */
8663 if ((offset0 == offset1
8664 || (offset0 && offset1
8665 && operand_equal_p (offset0, offset1, 0)))
8666 && (code == EQ_EXPR
8667 || code == NE_EXPR
8668 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8671 if (code != EQ_EXPR
8672 && code != NE_EXPR
8673 && bitpos0 != bitpos1
8674 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8675 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8676 fold_overflow_warning (("assuming pointer wraparound does not "
8677 "occur when comparing P +- C1 with "
8678 "P +- C2"),
8679 WARN_STRICT_OVERFLOW_CONDITIONAL);
8681 switch (code)
8683 case EQ_EXPR:
8684 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8685 case NE_EXPR:
8686 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8687 case LT_EXPR:
8688 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8689 case LE_EXPR:
8690 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8691 case GE_EXPR:
8692 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8693 case GT_EXPR:
8694 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8695 default:;
8698 /* We can simplify the comparison to a comparison of the variable
8699 offset parts if the constant offset parts are equal.
8700 Be careful to use signed size type here because otherwise we
8701 mess with array offsets in the wrong way. This is possible
8702 	     because pointer arithmetic is required to remain within an
8703 object and overflow on pointer differences is undefined as of
8704 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8705 else if (bitpos0 == bitpos1
8706 && ((code == EQ_EXPR || code == NE_EXPR)
8707 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8709 tree signed_size_type_node;
8710 signed_size_type_node = signed_type_for (size_type_node);
8712 	      /* By converting to the signed size type we cover middle-end
8713 		 pointer arithmetic, which operates on unsigned pointer types
8714 		 of size-type width, as well as ARRAY_REF offsets, which are
8715 		 properly sign- or zero-extended from their own type in case
8716 		 it is narrower than the size type.  */
8717 if (offset0 == NULL_TREE)
8718 offset0 = build_int_cst (signed_size_type_node, 0);
8719 else
8720 offset0 = fold_convert (signed_size_type_node, offset0);
8721 if (offset1 == NULL_TREE)
8722 offset1 = build_int_cst (signed_size_type_node, 0);
8723 else
8724 offset1 = fold_convert (signed_size_type_node, offset1);
8726 if (code != EQ_EXPR
8727 && code != NE_EXPR
8728 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8729 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8730 fold_overflow_warning (("assuming pointer wraparound does not "
8731 "occur when comparing P +- C1 with "
8732 "P +- C2"),
8733 WARN_STRICT_OVERFLOW_COMPARISON);
8735 return fold_build2 (code, type, offset0, offset1);
8738 /* For non-equal bases we can simplify if they are addresses
8739 of local binding decls or constants. */
8740 else if (indirect_base0 && indirect_base1
8741 /* We know that !operand_equal_p (base0, base1, 0)
8742 because the if condition was false. But make
8743 sure two decls are not the same. */
8744 && base0 != base1
8745 && TREE_CODE (arg0) == ADDR_EXPR
8746 && TREE_CODE (arg1) == ADDR_EXPR
8747 && (((TREE_CODE (base0) == VAR_DECL
8748 || TREE_CODE (base0) == PARM_DECL)
8749 && (targetm.binds_local_p (base0)
8750 || CONSTANT_CLASS_P (base1)))
8751 || CONSTANT_CLASS_P (base0))
8752 && (((TREE_CODE (base1) == VAR_DECL
8753 || TREE_CODE (base1) == PARM_DECL)
8754 && (targetm.binds_local_p (base1)
8755 || CONSTANT_CLASS_P (base0)))
8756 || CONSTANT_CLASS_P (base1)))
8758 if (code == EQ_EXPR)
8759 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8760 else if (code == NE_EXPR)
8761 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8763 /* For equal offsets we can simplify to a comparison of the
8764 base addresses. */
8765 else if (bitpos0 == bitpos1
8766 && (indirect_base0
8767 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8768 && (indirect_base1
8769 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8770 && ((offset0 == offset1)
8771 || (offset0 && offset1
8772 && operand_equal_p (offset0, offset1, 0))))
8774 if (indirect_base0)
8775 base0 = fold_addr_expr (base0);
8776 if (indirect_base1)
8777 base1 = fold_addr_expr (base1);
8778 return fold_build2 (code, type, base0, base1);
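/* Illustrative example: given "struct S { int a; int b; } s;", the
   comparison "&s.a == &s.b" has equal bases and constant offsets, so it
   folds to a compile-time comparison of the bit positions (here:
   false); "&s.a < &s.b" additionally requires that pointer overflow be
   undefined before it may be decided this way.  */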
8782 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8783 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8784 the resulting offset is smaller in absolute value than the
8785 original one. */
8786 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8787 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8788 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8789 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8790 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8791 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8792 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8794 tree const1 = TREE_OPERAND (arg0, 1);
8795 tree const2 = TREE_OPERAND (arg1, 1);
8796 tree variable1 = TREE_OPERAND (arg0, 0);
8797 tree variable2 = TREE_OPERAND (arg1, 0);
8798 tree cst;
8799 const char * const warnmsg = G_("assuming signed overflow does not "
8800 "occur when combining constants around "
8801 "a comparison");
8803 /* Put the constant on the side where it doesn't overflow and is
8804 of lower absolute value than before. */
8805 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8806 ? MINUS_EXPR : PLUS_EXPR,
8807 const2, const1, 0);
8808 if (!TREE_OVERFLOW (cst)
8809 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8811 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8812 return fold_build2 (code, type,
8813 variable1,
8814 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8815 variable2, cst));
8818 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8819 ? MINUS_EXPR : PLUS_EXPR,
8820 const1, const2, 0);
8821 if (!TREE_OVERFLOW (cst)
8822 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8824 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8825 return fold_build2 (code, type,
8826 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8827 variable1, cst),
8828 variable2);
8832 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8833 signed arithmetic case. That form is created by the compiler
8834 often enough for folding it to be of value. One example is in
8835 computing loop trip counts after Operator Strength Reduction. */
8836 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8837 && TREE_CODE (arg0) == MULT_EXPR
8838 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8839 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8840 && integer_zerop (arg1))
8842 tree const1 = TREE_OPERAND (arg0, 1);
8843 tree const2 = arg1; /* zero */
8844 tree variable1 = TREE_OPERAND (arg0, 0);
8845 enum tree_code cmp_code = code;
8847 gcc_assert (!integer_zerop (const1));
8849 fold_overflow_warning (("assuming signed overflow does not occur when "
8850 "eliminating multiplication in comparison "
8851 "with zero"),
8852 WARN_STRICT_OVERFLOW_COMPARISON);
8854 /* If const1 is negative we swap the sense of the comparison. */
8855 if (tree_int_cst_sgn (const1) < 0)
8856 cmp_code = swap_tree_comparison (cmp_code);
8858 return fold_build2 (cmp_code, type, variable1, const2);
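/* Illustrative example: "X * 4 > 0" becomes "X > 0", and "X * -4 > 0"
   becomes "X < 0" via the swapped comparison; both rely on the
   multiplication not wrapping, hence the strict-overflow warning.  */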
8861 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8862 if (tem)
8863 return tem;
8865 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8867 tree targ0 = strip_float_extensions (arg0);
8868 tree targ1 = strip_float_extensions (arg1);
8869 tree newtype = TREE_TYPE (targ0);
8871 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8872 newtype = TREE_TYPE (targ1);
8874 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8875 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8876 return fold_build2 (code, type, fold_convert (newtype, targ0),
8877 fold_convert (newtype, targ1));
8879 /* (-a) CMP (-b) -> b CMP a */
8880 if (TREE_CODE (arg0) == NEGATE_EXPR
8881 && TREE_CODE (arg1) == NEGATE_EXPR)
8882 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8883 TREE_OPERAND (arg0, 0));
8885 if (TREE_CODE (arg1) == REAL_CST)
8887 REAL_VALUE_TYPE cst;
8888 cst = TREE_REAL_CST (arg1);
8890 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8891 if (TREE_CODE (arg0) == NEGATE_EXPR)
8892 return fold_build2 (swap_tree_comparison (code), type,
8893 TREE_OPERAND (arg0, 0),
8894 build_real (TREE_TYPE (arg1),
8895 REAL_VALUE_NEGATE (cst)));
8897 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8898 /* a CMP (-0) -> a CMP 0 */
8899 if (REAL_VALUE_MINUS_ZERO (cst))
8900 return fold_build2 (code, type, arg0,
8901 build_real (TREE_TYPE (arg1), dconst0));
8903 /* x != NaN is always true, other ops are always false. */
8904 if (REAL_VALUE_ISNAN (cst)
8905 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8907 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8908 return omit_one_operand (type, tem, arg0);
8911 /* Fold comparisons against infinity. */
8912 if (REAL_VALUE_ISINF (cst))
8914 tem = fold_inf_compare (code, type, arg0, arg1);
8915 if (tem != NULL_TREE)
8916 return tem;
8920 /* If this is a comparison of a real constant with a PLUS_EXPR
8921 or a MINUS_EXPR of a real constant, we can convert it into a
8922 comparison with a revised real constant as long as no overflow
8923 occurs when unsafe_math_optimizations are enabled. */
8924 if (flag_unsafe_math_optimizations
8925 && TREE_CODE (arg1) == REAL_CST
8926 && (TREE_CODE (arg0) == PLUS_EXPR
8927 || TREE_CODE (arg0) == MINUS_EXPR)
8928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8929 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8930 ? MINUS_EXPR : PLUS_EXPR,
8931 arg1, TREE_OPERAND (arg0, 1), 0))
8932 && !TREE_OVERFLOW (tem))
8933 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8935 /* Likewise, we can simplify a comparison of a real constant with
8936 a MINUS_EXPR whose first operand is also a real constant, i.e.
8937 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8938 floating-point types only if -fassociative-math is set. */
8939 if (flag_associative_math
8940 && TREE_CODE (arg1) == REAL_CST
8941 && TREE_CODE (arg0) == MINUS_EXPR
8942 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8943 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8944 arg1, 0))
8945 && !TREE_OVERFLOW (tem))
8946 return fold_build2 (swap_tree_comparison (code), type,
8947 TREE_OPERAND (arg0, 1), tem);
8949 /* Fold comparisons against built-in math functions. */
8950 if (TREE_CODE (arg1) == REAL_CST
8951 && flag_unsafe_math_optimizations
8952 && ! flag_errno_math)
8954 enum built_in_function fcode = builtin_mathfn_code (arg0);
8956 if (fcode != END_BUILTINS)
8958 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8959 if (tem != NULL_TREE)
8960 return tem;
8965 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8966 && CONVERT_EXPR_P (arg0))
8968 /* If we are widening one operand of an integer comparison,
8969 see if the other operand is similarly being widened. Perhaps we
8970 can do the comparison in the narrower type. */
8971 tem = fold_widened_comparison (code, type, arg0, arg1);
8972 if (tem)
8973 return tem;
8975 /* Or if we are changing signedness. */
8976 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8977 if (tem)
8978 return tem;
8981 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8982 constant, we can simplify it. */
8983 if (TREE_CODE (arg1) == INTEGER_CST
8984 && (TREE_CODE (arg0) == MIN_EXPR
8985 || TREE_CODE (arg0) == MAX_EXPR)
8986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8988 tem = optimize_minmax_comparison (code, type, op0, op1);
8989 if (tem)
8990 return tem;
8993 /* Simplify comparison of something with itself. (For IEEE
8994 floating-point, we can only do some of these simplifications.) */
8995 if (operand_equal_p (arg0, arg1, 0))
8997 switch (code)
8999 case EQ_EXPR:
9000 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9001 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9002 return constant_boolean_node (1, type);
9003 break;
9005 case GE_EXPR:
9006 case LE_EXPR:
9007 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9008 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9009 return constant_boolean_node (1, type);
9010 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9012 case NE_EXPR:
9013 /* For NE, we can only do this simplification if integer
9014 or we don't honor IEEE floating point NaNs. */
9015 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9016 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9017 break;
9018 /* ... fall through ... */
9019 case GT_EXPR:
9020 case LT_EXPR:
9021 return constant_boolean_node (0, type);
9022 default:
9023 gcc_unreachable ();
9027 /* If we are comparing an expression that just has comparisons
9028 of two integer values, arithmetic expressions of those comparisons,
9029 and constants, we can simplify it. There are only three cases
9030 to check: the two values can either be equal, the first can be
9031 greater, or the second can be greater. Fold the expression for
9032 those three values. Since each value must be 0 or 1, we have
9033 eight possibilities, each of which corresponds to the constant 0
9034 or 1 or one of the six possible comparisons.
9036 This handles common cases like (a > b) == 0 but also handles
9037 expressions like ((x > y) - (y > x)) > 0, which supposedly
9038 occur in macroized code. */
9040 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9042 tree cval1 = 0, cval2 = 0;
9043 int save_p = 0;
9045 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9046 /* Don't handle degenerate cases here; they should already
9047 have been handled anyway. */
9048 && cval1 != 0 && cval2 != 0
9049 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9050 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9051 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9052 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9053 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9054 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9055 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9057 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9058 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9060 /* We can't just pass T to eval_subst in case cval1 or cval2
9061 was the same as ARG1. */
9063 tree high_result
9064 = fold_build2 (code, type,
9065 eval_subst (arg0, cval1, maxval,
9066 cval2, minval),
9067 arg1);
9068 tree equal_result
9069 = fold_build2 (code, type,
9070 eval_subst (arg0, cval1, maxval,
9071 cval2, maxval),
9072 arg1);
9073 tree low_result
9074 = fold_build2 (code, type,
9075 eval_subst (arg0, cval1, minval,
9076 cval2, maxval),
9077 arg1);
9079 /* All three of these results should be 0 or 1. Confirm they are.
9080 Then use those values to select the proper code to use. */
9082 if (TREE_CODE (high_result) == INTEGER_CST
9083 && TREE_CODE (equal_result) == INTEGER_CST
9084 && TREE_CODE (low_result) == INTEGER_CST)
9086 /* Make a 3-bit mask with the high-order bit being the
9087 	     value for `>', the next for `=', and the low for `<'.  */
9088 switch ((integer_onep (high_result) * 4)
9089 + (integer_onep (equal_result) * 2)
9090 + integer_onep (low_result))
9092 case 0:
9093 /* Always false. */
9094 return omit_one_operand (type, integer_zero_node, arg0);
9095 case 1:
9096 code = LT_EXPR;
9097 break;
9098 case 2:
9099 code = EQ_EXPR;
9100 break;
9101 case 3:
9102 code = LE_EXPR;
9103 break;
9104 case 4:
9105 code = GT_EXPR;
9106 break;
9107 case 5:
9108 code = NE_EXPR;
9109 break;
9110 case 6:
9111 code = GE_EXPR;
9112 break;
9113 case 7:
9114 /* Always true. */
9115 return omit_one_operand (type, integer_one_node, arg0);
9118 if (save_p)
9119 return save_expr (build2 (code, type, cval1, cval2));
9120 return fold_build2 (code, type, cval1, cval2);
9125 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9126 into a single range test. */
9127 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9128 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9129 && TREE_CODE (arg1) == INTEGER_CST
9130 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9131 && !integer_zerop (TREE_OPERAND (arg0, 1))
9132 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9133 && !TREE_OVERFLOW (arg1))
9135 tem = fold_div_compare (code, type, arg0, arg1);
9136 if (tem != NULL_TREE)
9137 return tem;
9140 /* Fold ~X op ~Y as Y op X. */
9141 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9142 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9144 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9145 return fold_build2 (code, type,
9146 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9147 TREE_OPERAND (arg0, 0));
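     /* Why this is safe, sketched: bitwise NOT is order-reversing
	(~x == -x - 1 in two's complement), so e.g. ~x < ~y holds exactly
	when y < x.  The same reversal justifies the ~X op C case below.  */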
9150 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9151 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9152 && TREE_CODE (arg1) == INTEGER_CST)
9154 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9155 return fold_build2 (swap_tree_comparison (code), type,
9156 TREE_OPERAND (arg0, 0),
9157 fold_build1 (BIT_NOT_EXPR, cmp_type,
9158 fold_convert (cmp_type, arg1)));
9161 return NULL_TREE;
9165 /* Subroutine of fold_binary. Optimize complex multiplications of the
9166 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9167 argument EXPR represents the expression "z" of type TYPE. */
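/* The identity used, for reference: writing z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is the expression the function builds from the real and
   imaginary parts of EXPR.  */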
9169 static tree
9170 fold_mult_zconjz (tree type, tree expr)
9172 tree itype = TREE_TYPE (type);
9173 tree rpart, ipart, tem;
9175 if (TREE_CODE (expr) == COMPLEX_EXPR)
9177 rpart = TREE_OPERAND (expr, 0);
9178 ipart = TREE_OPERAND (expr, 1);
9180 else if (TREE_CODE (expr) == COMPLEX_CST)
9182 rpart = TREE_REALPART (expr);
9183 ipart = TREE_IMAGPART (expr);
9185 else
9187 expr = save_expr (expr);
9188 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9189 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9192 rpart = save_expr (rpart);
9193 ipart = save_expr (ipart);
9194 tem = fold_build2 (PLUS_EXPR, itype,
9195 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9196 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9197 return fold_build2 (COMPLEX_EXPR, type, tem,
9198 fold_convert (itype, integer_zero_node));
9202 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9203 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9204 guarantees that P and N have the same least significant log2(M) bits.
9205 N is not otherwise constrained. In particular, N is not normalized to
9206 0 <= N < M as is common. In general, the precise value of P is unknown.
9207 M is chosen as large as possible such that constant N can be determined.
9209 Returns M and sets *RESIDUE to N. */
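/* A hypothetical example: for &s.f, where s is an 8-byte aligned
   struct and the field f sits at byte offset 4, the function returns
   M = 8 and sets *RESIDUE to 4; the pointer value is congruent to
   4 modulo 8.  */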
9211 static unsigned HOST_WIDE_INT
9212 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9214 enum tree_code code;
9216 *residue = 0;
9218 code = TREE_CODE (expr);
9219 if (code == ADDR_EXPR)
9221 expr = TREE_OPERAND (expr, 0);
9222 if (handled_component_p (expr))
9224 HOST_WIDE_INT bitsize, bitpos;
9225 tree offset;
9226 enum machine_mode mode;
9227 int unsignedp, volatilep;
9229 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9230 &mode, &unsignedp, &volatilep, false);
9231 *residue = bitpos / BITS_PER_UNIT;
9232 if (offset)
9234 if (TREE_CODE (offset) == INTEGER_CST)
9235 *residue += TREE_INT_CST_LOW (offset);
9236 else
9237 /* We don't handle more complicated offset expressions. */
9238 return 1;
9242 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9243 return DECL_ALIGN_UNIT (expr);
9245 else if (code == POINTER_PLUS_EXPR)
9247 tree op0, op1;
9248 unsigned HOST_WIDE_INT modulus;
9249 enum tree_code inner_code;
9251 op0 = TREE_OPERAND (expr, 0);
9252 STRIP_NOPS (op0);
9253 modulus = get_pointer_modulus_and_residue (op0, residue);
9255 op1 = TREE_OPERAND (expr, 1);
9256 STRIP_NOPS (op1);
9257 inner_code = TREE_CODE (op1);
9258 if (inner_code == INTEGER_CST)
9260 *residue += TREE_INT_CST_LOW (op1);
9261 return modulus;
9263 else if (inner_code == MULT_EXPR)
9265 op1 = TREE_OPERAND (op1, 1);
9266 if (TREE_CODE (op1) == INTEGER_CST)
9268 unsigned HOST_WIDE_INT align;
9270 /* Compute the greatest power-of-2 divisor of op1. */
9271 align = TREE_INT_CST_LOW (op1);
9272 align &= -align;
9274 /* If align is non-zero and less than modulus, replace
9275 modulus with align. If align is 0, then either op1 is 0
9276 or the greatest power-of-2 divisor of op1 doesn't fit in an
9277 unsigned HOST_WIDE_INT. In either case, no additional
9278 constraint is imposed. */
9279 if (align)
9280 modulus = MIN (modulus, align);
9282 return modulus;
9287 /* If we get here, we were unable to determine anything useful about the
9288 expression. */
9289 return 1;
9293 /* Fold a binary expression of code CODE and type TYPE with operands
9294 OP0 and OP1. Return the folded expression if folding is
9295 successful. Otherwise, return NULL_TREE. */
9297 tree
9298 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9300 enum tree_code_class kind = TREE_CODE_CLASS (code);
9301 tree arg0, arg1, tem;
9302 tree t1 = NULL_TREE;
9303 bool strict_overflow_p;
9305 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9306 && TREE_CODE_LENGTH (code) == 2
9307 && op0 != NULL_TREE
9308 && op1 != NULL_TREE);
9310 arg0 = op0;
9311 arg1 = op1;
9313 /* Strip any conversions that don't change the mode. This is
9314 safe for every expression, except for a comparison expression
9315 because its signedness is derived from its operands. So, in
9316 the latter case, only strip conversions that don't change the
9317 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9318 preserved.
9320 Note that this is done as an internal manipulation within the
9321 constant folder, in order to find the simplest representation
9322 of the arguments so that their form can be studied. In any
9323 case, the appropriate type conversions should be put back in
9324 the tree that will get out of the constant folder. */
9326 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9328 STRIP_SIGN_NOPS (arg0);
9329 STRIP_SIGN_NOPS (arg1);
9331 else
9333 STRIP_NOPS (arg0);
9334 STRIP_NOPS (arg1);
9337 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9338 constant but we can't do arithmetic on them. */
9339 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9340 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9341 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9342 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9343 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9344 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9346 if (kind == tcc_binary)
9348 /* Make sure type and arg0 have the same saturating flag. */
9349 gcc_assert (TYPE_SATURATING (type)
9350 == TYPE_SATURATING (TREE_TYPE (arg0)));
9351 tem = const_binop (code, arg0, arg1, 0);
9353 else if (kind == tcc_comparison)
9354 tem = fold_relational_const (code, type, arg0, arg1);
9355 else
9356 tem = NULL_TREE;
9358 if (tem != NULL_TREE)
9360 if (TREE_TYPE (tem) != type)
9361 tem = fold_convert (type, tem);
9362 return tem;
9366 /* If this is a commutative operation, and ARG0 is a constant, move it
9367 to ARG1 to reduce the number of tests below. */
9368 if (commutative_tree_code (code)
9369 && tree_swap_operands_p (arg0, arg1, true))
9370 return fold_build2 (code, type, op1, op0);
9372 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9374 First check for cases where an arithmetic operation is applied to a
9375 compound, conditional, or comparison operation. Push the arithmetic
9376 operation inside the compound or conditional to see if any folding
9377 can then be done. Convert comparison to conditional for this purpose.
9378 This also optimizes non-constant cases that used to be done in
9379 expand_expr.
9381 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9382 one of the operands is a comparison and the other is a comparison, a
9383 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9384 code below would make the expression more complex. Change it to a
9385 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9386 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9388 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9389 || code == EQ_EXPR || code == NE_EXPR)
9390 && ((truth_value_p (TREE_CODE (arg0))
9391 && (truth_value_p (TREE_CODE (arg1))
9392 || (TREE_CODE (arg1) == BIT_AND_EXPR
9393 && integer_onep (TREE_OPERAND (arg1, 1)))))
9394 || (truth_value_p (TREE_CODE (arg1))
9395 && (truth_value_p (TREE_CODE (arg0))
9396 || (TREE_CODE (arg0) == BIT_AND_EXPR
9397 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9399 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9400 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9401 : TRUTH_XOR_EXPR,
9402 boolean_type_node,
9403 fold_convert (boolean_type_node, arg0),
9404 fold_convert (boolean_type_node, arg1));
9406 if (code == EQ_EXPR)
9407 tem = invert_truthvalue (tem);
9409 return fold_convert (type, tem);
9412 if (TREE_CODE_CLASS (code) == tcc_binary
9413 || TREE_CODE_CLASS (code) == tcc_comparison)
9415 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9416 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9417 fold_build2 (code, type,
9418 fold_convert (TREE_TYPE (op0),
9419 TREE_OPERAND (arg0, 1)),
9420 op1));
9421 if (TREE_CODE (arg1) == COMPOUND_EXPR
9422 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9423 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9424 fold_build2 (code, type, op0,
9425 fold_convert (TREE_TYPE (op1),
9426 TREE_OPERAND (arg1, 1))));
9428 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9430 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9431 arg0, arg1,
9432 /*cond_first_p=*/1);
9433 if (tem != NULL_TREE)
9434 return tem;
9437 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9439 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9440 arg1, arg0,
9441 /*cond_first_p=*/0);
9442 if (tem != NULL_TREE)
9443 return tem;
9447 switch (code)
9449 case POINTER_PLUS_EXPR:
9450 /* 0 +p index -> (type)index */
9451 if (integer_zerop (arg0))
9452 return non_lvalue (fold_convert (type, arg1));
9454 /* PTR +p 0 -> PTR */
9455 if (integer_zerop (arg1))
9456 return non_lvalue (fold_convert (type, arg0));
9458 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9459 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9460 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9461 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9462 fold_convert (sizetype, arg1),
9463 fold_convert (sizetype, arg0)));
9465 /* index +p PTR -> PTR +p index */
9466 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9467 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9468 return fold_build2 (POINTER_PLUS_EXPR, type,
9469 fold_convert (type, arg1),
9470 fold_convert (sizetype, arg0));
9472 /* (PTR +p B) +p A -> PTR +p (B + A) */
9473 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9475 tree inner;
9476 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9477 tree arg00 = TREE_OPERAND (arg0, 0);
9478 inner = fold_build2 (PLUS_EXPR, sizetype,
9479 arg01, fold_convert (sizetype, arg1));
9480 return fold_convert (type,
9481 fold_build2 (POINTER_PLUS_EXPR,
9482 TREE_TYPE (arg00), arg00, inner));
9485 /* PTR_CST +p CST -> CST1 */
9486 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9487 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9489 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9490 of the array. The loop optimizer sometimes produces this type
9491 of expression. */
9492 if (TREE_CODE (arg0) == ADDR_EXPR)
9494 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9495 if (tem)
9496 return fold_convert (type, tem);
9499 return NULL_TREE;
9501 case PLUS_EXPR:
9502 /* PTR + INT -> (INT)(PTR p+ INT) */
9503 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9504 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9505 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9506 TREE_TYPE (arg0),
9507 arg0,
9508 fold_convert (sizetype, arg1)));
9509 /* INT + PTR -> (INT)(PTR p+ INT) */
9510 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9511 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9512 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9513 TREE_TYPE (arg1),
9514 arg1,
9515 fold_convert (sizetype, arg0)));
9516 /* A + (-B) -> A - B */
9517 if (TREE_CODE (arg1) == NEGATE_EXPR)
9518 return fold_build2 (MINUS_EXPR, type,
9519 fold_convert (type, arg0),
9520 fold_convert (type, TREE_OPERAND (arg1, 0)));
9521 /* (-A) + B -> B - A */
9522 if (TREE_CODE (arg0) == NEGATE_EXPR
9523 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9524 return fold_build2 (MINUS_EXPR, type,
9525 fold_convert (type, arg1),
9526 fold_convert (type, TREE_OPERAND (arg0, 0)));
9528 if (INTEGRAL_TYPE_P (type))
9530 /* Convert ~A + 1 to -A. */
9531 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9532 && integer_onep (arg1))
9533 return fold_build1 (NEGATE_EXPR, type,
9534 fold_convert (type, TREE_OPERAND (arg0, 0)));
9536 /* ~X + X is -1. */
9537 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9538 && !TYPE_OVERFLOW_TRAPS (type))
9540 tree tem = TREE_OPERAND (arg0, 0);
9542 STRIP_NOPS (tem);
9543 if (operand_equal_p (tem, arg1, 0))
9545 t1 = build_int_cst_type (type, -1);
9546 return omit_one_operand (type, t1, arg1);
9550 /* X + ~X is -1. */
9551 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9552 && !TYPE_OVERFLOW_TRAPS (type))
9554 tree tem = TREE_OPERAND (arg1, 0);
9556 STRIP_NOPS (tem);
9557 if (operand_equal_p (arg0, tem, 0))
9559 t1 = build_int_cst_type (type, -1);
9560 return omit_one_operand (type, t1, arg0);
9564 /* X + (X / CST) * -CST is X % CST. */
9565 if (TREE_CODE (arg1) == MULT_EXPR
9566 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9567 && operand_equal_p (arg0,
9568 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9570 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9571 tree cst1 = TREE_OPERAND (arg1, 1);
9572 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9573 if (sum && integer_zerop (sum))
9574 return fold_convert (type,
9575 fold_build2 (TRUNC_MOD_EXPR,
9576 TREE_TYPE (arg0), arg0, cst0));
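	  /* Worked instance (illustrative only): with x = 13 and CST = 4,
	     x / 4 truncates to 3, so x + 3 * -4 = 13 - 12 = 1 = 13 % 4.  */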
9580 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9581 same or one. Make sure type is not saturating.
9582 fold_plusminus_mult_expr will re-associate. */
9583 if ((TREE_CODE (arg0) == MULT_EXPR
9584 || TREE_CODE (arg1) == MULT_EXPR)
9585 && !TYPE_SATURATING (type)
9586 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9588 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9589 if (tem)
9590 return tem;
9593 if (! FLOAT_TYPE_P (type))
9595 if (integer_zerop (arg1))
9596 return non_lvalue (fold_convert (type, arg0));
9598 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9599 with a constant, and the two constants have no bits in common,
9600 we should treat this as a BIT_IOR_EXPR since this may produce more
9601 simplifications. */
9602 if (TREE_CODE (arg0) == BIT_AND_EXPR
9603 && TREE_CODE (arg1) == BIT_AND_EXPR
9604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9605 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9606 && integer_zerop (const_binop (BIT_AND_EXPR,
9607 TREE_OPERAND (arg0, 1),
9608 TREE_OPERAND (arg1, 1), 0)))
9610 code = BIT_IOR_EXPR;
9611 goto bit_ior;
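	  /* E.g. (x & 0xF0) + (y & 0x0F), an assumed example: the two
	     masks share no bits, so no column of the addition can carry
	     and the sum equals (x & 0xF0) | (y & 0x0F).  */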
9614 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9615 (plus (plus (mult) (mult)) (foo)) so that we can
9616 take advantage of the factoring cases below. */
9617 if (((TREE_CODE (arg0) == PLUS_EXPR
9618 || TREE_CODE (arg0) == MINUS_EXPR)
9619 && TREE_CODE (arg1) == MULT_EXPR)
9620 || ((TREE_CODE (arg1) == PLUS_EXPR
9621 || TREE_CODE (arg1) == MINUS_EXPR)
9622 && TREE_CODE (arg0) == MULT_EXPR))
9624 tree parg0, parg1, parg, marg;
9625 enum tree_code pcode;
9627 if (TREE_CODE (arg1) == MULT_EXPR)
9628 parg = arg0, marg = arg1;
9629 else
9630 parg = arg1, marg = arg0;
9631 pcode = TREE_CODE (parg);
9632 parg0 = TREE_OPERAND (parg, 0);
9633 parg1 = TREE_OPERAND (parg, 1);
9634 STRIP_NOPS (parg0);
9635 STRIP_NOPS (parg1);
9637 if (TREE_CODE (parg0) == MULT_EXPR
9638 && TREE_CODE (parg1) != MULT_EXPR)
9639 return fold_build2 (pcode, type,
9640 fold_build2 (PLUS_EXPR, type,
9641 fold_convert (type, parg0),
9642 fold_convert (type, marg)),
9643 fold_convert (type, parg1));
9644 if (TREE_CODE (parg0) != MULT_EXPR
9645 && TREE_CODE (parg1) == MULT_EXPR)
9646 return fold_build2 (PLUS_EXPR, type,
9647 fold_convert (type, parg0),
9648 fold_build2 (pcode, type,
9649 fold_convert (type, marg),
9650 fold_convert (type,
9651 parg1)));
9654 else
9656 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9657 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9658 return non_lvalue (fold_convert (type, arg0));
9660 /* Likewise if the operands are reversed. */
9661 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9662 return non_lvalue (fold_convert (type, arg1));
9664 /* Convert X + -C into X - C. */
9665 if (TREE_CODE (arg1) == REAL_CST
9666 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9668 tem = fold_negate_const (arg1, type);
9669 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9670 return fold_build2 (MINUS_EXPR, type,
9671 fold_convert (type, arg0),
9672 fold_convert (type, tem));
9675 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9676 to __complex__ ( x, y ). This is not the same for SNaNs or
9677 if signed zeros are involved. */
9678 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9679 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9680 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9682 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9683 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9684 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9685 bool arg0rz = false, arg0iz = false;
9686 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9687 || (arg0i && (arg0iz = real_zerop (arg0i))))
9689 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9690 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9691 if (arg0rz && arg1i && real_zerop (arg1i))
9693 tree rp = arg1r ? arg1r
9694 : build1 (REALPART_EXPR, rtype, arg1);
9695 tree ip = arg0i ? arg0i
9696 : build1 (IMAGPART_EXPR, rtype, arg0);
9697 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9699 else if (arg0iz && arg1r && real_zerop (arg1r))
9701 tree rp = arg0r ? arg0r
9702 : build1 (REALPART_EXPR, rtype, arg0);
9703 tree ip = arg1i ? arg1i
9704 : build1 (IMAGPART_EXPR, rtype, arg1);
9705 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9710 if (flag_unsafe_math_optimizations
9711 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9712 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9713 && (tem = distribute_real_division (code, type, arg0, arg1)))
9714 return tem;
9716 /* Convert x+x into x*2.0. */
9717 if (operand_equal_p (arg0, arg1, 0)
9718 && SCALAR_FLOAT_TYPE_P (type))
9719 return fold_build2 (MULT_EXPR, type, arg0,
9720 build_real (type, dconst2));
9722 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9723 We associate floats only if the user has specified
9724 -fassociative-math. */
9725 if (flag_associative_math
9726 && TREE_CODE (arg1) == PLUS_EXPR
9727 && TREE_CODE (arg0) != MULT_EXPR)
9729 tree tree10 = TREE_OPERAND (arg1, 0);
9730 tree tree11 = TREE_OPERAND (arg1, 1);
9731 if (TREE_CODE (tree11) == MULT_EXPR
9732 && TREE_CODE (tree10) == MULT_EXPR)
9734 tree tree0;
9735 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9736 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9739 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9740 We associate floats only if the user has specified
9741 -fassociative-math. */
9742 if (flag_associative_math
9743 && TREE_CODE (arg0) == PLUS_EXPR
9744 && TREE_CODE (arg1) != MULT_EXPR)
9746 tree tree00 = TREE_OPERAND (arg0, 0);
9747 tree tree01 = TREE_OPERAND (arg0, 1);
9748 if (TREE_CODE (tree01) == MULT_EXPR
9749 && TREE_CODE (tree00) == MULT_EXPR)
9751 tree tree0;
9752 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9753 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9758 bit_rotate:
9759 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9760 is a rotate of A by C1 bits. */
9761 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9762 is a rotate of A by B bits. */
9764 enum tree_code code0, code1;
9765 tree rtype;
9766 code0 = TREE_CODE (arg0);
9767 code1 = TREE_CODE (arg1);
9768 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9769 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9770 && operand_equal_p (TREE_OPERAND (arg0, 0),
9771 TREE_OPERAND (arg1, 0), 0)
9772 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9773 TYPE_UNSIGNED (rtype))
9774 /* Only create rotates in complete modes. Other cases are not
9775 expanded properly. */
9776 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9778 tree tree01, tree11;
9779 enum tree_code code01, code11;
9781 tree01 = TREE_OPERAND (arg0, 1);
9782 tree11 = TREE_OPERAND (arg1, 1);
9783 STRIP_NOPS (tree01);
9784 STRIP_NOPS (tree11);
9785 code01 = TREE_CODE (tree01);
9786 code11 = TREE_CODE (tree11);
9787 if (code01 == INTEGER_CST
9788 && code11 == INTEGER_CST
9789 && TREE_INT_CST_HIGH (tree01) == 0
9790 && TREE_INT_CST_HIGH (tree11) == 0
9791 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9792 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9793 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9794 code0 == LSHIFT_EXPR ? tree01 : tree11);
9795 else if (code11 == MINUS_EXPR)
9797 tree tree110, tree111;
9798 tree110 = TREE_OPERAND (tree11, 0);
9799 tree111 = TREE_OPERAND (tree11, 1);
9800 STRIP_NOPS (tree110);
9801 STRIP_NOPS (tree111);
9802 if (TREE_CODE (tree110) == INTEGER_CST
9803 && 0 == compare_tree_int (tree110,
9804 TYPE_PRECISION
9805 (TREE_TYPE (TREE_OPERAND
9806 (arg0, 0))))
9807 && operand_equal_p (tree01, tree111, 0))
9808 return build2 ((code0 == LSHIFT_EXPR
9809 ? LROTATE_EXPR
9810 : RROTATE_EXPR),
9811 type, TREE_OPERAND (arg0, 0), tree01);
9813 else if (code01 == MINUS_EXPR)
9815 tree tree010, tree011;
9816 tree010 = TREE_OPERAND (tree01, 0);
9817 tree011 = TREE_OPERAND (tree01, 1);
9818 STRIP_NOPS (tree010);
9819 STRIP_NOPS (tree011);
9820 if (TREE_CODE (tree010) == INTEGER_CST
9821 && 0 == compare_tree_int (tree010,
9822 TYPE_PRECISION
9823 (TREE_TYPE (TREE_OPERAND
9824 (arg0, 0))))
9825 && operand_equal_p (tree11, tree011, 0))
9826 return build2 ((code0 != LSHIFT_EXPR
9827 ? LROTATE_EXPR
9828 : RROTATE_EXPR),
9829 type, TREE_OPERAND (arg0, 0), tree11);
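	  /* For illustration, assuming a 32-bit unsigned A: both
	     (A << 3) + (A >> 29) and (A << b) + (A >> (32 - b)) are
	     recognized here and rebuilt as left-rotates of A.  */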
9834 associate:
9835 /* In most languages, we can't associate operations on floats through
9836 parentheses. Rather than remember where the parentheses were, we
9837 don't associate floats at all, unless the user has specified
9838 -fassociative-math.
9839 And, we need to make sure type is not saturating. */
9841 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9842 && !TYPE_SATURATING (type))
9844 tree var0, con0, lit0, minus_lit0;
9845 tree var1, con1, lit1, minus_lit1;
9846 bool ok = true;
9848 /* Split both trees into variables, constants, and literals. Then
9849 associate each group together, the constants with literals,
9850 then the result with variables. This increases the chances of
9851 literals being recombined later and of generating relocatable
9852 expressions for the sum of a constant and literal. */
9853 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9854 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9855 code == MINUS_EXPR);
9857 /* With undefined overflow we can only associate constants
9858 with one variable. */
9859 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9860 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9861 && var0 && var1)
9863 tree tmp0 = var0;
9864 tree tmp1 = var1;
9866 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9867 tmp0 = TREE_OPERAND (tmp0, 0);
9868 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9869 tmp1 = TREE_OPERAND (tmp1, 0);
9870 /* The only case we can still associate with two variables
9871 is if they are the same, modulo negation. */
9872 if (!operand_equal_p (tmp0, tmp1, 0))
9873 ok = false;
9876 /* Only do something if we found more than two objects. Otherwise,
9877 nothing has changed and we risk infinite recursion. */
9878 if (ok
9879 && (2 < ((var0 != 0) + (var1 != 0)
9880 + (con0 != 0) + (con1 != 0)
9881 + (lit0 != 0) + (lit1 != 0)
9882 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9884 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9885 if (code == MINUS_EXPR)
9886 code = PLUS_EXPR;
9888 var0 = associate_trees (var0, var1, code, type);
9889 con0 = associate_trees (con0, con1, code, type);
9890 lit0 = associate_trees (lit0, lit1, code, type);
9891 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9893 /* Preserve the MINUS_EXPR if the negative part of the literal is
9894 greater than the positive part. Otherwise, the multiplicative
9895 folding code (i.e. extract_muldiv) may be fooled when
9896 unsigned constants are subtracted, like in the following
9897 example: ((X*2 + 4) - 8U)/2. */
9898 if (minus_lit0 && lit0)
9900 if (TREE_CODE (lit0) == INTEGER_CST
9901 && TREE_CODE (minus_lit0) == INTEGER_CST
9902 && tree_int_cst_lt (lit0, minus_lit0))
9904 minus_lit0 = associate_trees (minus_lit0, lit0,
9905 MINUS_EXPR, type);
9906 lit0 = 0;
9908 else
9910 lit0 = associate_trees (lit0, minus_lit0,
9911 MINUS_EXPR, type);
9912 minus_lit0 = 0;
9915 if (minus_lit0)
9917 if (con0 == 0)
9918 return fold_convert (type,
9919 associate_trees (var0, minus_lit0,
9920 MINUS_EXPR, type));
9921 else
9923 con0 = associate_trees (con0, minus_lit0,
9924 MINUS_EXPR, type);
9925 return fold_convert (type,
9926 associate_trees (var0, con0,
9927 PLUS_EXPR, type));
9931 con0 = associate_trees (con0, lit0, code, type);
9932 return fold_convert (type, associate_trees (var0, con0,
9933 code, type));
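	  /* Sketch of the splitting on a hypothetical input: for unsigned
	     x and y, (x + 3) + (y + 5) splits into variables x, y and
	     literals 3, 5; associating each group yields (x + y) + 8,
	     so the literals fold at compile time.  */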
9937 return NULL_TREE;
9939 case MINUS_EXPR:
9940 /* Pointer simplifications for subtraction, simple reassociations. */
9941 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9943 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9944 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9945 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9947 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9948 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9949 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9950 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9951 return fold_build2 (PLUS_EXPR, type,
9952 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9953 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9955 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9956 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9958 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9959 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9960 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9961 if (tmp)
9962 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9965 /* A - (-B) -> A + B */
9966 if (TREE_CODE (arg1) == NEGATE_EXPR)
9967 return fold_build2 (PLUS_EXPR, type, op0,
9968 fold_convert (type, TREE_OPERAND (arg1, 0)));
9969 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9970 if (TREE_CODE (arg0) == NEGATE_EXPR
9971 && (FLOAT_TYPE_P (type)
9972 || INTEGRAL_TYPE_P (type))
9973 && negate_expr_p (arg1)
9974 && reorder_operands_p (arg0, arg1))
9975 return fold_build2 (MINUS_EXPR, type,
9976 fold_convert (type, negate_expr (arg1)),
9977 fold_convert (type, TREE_OPERAND (arg0, 0)));
9978 /* Convert -A - 1 to ~A. */
9979 if (INTEGRAL_TYPE_P (type)
9980 && TREE_CODE (arg0) == NEGATE_EXPR
9981 && integer_onep (arg1)
9982 && !TYPE_OVERFLOW_TRAPS (type))
9983 return fold_build1 (BIT_NOT_EXPR, type,
9984 fold_convert (type, TREE_OPERAND (arg0, 0)));
9986 /* Convert -1 - A to ~A. */
9987 if (INTEGRAL_TYPE_P (type)
9988 && integer_all_onesp (arg0))
9989 return fold_build1 (BIT_NOT_EXPR, type, op1);
9992 /* X - (X / CST) * CST is X % CST. */
9993 if (INTEGRAL_TYPE_P (type)
9994 && TREE_CODE (arg1) == MULT_EXPR
9995 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9996 && operand_equal_p (arg0,
9997 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9998 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9999 TREE_OPERAND (arg1, 1), 0))
10000 return fold_convert (type,
10001 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10002 arg0, TREE_OPERAND (arg1, 1)));
10004 if (! FLOAT_TYPE_P (type))
10006 if (integer_zerop (arg0))
10007 return negate_expr (fold_convert (type, arg1));
10008 if (integer_zerop (arg1))
10009 return non_lvalue (fold_convert (type, arg0));
10011 /* Fold A - (A & B) into ~B & A. */
10012 if (!TREE_SIDE_EFFECTS (arg0)
10013 && TREE_CODE (arg1) == BIT_AND_EXPR)
10015 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10017 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10018 return fold_build2 (BIT_AND_EXPR, type,
10019 fold_build1 (BIT_NOT_EXPR, type, arg10),
10020 fold_convert (type, arg0));
10022 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10024 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10025 return fold_build2 (BIT_AND_EXPR, type,
10026 fold_build1 (BIT_NOT_EXPR, type, arg11),
10027 fold_convert (type, arg0));
10031 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10032 any power of 2 minus 1. */
10033 if (TREE_CODE (arg0) == BIT_AND_EXPR
10034 && TREE_CODE (arg1) == BIT_AND_EXPR
10035 && operand_equal_p (TREE_OPERAND (arg0, 0),
10036 TREE_OPERAND (arg1, 0), 0))
10038 tree mask0 = TREE_OPERAND (arg0, 1);
10039 tree mask1 = TREE_OPERAND (arg1, 1);
10040 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10042 if (operand_equal_p (tem, mask1, 0))
10044 tem = fold_build2 (BIT_XOR_EXPR, type,
10045 TREE_OPERAND (arg0, 0), mask1);
10046 return fold_build2 (MINUS_EXPR, type, tem, mask1);
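	      /* Illustrative check with A = 1101b, B = 0111b:
		 (A & ~B) - (A & B) = 1000b - 0101b = 0011b, and
		 (A ^ B) - B = 1010b - 0111b = 0011b as well.  */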
10051 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10052 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10053 return non_lvalue (fold_convert (type, arg0));
10055 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10056 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10057 (-ARG1 + ARG0) reduces to -ARG1. */
10058 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10059 return negate_expr (fold_convert (type, arg1));
10061 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10062 __complex__ ( x, -y ). This is not the same for SNaNs or if
10063 signed zeros are involved. */
10064 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10065 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10066 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10068 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10069 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10070 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10071 bool arg0rz = false, arg0iz = false;
10072 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10073 || (arg0i && (arg0iz = real_zerop (arg0i))))
10075 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10076 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10077 if (arg0rz && arg1i && real_zerop (arg1i))
10079 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10080 arg1r ? arg1r
10081 : build1 (REALPART_EXPR, rtype, arg1));
10082 tree ip = arg0i ? arg0i
10083 : build1 (IMAGPART_EXPR, rtype, arg0);
10084 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10086 else if (arg0iz && arg1r && real_zerop (arg1r))
10088 tree rp = arg0r ? arg0r
10089 : build1 (REALPART_EXPR, rtype, arg0);
10090 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10091 arg1i ? arg1i
10092 : build1 (IMAGPART_EXPR, rtype, arg1));
10093 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10098 /* Fold &x - &x. This can happen from &x.foo - &x.
10099 This is unsafe for certain floats even in non-IEEE formats.
10100 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10101 Also note that operand_equal_p is always false if an operand
10102 is volatile. */
10104 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10105 && operand_equal_p (arg0, arg1, 0))
10106 return fold_convert (type, integer_zero_node);
10108 /* A - B -> A + (-B) if B is easily negatable. */
10109 if (negate_expr_p (arg1)
10110 && ((FLOAT_TYPE_P (type)
10111 /* Avoid this transformation if B is a positive REAL_CST. */
10112 && (TREE_CODE (arg1) != REAL_CST
10113 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10114 || INTEGRAL_TYPE_P (type)))
10115 return fold_build2 (PLUS_EXPR, type,
10116 fold_convert (type, arg0),
10117 fold_convert (type, negate_expr (arg1)));
10119 /* Try folding difference of addresses. */
10121 HOST_WIDE_INT diff;
10123 if ((TREE_CODE (arg0) == ADDR_EXPR
10124 || TREE_CODE (arg1) == ADDR_EXPR)
10125 && ptr_difference_const (arg0, arg1, &diff))
10126 return build_int_cst_type (type, diff);
10129 /* Fold &a[i] - &a[j] to i-j. */
10130 if (TREE_CODE (arg0) == ADDR_EXPR
10131 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10132 && TREE_CODE (arg1) == ADDR_EXPR
10133 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10135 tree aref0 = TREE_OPERAND (arg0, 0);
10136 tree aref1 = TREE_OPERAND (arg1, 0);
10137 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10138 TREE_OPERAND (aref1, 0), 0))
10140 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10141 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10142 tree esz = array_ref_element_size (aref0);
10143 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10144 return fold_build2 (MULT_EXPR, type, diff,
10145 fold_convert (type, esz));
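	  /* E.g. for a hypothetical int a[10], &a[5] - &a[2] at this
	     level is a 12-byte address difference, rebuilt here as
	     (5 - 2) * sizeof (int).  */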
10150 if (flag_unsafe_math_optimizations
10151 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10152 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10153 && (tem = distribute_real_division (code, type, arg0, arg1)))
10154 return tem;
10156 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10157 same or one. Make sure type is not saturating.
10158 fold_plusminus_mult_expr will re-associate. */
10159 if ((TREE_CODE (arg0) == MULT_EXPR
10160 || TREE_CODE (arg1) == MULT_EXPR)
10161 && !TYPE_SATURATING (type)
10162 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10164 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10165 if (tem)
10166 return tem;
10169 goto associate;
10171 case MULT_EXPR:
10172 /* (-A) * (-B) -> A * B */
10173 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10174 return fold_build2 (MULT_EXPR, type,
10175 fold_convert (type, TREE_OPERAND (arg0, 0)),
10176 fold_convert (type, negate_expr (arg1)));
10177 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10178 return fold_build2 (MULT_EXPR, type,
10179 fold_convert (type, negate_expr (arg0)),
10180 fold_convert (type, TREE_OPERAND (arg1, 0)));
10182 if (! FLOAT_TYPE_P (type))
10184 if (integer_zerop (arg1))
10185 return omit_one_operand (type, arg1, arg0);
10186 if (integer_onep (arg1))
10187 return non_lvalue (fold_convert (type, arg0));
10188 /* Transform x * -1 into -x. Make sure to do the negation
10189 on the original operand with conversions not stripped
10190 because we can only strip non-sign-changing conversions. */
10191 if (integer_all_onesp (arg1))
10192 return fold_convert (type, negate_expr (op0));
10193 /* Transform x * -C into -x * C if x is easily negatable. */
10194 if (TREE_CODE (arg1) == INTEGER_CST
10195 && tree_int_cst_sgn (arg1) == -1
10196 && negate_expr_p (arg0)
10197 && (tem = negate_expr (arg1)) != arg1
10198 && !TREE_OVERFLOW (tem))
10199 return fold_build2 (MULT_EXPR, type,
10200 fold_convert (type, negate_expr (arg0)), tem);
10202 /* (a * (1 << b)) is (a << b) */
10203 if (TREE_CODE (arg1) == LSHIFT_EXPR
10204 && integer_onep (TREE_OPERAND (arg1, 0)))
10205 return fold_build2 (LSHIFT_EXPR, type, op0,
10206 TREE_OPERAND (arg1, 1));
10207 if (TREE_CODE (arg0) == LSHIFT_EXPR
10208 && integer_onep (TREE_OPERAND (arg0, 0)))
10209 return fold_build2 (LSHIFT_EXPR, type, op1,
10210 TREE_OPERAND (arg0, 1));
10212 /* (A + A) * C -> A * 2 * C */
10213 if (TREE_CODE (arg0) == PLUS_EXPR
10214 && TREE_CODE (arg1) == INTEGER_CST
10215 && operand_equal_p (TREE_OPERAND (arg0, 0),
10216 TREE_OPERAND (arg0, 1), 0))
10217 return fold_build2 (MULT_EXPR, type,
10218 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10219 TREE_OPERAND (arg0, 1)),
10220 fold_build2 (MULT_EXPR, type,
10221 build_int_cst (type, 2), arg1));
10223 strict_overflow_p = false;
10224 if (TREE_CODE (arg1) == INTEGER_CST
10225 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10226 &strict_overflow_p)))
10228 if (strict_overflow_p)
10229 fold_overflow_warning (("assuming signed overflow does not "
10230 "occur when simplifying "
10231 "multiplication"),
10232 WARN_STRICT_OVERFLOW_MISC);
10233 return fold_convert (type, tem);
10236 /* Optimize z * conj(z) for integer complex numbers. */
10237 if (TREE_CODE (arg0) == CONJ_EXPR
10238 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10239 return fold_mult_zconjz (type, arg1);
10240 if (TREE_CODE (arg1) == CONJ_EXPR
10241 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10242 return fold_mult_zconjz (type, arg0);
10244 else
10246 /* Maybe fold x * 0 to 0. The expressions aren't the same
10247 when x is NaN, since x * 0 is also NaN. Nor are they the
10248 same in modes with signed zeros, since multiplying a
10249 negative value by 0 gives -0, not +0. */
10250 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10251 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10252 && real_zerop (arg1))
10253 return omit_one_operand (type, arg1, arg0);
10254 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10255 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10256 && real_onep (arg1))
10257 return non_lvalue (fold_convert (type, arg0));
10259 /* Transform x * -1.0 into -x. */
10260 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10261 && real_minus_onep (arg1))
10262 return fold_convert (type, negate_expr (arg0));
10264 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10265 the result for floating point types due to rounding, so it is applied
10266 only if -fassociative-math was specified. */
10267 if (flag_associative_math
10268 && TREE_CODE (arg0) == RDIV_EXPR
10269 && TREE_CODE (arg1) == REAL_CST
10270 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10272 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10273 arg1, 0);
10274 if (tem)
10275 return fold_build2 (RDIV_EXPR, type, tem,
10276 TREE_OPERAND (arg0, 1));
10279 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10280 if (operand_equal_p (arg0, arg1, 0))
10282 tree tem = fold_strip_sign_ops (arg0);
10283 if (tem != NULL_TREE)
10285 tem = fold_convert (type, tem);
10286 return fold_build2 (MULT_EXPR, type, tem, tem);
10290 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10291 This is not the same for NaNs or if signed zeros are
10292 involved. */
10293 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10294 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10295 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10296 && TREE_CODE (arg1) == COMPLEX_CST
10297 && real_zerop (TREE_REALPART (arg1)))
10299 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10300 if (real_onep (TREE_IMAGPART (arg1)))
10301 return fold_build2 (COMPLEX_EXPR, type,
10302 negate_expr (fold_build1 (IMAGPART_EXPR,
10303 rtype, arg0)),
10304 fold_build1 (REALPART_EXPR, rtype, arg0));
10305 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10306 return fold_build2 (COMPLEX_EXPR, type,
10307 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10308 negate_expr (fold_build1 (REALPART_EXPR,
10309 rtype, arg0)));
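	  /* The identity used, for reference: with z = a + b*i,
	     z * i = -b + a*i and z * -i = b - a*i, i.e. the real and
	     imaginary parts are swapped and one of them negated.  */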
10312 /* Optimize z * conj(z) for floating point complex numbers.
10313 Guarded by flag_unsafe_math_optimizations as non-finite
10314 imaginary components don't produce scalar results. */
10315 if (flag_unsafe_math_optimizations
10316 && TREE_CODE (arg0) == CONJ_EXPR
10317 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10318 return fold_mult_zconjz (type, arg1);
10319 if (flag_unsafe_math_optimizations
10320 && TREE_CODE (arg1) == CONJ_EXPR
10321 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10322 return fold_mult_zconjz (type, arg0);
10324 if (flag_unsafe_math_optimizations)
10326 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10327 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10329 /* Optimizations of root(...)*root(...). */
10330 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10332 tree rootfn, arg;
10333 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10334 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10336 /* Optimize sqrt(x)*sqrt(x) as x. */
10337 if (BUILTIN_SQRT_P (fcode0)
10338 && operand_equal_p (arg00, arg10, 0)
10339 && ! HONOR_SNANS (TYPE_MODE (type)))
10340 return arg00;
10342 /* Optimize root(x)*root(y) as root(x*y). */
10343 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10344 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10345 return build_call_expr (rootfn, 1, arg);
10348 /* Optimize expN(x)*expN(y) as expN(x+y). */
10349 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10351 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10352 tree arg = fold_build2 (PLUS_EXPR, type,
10353 CALL_EXPR_ARG (arg0, 0),
10354 CALL_EXPR_ARG (arg1, 0));
10355 return build_call_expr (expfn, 1, arg);
10358 /* Optimizations of pow(...)*pow(...). */
10359 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10360 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10361 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10363 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10364 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10365 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10366 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10368 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10369 if (operand_equal_p (arg01, arg11, 0))
10371 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10372 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10373 return build_call_expr (powfn, 2, arg, arg01);
10376 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10377 if (operand_equal_p (arg00, arg10, 0))
10379 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10380 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10381 return build_call_expr (powfn, 2, arg00, arg);
10385 /* Optimize tan(x)*cos(x) as sin(x). */
10386 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10387 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10388 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10389 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10390 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10391 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10392 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10393 CALL_EXPR_ARG (arg1, 0), 0))
10395 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10397 if (sinfn != NULL_TREE)
10398 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10401 /* Optimize x*pow(x,c) as pow(x,c+1). */
10402 if (fcode1 == BUILT_IN_POW
10403 || fcode1 == BUILT_IN_POWF
10404 || fcode1 == BUILT_IN_POWL)
10406 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10407 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10408 if (TREE_CODE (arg11) == REAL_CST
10409 && !TREE_OVERFLOW (arg11)
10410 && operand_equal_p (arg0, arg10, 0))
10412 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10413 REAL_VALUE_TYPE c;
10414 tree arg;
10416 c = TREE_REAL_CST (arg11);
10417 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10418 arg = build_real (type, c);
10419 return build_call_expr (powfn, 2, arg0, arg);
10423 /* Optimize pow(x,c)*x as pow(x,c+1). */
10424 if (fcode0 == BUILT_IN_POW
10425 || fcode0 == BUILT_IN_POWF
10426 || fcode0 == BUILT_IN_POWL)
10428 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10429 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10430 if (TREE_CODE (arg01) == REAL_CST
10431 && !TREE_OVERFLOW (arg01)
10432 && operand_equal_p (arg1, arg00, 0))
10434 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10435 REAL_VALUE_TYPE c;
10436 tree arg;
10438 c = TREE_REAL_CST (arg01);
10439 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10440 arg = build_real (type, c);
10441 return build_call_expr (powfn, 2, arg1, arg);
10445 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10446 if (optimize_function_for_speed_p (cfun)
10447 && operand_equal_p (arg0, arg1, 0))
10449 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10451 if (powfn)
10453 tree arg = build_real (type, dconst2);
10454 return build_call_expr (powfn, 2, arg0, arg);
10459 goto associate;
10461 case BIT_IOR_EXPR:
10462 bit_ior:
10463 if (integer_all_onesp (arg1))
10464 return omit_one_operand (type, arg1, arg0);
10465 if (integer_zerop (arg1))
10466 return non_lvalue (fold_convert (type, arg0));
10467 if (operand_equal_p (arg0, arg1, 0))
10468 return non_lvalue (fold_convert (type, arg0));
10470 /* ~X | X is -1. */
10471 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10472 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10474 t1 = fold_convert (type, integer_zero_node);
10475 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10476 return omit_one_operand (type, t1, arg1);
10479 /* X | ~X is -1. */
10480 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10481 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10483 t1 = fold_convert (type, integer_zero_node);
10484 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10485 return omit_one_operand (type, t1, arg0);
10488 /* Canonicalize (X & C1) | C2. */
10489 if (TREE_CODE (arg0) == BIT_AND_EXPR
10490 && TREE_CODE (arg1) == INTEGER_CST
10491 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10493 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10494 int width = TYPE_PRECISION (type), w;
10495 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10496 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10497 hi2 = TREE_INT_CST_HIGH (arg1);
10498 lo2 = TREE_INT_CST_LOW (arg1);
10500 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10501 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10502 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10504 if (width > HOST_BITS_PER_WIDE_INT)
10506 mhi = (unsigned HOST_WIDE_INT) -1
10507 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10508 mlo = -1;
10510 else
10512 mhi = 0;
10513 mlo = (unsigned HOST_WIDE_INT) -1
10514 >> (HOST_BITS_PER_WIDE_INT - width);
10517 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10518 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10519 return fold_build2 (BIT_IOR_EXPR, type,
10520 TREE_OPERAND (arg0, 0), arg1);
10522 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10523 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10524 mode which allows further optimizations. */
10525 hi1 &= mhi;
10526 lo1 &= mlo;
10527 hi2 &= mhi;
10528 lo2 &= mlo;
10529 hi3 = hi1 & ~hi2;
10530 lo3 = lo1 & ~lo2;
10531 for (w = BITS_PER_UNIT;
10532 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10533 w <<= 1)
10535 unsigned HOST_WIDE_INT mask
10536 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10537 if (((lo1 | lo2) & mask) == mask
10538 && (lo1 & ~mask) == 0 && hi1 == 0)
10540 hi3 = 0;
10541 lo3 = mask;
10542 break;
10545 if (hi3 != hi1 || lo3 != lo1)
10546 return fold_build2 (BIT_IOR_EXPR, type,
10547 fold_build2 (BIT_AND_EXPR, type,
10548 TREE_OPERAND (arg0, 0),
10549 build_int_cst_wide (type,
10550 lo3, hi3)),
10551 arg1);
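	  /* E.g. (x & 0x70) | 0x30, an assumed example: the bits of 0x30
	     are forced to 1 by the IOR anyway, so C1 shrinks to
	     0x70 & ~0x30 = 0x40 and the result is (x & 0x40) | 0x30.  */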
10554 /* (X & Y) | Y is (X, Y). */
10555 if (TREE_CODE (arg0) == BIT_AND_EXPR
10556 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10557 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10558 /* (X & Y) | X is (Y, X). */
10559 if (TREE_CODE (arg0) == BIT_AND_EXPR
10560 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10561 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10562 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10563 /* X | (X & Y) is (Y, X). */
10564 if (TREE_CODE (arg1) == BIT_AND_EXPR
10565 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10566 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10567 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10568 /* X | (Y & X) is (Y, X). */
10569 if (TREE_CODE (arg1) == BIT_AND_EXPR
10570 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10571 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10572 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10574 t1 = distribute_bit_expr (code, type, arg0, arg1);
10575 if (t1 != NULL_TREE)
10576 return t1;
10578 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10580 This results in more efficient code for machines without a NAND
10581 instruction. Combine will canonicalize to the first form
10582 which will allow use of NAND instructions provided by the
10583 backend if they exist. */
10584 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10585 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10587 return fold_build1 (BIT_NOT_EXPR, type,
10588 build2 (BIT_AND_EXPR, type,
10589 fold_convert (type,
10590 TREE_OPERAND (arg0, 0)),
10591 fold_convert (type,
10592 TREE_OPERAND (arg1, 0))));
10595 /* See if this can be simplified into a rotate first. If that
10596 is unsuccessful, continue in the association code. */
10597 goto bit_rotate;
10599 case BIT_XOR_EXPR:
10600 if (integer_zerop (arg1))
10601 return non_lvalue (fold_convert (type, arg0));
10602 if (integer_all_onesp (arg1))
10603 return fold_build1 (BIT_NOT_EXPR, type, op0);
10604 if (operand_equal_p (arg0, arg1, 0))
10605 return omit_one_operand (type, integer_zero_node, arg0);
10607 /* ~X ^ X is -1. */
10608 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10609 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10611 t1 = fold_convert (type, integer_zero_node);
10612 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10613 return omit_one_operand (type, t1, arg1);
10616 /* X ^ ~X is -1. */
10617 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10618 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10620 t1 = fold_convert (type, integer_zero_node);
10621 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10622 return omit_one_operand (type, t1, arg0);
10625 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10626 with a constant, and the two constants have no bits in common,
10627 we should treat this as a BIT_IOR_EXPR since this may produce more
10628 simplifications. */
10629 if (TREE_CODE (arg0) == BIT_AND_EXPR
10630 && TREE_CODE (arg1) == BIT_AND_EXPR
10631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10632 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10633 && integer_zerop (const_binop (BIT_AND_EXPR,
10634 TREE_OPERAND (arg0, 1),
10635 TREE_OPERAND (arg1, 1), 0)))
10637 code = BIT_IOR_EXPR;
10638 goto bit_ior;
10641 /* (X | Y) ^ X -> Y & ~X. */
10642 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10643 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10645 tree t2 = TREE_OPERAND (arg0, 1);
10646 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10647 arg1);
10648 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10649 fold_convert (type, t1));
10650 return t1;
10653 /* (Y | X) ^ X -> Y & ~X. */
10654 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10655 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10657 tree t2 = TREE_OPERAND (arg0, 0);
10658 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10659 arg1);
10660 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10661 fold_convert (type, t1));
10662 return t1;
10665 /* X ^ (X | Y) -> Y & ~X. */
10666 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10669 tree t2 = TREE_OPERAND (arg1, 1);
10670 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10671 arg0);
10672 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10673 fold_convert (type, t1));
10674 return t1;
10677 /* X ^ (Y | X) -> Y & ~X. */
10678 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10679 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10681 tree t2 = TREE_OPERAND (arg1, 0);
10682 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10683 arg0);
10684 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10685 fold_convert (type, t1));
10686 return t1;
10689 /* Convert ~X ^ ~Y to X ^ Y. */
10690 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10691 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10692 return fold_build2 (code, type,
10693 fold_convert (type, TREE_OPERAND (arg0, 0)),
10694 fold_convert (type, TREE_OPERAND (arg1, 0)));
10696 /* Convert ~X ^ C to X ^ ~C. */
10697 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10698 && TREE_CODE (arg1) == INTEGER_CST)
10699 return fold_build2 (code, type,
10700 fold_convert (type, TREE_OPERAND (arg0, 0)),
10701 fold_build1 (BIT_NOT_EXPR, type, arg1));
10703 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10704 if (TREE_CODE (arg0) == BIT_AND_EXPR
10705 && integer_onep (TREE_OPERAND (arg0, 1))
10706 && integer_onep (arg1))
10707 return fold_build2 (EQ_EXPR, type, arg0,
10708 build_int_cst (TREE_TYPE (arg0), 0));
10710 /* Fold (X & Y) ^ Y as ~X & Y. */
10711 if (TREE_CODE (arg0) == BIT_AND_EXPR
10712 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10714 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10715 return fold_build2 (BIT_AND_EXPR, type,
10716 fold_build1 (BIT_NOT_EXPR, type, tem),
10717 fold_convert (type, arg1));
10719 /* Fold (X & Y) ^ X as ~Y & X. */
10720 if (TREE_CODE (arg0) == BIT_AND_EXPR
10721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10722 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10724 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10725 return fold_build2 (BIT_AND_EXPR, type,
10726 fold_build1 (BIT_NOT_EXPR, type, tem),
10727 fold_convert (type, arg1));
10729 /* Fold X ^ (X & Y) as X & ~Y. */
10730 if (TREE_CODE (arg1) == BIT_AND_EXPR
10731 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10733 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10734 return fold_build2 (BIT_AND_EXPR, type,
10735 fold_convert (type, arg0),
10736 fold_build1 (BIT_NOT_EXPR, type, tem));
10738 /* Fold X ^ (Y & X) as ~Y & X. */
10739 if (TREE_CODE (arg1) == BIT_AND_EXPR
10740 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10741 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10743 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10744 return fold_build2 (BIT_AND_EXPR, type,
10745 fold_build1 (BIT_NOT_EXPR, type, tem),
10746 fold_convert (type, arg0));
10749 /* See if this can be simplified into a rotate first. If that
10750 is unsuccessful, continue in the association code. */
10751 goto bit_rotate;
10753 case BIT_AND_EXPR:
10754 if (integer_all_onesp (arg1))
10755 return non_lvalue (fold_convert (type, arg0));
10756 if (integer_zerop (arg1))
10757 return omit_one_operand (type, arg1, arg0);
10758 if (operand_equal_p (arg0, arg1, 0))
10759 return non_lvalue (fold_convert (type, arg0));
10761 /* ~X & X is always zero. */
10762 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10763 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10764 return omit_one_operand (type, integer_zero_node, arg1);
10766 /* X & ~X is always zero. */
10767 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10768 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10769 return omit_one_operand (type, integer_zero_node, arg0);
10771 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10772 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10773 && TREE_CODE (arg1) == INTEGER_CST
10774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10776 tree tmp1 = fold_convert (type, arg1);
10777 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
10778 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
10779 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
10780 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
10781 return fold_convert (type,
10782 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
10785 /* (X | Y) & Y is (X, Y). */
10786 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10787 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10788 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10789 /* (X | Y) & X is (Y, X). */
10790 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10791 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10792 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10793 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10794 /* X & (X | Y) is (Y, X). */
10795 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10796 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10797 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10798 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10799 /* X & (Y | X) is (Y, X). */
10800 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10801 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10802 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10803 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10805 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10806 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10807 && integer_onep (TREE_OPERAND (arg0, 1))
10808 && integer_onep (arg1))
10810 tem = TREE_OPERAND (arg0, 0);
10811 return fold_build2 (EQ_EXPR, type,
10812 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10813 build_int_cst (TREE_TYPE (tem), 1)),
10814 build_int_cst (TREE_TYPE (tem), 0));
10816 /* Fold ~X & 1 as (X & 1) == 0. */
10817 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10818 && integer_onep (arg1))
10820 tem = TREE_OPERAND (arg0, 0);
10821 return fold_build2 (EQ_EXPR, type,
10822 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10823 build_int_cst (TREE_TYPE (tem), 1)),
10824 build_int_cst (TREE_TYPE (tem), 0));
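/* Both folds turn a flip of the low bit into a test of the
   original low bit: (x ^ 1) & 1 and ~x & 1 each become
   (x & 1) == 0.  */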
10827 /* Fold (X ^ Y) & Y as ~X & Y. */
10828 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10829 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10831 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10832 return fold_build2 (BIT_AND_EXPR, type,
10833 fold_build1 (BIT_NOT_EXPR, type, tem),
10834 fold_convert (type, arg1));
10836 /* Fold (X ^ Y) & X as ~Y & X. */
10837 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10839 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10841 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10842 return fold_build2 (BIT_AND_EXPR, type,
10843 fold_build1 (BIT_NOT_EXPR, type, tem),
10844 fold_convert (type, arg1));
10846 /* Fold X & (X ^ Y) as X & ~Y. */
10847 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10848 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10850 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10851 return fold_build2 (BIT_AND_EXPR, type,
10852 fold_convert (type, arg0),
10853 fold_build1 (BIT_NOT_EXPR, type, tem));
10855 /* Fold X & (Y ^ X) as ~Y & X. */
10856 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10857 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10858 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10860 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10861 return fold_build2 (BIT_AND_EXPR, type,
10862 fold_build1 (BIT_NOT_EXPR, type, tem),
10863 fold_convert (type, arg0));
10866 t1 = distribute_bit_expr (code, type, arg0, arg1);
10867 if (t1 != NULL_TREE)
10868 return t1;
10869 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10870 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10871 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10873 unsigned int prec
10874 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10876 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10877 && (~TREE_INT_CST_LOW (arg1)
10878 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10879 return fold_convert (type, TREE_OPERAND (arg0, 0));
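/* For example, with c of type unsigned char (precision 8),
   (int) c & 0377 becomes (int) c: the mask already covers
   every bit that can be nonzero after the widening.  */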
10882 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10884 This results in more efficient code for machines without a NOR
10885 instruction. Combine will canonicalize to the first form
10886 which will allow use of NOR instructions provided by the
10887 backend if they exist. */
10888 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10889 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10891 return fold_build1 (BIT_NOT_EXPR, type,
10892 build2 (BIT_IOR_EXPR, type,
10893 fold_convert (type,
10894 TREE_OPERAND (arg0, 0)),
10895 fold_convert (type,
10896 TREE_OPERAND (arg1, 0))));
10899 /* If arg0 is derived from the address of an object or function, we may
10900 be able to fold this expression using the object or function's
10901 alignment. */
10902 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10904 unsigned HOST_WIDE_INT modulus, residue;
10905 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10907 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10909 /* This works because modulus is a power of 2. If this weren't the
10910 case, we'd have to replace it by its greatest power-of-2
10911 divisor: modulus & -modulus. */
10912 if (low < modulus)
10913 return build_int_cst (type, residue & low);
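/* For example, if arg0 is the address of an object known to be
   8-byte aligned, modulus is 8 and residue is 0, so ANDing the
   address with 7 folds to the constant 0.  */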
10916 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10917 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10918 if the new mask might be further optimized. */
10919 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10920 || TREE_CODE (arg0) == RSHIFT_EXPR)
10921 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10922 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10923 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10924 < TYPE_PRECISION (TREE_TYPE (arg0))
10925 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10926 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10928 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10929 unsigned HOST_WIDE_INT mask
10930 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10931 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10932 tree shift_type = TREE_TYPE (arg0);
10934 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10935 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10936 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10937 && TYPE_PRECISION (TREE_TYPE (arg0))
10938 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10940 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10941 tree arg00 = TREE_OPERAND (arg0, 0);
10942 /* See if more bits can be proven as zero because of
10943 zero extension. */
10944 if (TREE_CODE (arg00) == NOP_EXPR
10945 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10947 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10948 if (TYPE_PRECISION (inner_type)
10949 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10950 && TYPE_PRECISION (inner_type) < prec)
10952 prec = TYPE_PRECISION (inner_type);
10953 /* See if we can shorten the right shift. */
10954 if (shiftc < prec)
10955 shift_type = inner_type;
10958 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10959 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10960 zerobits <<= prec - shiftc;
10961 /* For an arithmetic shift, if the sign bit could be set, zerobits
10962 may actually contain sign bits, so no transformation is possible
10963 unless MASK masks them all away. In that case the shift needs
10964 to be converted into a logical shift. */
10965 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10966 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10968 if ((mask & zerobits) == 0)
10969 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10970 else
10971 zerobits = 0;
10975 /* ((X << 16) & 0xff00) is (X, 0). */
10976 if ((mask & zerobits) == mask)
10977 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10979 newmask = mask | zerobits;
10980 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10982 unsigned int prec;
10984 /* Only do the transformation if NEWMASK is some integer
10985 mode's mask. */
10986 for (prec = BITS_PER_UNIT;
10987 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10988 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10989 break;
10990 if (prec < HOST_BITS_PER_WIDE_INT
10991 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10993 if (shift_type != TREE_TYPE (arg0))
10995 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10996 fold_convert (shift_type,
10997 TREE_OPERAND (arg0, 0)),
10998 TREE_OPERAND (arg0, 1));
10999 tem = fold_convert (type, tem);
11001 else
11002 tem = op0;
11003 return fold_build2 (BIT_AND_EXPR, type, tem,
11004 build_int_cst_type (TREE_TYPE (op1),
11005 newmask));
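/* For example, assuming a 32-bit unsigned x: in (x >> 24) & 0xff
   the bits above bit 7 of the shift result are known zero, so
   newmask becomes 0xffffffff and the AND folds away entirely,
   while (x << 16) & 0xff00 folds to plain 0.  */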
11010 goto associate;
11012 case RDIV_EXPR:
11013 /* Don't touch a floating-point divide by zero unless the mode
11014 of the constant can represent infinity. */
11015 if (TREE_CODE (arg1) == REAL_CST
11016 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11017 && real_zerop (arg1))
11018 return NULL_TREE;
11020 /* Optimize A / A to 1.0 if we don't care about
11021 NaNs or Infinities. Skip the transformation
11022 for non-real operands. */
11023 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11024 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11025 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11026 && operand_equal_p (arg0, arg1, 0))
11028 tree r = build_real (TREE_TYPE (arg0), dconst1);
11030 return omit_two_operands (type, r, arg0, arg1);
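/* For example, with -ffinite-math-only x / x folds straight to
   1.0; without it the fold would be wrong for x equal to 0.0,
   NaN or infinity.  */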
11033 /* The complex version of the above A / A optimization. */
11034 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11035 && operand_equal_p (arg0, arg1, 0))
11037 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11038 if (! HONOR_NANS (TYPE_MODE (elem_type))
11039 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11041 tree r = build_real (elem_type, dconst1);
11042 /* omit_two_operands will call fold_convert for us. */
11043 return omit_two_operands (type, r, arg0, arg1);
11047 /* (-A) / (-B) -> A / B */
11048 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11049 return fold_build2 (RDIV_EXPR, type,
11050 TREE_OPERAND (arg0, 0),
11051 negate_expr (arg1));
11052 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11053 return fold_build2 (RDIV_EXPR, type,
11054 negate_expr (arg0),
11055 TREE_OPERAND (arg1, 0));
11057 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11058 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11059 && real_onep (arg1))
11060 return non_lvalue (fold_convert (type, arg0));
11062 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11064 && real_minus_onep (arg1))
11065 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11067 /* If ARG1 is a constant, we can convert this to a multiply by the
11068 reciprocal. This does not have the same rounding properties,
11069 so only do this if -freciprocal-math. We can actually
11070 always safely do it if ARG1 is a power of two, but it's hard to
11071 tell if it is or not in a portable manner. */
11072 if (TREE_CODE (arg1) == REAL_CST)
11074 if (flag_reciprocal_math
11075 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11076 arg1, 0)))
11077 return fold_build2 (MULT_EXPR, type, arg0, tem);
11078 /* Find the reciprocal if optimizing and the result is exact. */
11079 if (optimize)
11081 REAL_VALUE_TYPE r;
11082 r = TREE_REAL_CST (arg1);
11083 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11085 tem = build_real (type, r);
11086 return fold_build2 (MULT_EXPR, type,
11087 fold_convert (type, arg0), tem);
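/* For example, x / 2.0 folds to x * 0.5 whenever optimizing,
   because the reciprocal of 2.0 is exact, whereas x / 3.0 folds
   to x * (1.0/3.0) only under -freciprocal-math.  */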
11091 /* Convert A/B/C to A/(B*C). */
11092 if (flag_reciprocal_math
11093 && TREE_CODE (arg0) == RDIV_EXPR)
11094 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11095 fold_build2 (MULT_EXPR, type,
11096 TREE_OPERAND (arg0, 1), arg1));
11098 /* Convert A/(B/C) to (A/B)*C. */
11099 if (flag_reciprocal_math
11100 && TREE_CODE (arg1) == RDIV_EXPR)
11101 return fold_build2 (MULT_EXPR, type,
11102 fold_build2 (RDIV_EXPR, type, arg0,
11103 TREE_OPERAND (arg1, 0)),
11104 TREE_OPERAND (arg1, 1));
11106 /* Convert C1/(X*C2) into (C1/C2)/X. */
11107 if (flag_reciprocal_math
11108 && TREE_CODE (arg1) == MULT_EXPR
11109 && TREE_CODE (arg0) == REAL_CST
11110 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11112 tree tem = const_binop (RDIV_EXPR, arg0,
11113 TREE_OPERAND (arg1, 1), 0);
11114 if (tem)
11115 return fold_build2 (RDIV_EXPR, type, tem,
11116 TREE_OPERAND (arg1, 0));
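/* Under -freciprocal-math these rewrites save divisions, e.g.
   (a / b) / c becomes a / (b * c), a / (b / c) becomes
   (a / b) * c, and 6.0 / (x * 2.0) becomes 3.0 / x.  */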
11119 if (flag_unsafe_math_optimizations)
11121 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11122 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11124 /* Optimize sin(x)/cos(x) as tan(x). */
11125 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11126 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11127 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11128 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11129 CALL_EXPR_ARG (arg1, 0), 0))
11131 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11133 if (tanfn != NULL_TREE)
11134 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11137 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11138 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11139 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11140 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11141 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11142 CALL_EXPR_ARG (arg1, 0), 0))
11144 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11146 if (tanfn != NULL_TREE)
11148 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11149 return fold_build2 (RDIV_EXPR, type,
11150 build_real (type, dconst1), tmp);
11154 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11155 NaNs or Infinities. */
11156 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11157 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11158 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11160 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11161 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11163 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11164 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11165 && operand_equal_p (arg00, arg01, 0))
11167 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11169 if (cosfn != NULL_TREE)
11170 return build_call_expr (cosfn, 1, arg00);
11174 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11175 NaNs or Infinities. */
11176 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11177 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11178 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11180 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11181 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11183 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11184 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11185 && operand_equal_p (arg00, arg01, 0))
11187 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11189 if (cosfn != NULL_TREE)
11191 tree tmp = build_call_expr (cosfn, 1, arg00);
11192 return fold_build2 (RDIV_EXPR, type,
11193 build_real (type, dconst1),
11194 tmp);
11199 /* Optimize pow(x,c)/x as pow(x,c-1). */
11200 if (fcode0 == BUILT_IN_POW
11201 || fcode0 == BUILT_IN_POWF
11202 || fcode0 == BUILT_IN_POWL)
11204 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11205 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11206 if (TREE_CODE (arg01) == REAL_CST
11207 && !TREE_OVERFLOW (arg01)
11208 && operand_equal_p (arg1, arg00, 0))
11210 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11211 REAL_VALUE_TYPE c;
11212 tree arg;
11214 c = TREE_REAL_CST (arg01);
11215 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11216 arg = build_real (type, c);
11217 return build_call_expr (powfn, 2, arg1, arg);
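/* For example, pow (x, 3.0) / x folds to pow (x, 2.0); the new
   exponent is computed at compile time by real_arithmetic.  */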
11221 /* Optimize a/root(b/c) into a*root(c/b). */
11222 if (BUILTIN_ROOT_P (fcode1))
11224 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11226 if (TREE_CODE (rootarg) == RDIV_EXPR)
11228 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11229 tree b = TREE_OPERAND (rootarg, 0);
11230 tree c = TREE_OPERAND (rootarg, 1);
11232 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11234 tmp = build_call_expr (rootfn, 1, tmp);
11235 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11239 /* Optimize x/expN(y) into x*expN(-y). */
11240 if (BUILTIN_EXPONENT_P (fcode1))
11242 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11243 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11244 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11245 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11248 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11249 if (fcode1 == BUILT_IN_POW
11250 || fcode1 == BUILT_IN_POWF
11251 || fcode1 == BUILT_IN_POWL)
11253 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11254 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11255 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11256 tree neg11 = fold_convert (type, negate_expr (arg11));
11257 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11258 return fold_build2 (MULT_EXPR, type, arg0, arg1);
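/* Each of these trades the division for a multiplication, e.g.
   a / sqrt (b / c) becomes a * sqrt (c / b), x / exp (y)
   becomes x * exp (-y), and x / pow (y, z) becomes
   x * pow (y, -z).  */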
11261 return NULL_TREE;
11263 case TRUNC_DIV_EXPR:
11264 case FLOOR_DIV_EXPR:
11265 /* Simplify A / (B << N) where A and B are positive and B is
11266 a power of 2, to A >> (N + log2(B)). */
11267 strict_overflow_p = false;
11268 if (TREE_CODE (arg1) == LSHIFT_EXPR
11269 && (TYPE_UNSIGNED (type)
11270 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11272 tree sval = TREE_OPERAND (arg1, 0);
11273 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11275 tree sh_cnt = TREE_OPERAND (arg1, 1);
11276 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11278 if (strict_overflow_p)
11279 fold_overflow_warning (("assuming signed overflow does not "
11280 "occur when simplifying A / (B << N)"),
11281 WARN_STRICT_OVERFLOW_MISC);
11283 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11284 sh_cnt, build_int_cst (NULL_TREE, pow2));
11285 return fold_build2 (RSHIFT_EXPR, type,
11286 fold_convert (type, arg0), sh_cnt);
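/* For example, with unsigned x, x / (4 << n) folds to
   x >> (n + 2), since log2 (4) == 2.  */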
11290 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11291 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11292 if (INTEGRAL_TYPE_P (type)
11293 && TYPE_UNSIGNED (type)
11294 && code == FLOOR_DIV_EXPR)
11295 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11297 /* Fall thru */
11299 case ROUND_DIV_EXPR:
11300 case CEIL_DIV_EXPR:
11301 case EXACT_DIV_EXPR:
11302 if (integer_onep (arg1))
11303 return non_lvalue (fold_convert (type, arg0));
11304 if (integer_zerop (arg1))
11305 return NULL_TREE;
11306 /* X / -1 is -X. */
11307 if (!TYPE_UNSIGNED (type)
11308 && TREE_CODE (arg1) == INTEGER_CST
11309 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11310 && TREE_INT_CST_HIGH (arg1) == -1)
11311 return fold_convert (type, negate_expr (arg0));
11313 /* Convert -A / -B to A / B when the type is signed and overflow is
11314 undefined. */
11315 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11316 && TREE_CODE (arg0) == NEGATE_EXPR
11317 && negate_expr_p (arg1))
11319 if (INTEGRAL_TYPE_P (type))
11320 fold_overflow_warning (("assuming signed overflow does not occur "
11321 "when distributing negation across "
11322 "division"),
11323 WARN_STRICT_OVERFLOW_MISC);
11324 return fold_build2 (code, type,
11325 fold_convert (type, TREE_OPERAND (arg0, 0)),
11326 negate_expr (arg1));
11328 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11329 && TREE_CODE (arg1) == NEGATE_EXPR
11330 && negate_expr_p (arg0))
11332 if (INTEGRAL_TYPE_P (type))
11333 fold_overflow_warning (("assuming signed overflow does not occur "
11334 "when distributing negation across "
11335 "division"),
11336 WARN_STRICT_OVERFLOW_MISC);
11337 return fold_build2 (code, type, negate_expr (arg0),
11338 TREE_OPERAND (arg1, 0));
11341 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11342 operation, EXACT_DIV_EXPR.
11344 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11345 At one time others generated faster code, but it's not clear whether
11346 they still do after the last round of changes to the DIV code in expmed.c. */
11347 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11348 && multiple_of_p (type, arg0, arg1))
11349 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
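/* For example, (x * 4) ceil-divided by 4 is rewritten as an
   EXACT_DIV_EXPR, the cheapest division variant to expand.  */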
11351 strict_overflow_p = false;
11352 if (TREE_CODE (arg1) == INTEGER_CST
11353 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11354 &strict_overflow_p)))
11356 if (strict_overflow_p)
11357 fold_overflow_warning (("assuming signed overflow does not occur "
11358 "when simplifying division"),
11359 WARN_STRICT_OVERFLOW_MISC);
11360 return fold_convert (type, tem);
11363 return NULL_TREE;
11365 case CEIL_MOD_EXPR:
11366 case FLOOR_MOD_EXPR:
11367 case ROUND_MOD_EXPR:
11368 case TRUNC_MOD_EXPR:
11369 /* X % 1 is always zero, but be sure to preserve any side
11370 effects in X. */
11371 if (integer_onep (arg1))
11372 return omit_one_operand (type, integer_zero_node, arg0);
11374 /* For X % 0, return X % 0 unchanged so that we can get the
11375 proper warnings and errors. */
11376 if (integer_zerop (arg1))
11377 return NULL_TREE;
11379 /* 0 % X is always zero, but be sure to preserve any side
11380 effects in X. Place this after checking for X == 0. */
11381 if (integer_zerop (arg0))
11382 return omit_one_operand (type, integer_zero_node, arg1);
11384 /* X % -1 is zero. */
11385 if (!TYPE_UNSIGNED (type)
11386 && TREE_CODE (arg1) == INTEGER_CST
11387 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11388 && TREE_INT_CST_HIGH (arg1) == -1)
11389 return omit_one_operand (type, integer_zero_node, arg0);
11391 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11392 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11393 strict_overflow_p = false;
11394 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11395 && (TYPE_UNSIGNED (type)
11396 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11398 tree c = arg1;
11399 /* Also optimize A % (C << N) where C is a power of 2,
11400 to A & ((C << N) - 1). */
11401 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11402 c = TREE_OPERAND (arg1, 0);
11404 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11406 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11407 build_int_cst (TREE_TYPE (arg1), 1));
11408 if (strict_overflow_p)
11409 fold_overflow_warning (("assuming signed overflow does not "
11410 "occur when simplifying "
11411 "X % (power of two)"),
11412 WARN_STRICT_OVERFLOW_MISC);
11413 return fold_build2 (BIT_AND_EXPR, type,
11414 fold_convert (type, arg0),
11415 fold_convert (type, mask));
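/* For example, with unsigned x, x % 8 folds to x & 7, and
   x % (4 << n) folds to x & ((4 << n) - 1).  */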
11419 /* X % -C is the same as X % C. */
11420 if (code == TRUNC_MOD_EXPR
11421 && !TYPE_UNSIGNED (type)
11422 && TREE_CODE (arg1) == INTEGER_CST
11423 && !TREE_OVERFLOW (arg1)
11424 && TREE_INT_CST_HIGH (arg1) < 0
11425 && !TYPE_OVERFLOW_TRAPS (type)
11426 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11427 && !sign_bit_p (arg1, arg1))
11428 return fold_build2 (code, type, fold_convert (type, arg0),
11429 fold_convert (type, negate_expr (arg1)));
11431 /* X % -Y is the same as X % Y. */
11432 if (code == TRUNC_MOD_EXPR
11433 && !TYPE_UNSIGNED (type)
11434 && TREE_CODE (arg1) == NEGATE_EXPR
11435 && !TYPE_OVERFLOW_TRAPS (type))
11436 return fold_build2 (code, type, fold_convert (type, arg0),
11437 fold_convert (type, TREE_OPERAND (arg1, 0)));
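/* For example, with signed x, x % -5 folds to x % 5 and
   x % -y folds to x % y: the result of TRUNC_MOD_EXPR takes
   its sign from the dividend alone.  */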
11439 if (TREE_CODE (arg1) == INTEGER_CST
11440 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11441 &strict_overflow_p)))
11443 if (strict_overflow_p)
11444 fold_overflow_warning (("assuming signed overflow does not occur "
11445 "when simplifying modulus"),
11446 WARN_STRICT_OVERFLOW_MISC);
11447 return fold_convert (type, tem);
11450 return NULL_TREE;
11452 case LROTATE_EXPR:
11453 case RROTATE_EXPR:
11454 if (integer_all_onesp (arg0))
11455 return omit_one_operand (type, arg0, arg1);
11456 goto shift;
11458 case RSHIFT_EXPR:
11459 /* Optimize -1 >> x for arithmetic right shifts. */
11460 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11461 return omit_one_operand (type, arg0, arg1);
11462 /* ... fall through ... */
11464 case LSHIFT_EXPR:
11465 shift:
11466 if (integer_zerop (arg1))
11467 return non_lvalue (fold_convert (type, arg0));
11468 if (integer_zerop (arg0))
11469 return omit_one_operand (type, arg0, arg1);
11471 /* Since a negative shift count is not well-defined,
11472 don't try to compute it in the compiler. */
11473 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11474 return NULL_TREE;
11476 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11477 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11478 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11479 && host_integerp (TREE_OPERAND (arg0, 1), false)
11480 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11482 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11483 + TREE_INT_CST_LOW (arg1));
11485 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11486 being well defined. */
11487 if (low >= TYPE_PRECISION (type))
11489 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11490 low = low % TYPE_PRECISION (type);
11491 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11492 return build_int_cst (type, 0);
11493 else
11494 low = TYPE_PRECISION (type) - 1;
11497 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11498 build_int_cst (type, low));
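/* For example, (x << 2) << 3 folds to x << 5. If the counts add
   up past the precision, assuming 32-bit x, (x << 20) << 20
   folds to 0, while for signed x the arithmetic right shifts
   (x >> 20) >> 20 fold to x >> 31.  */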
11501 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11502 into x & ((unsigned)-1 >> c) for unsigned types. */
11503 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11504 || (TYPE_UNSIGNED (type)
11505 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11506 && host_integerp (arg1, false)
11507 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11508 && host_integerp (TREE_OPERAND (arg0, 1), false)
11509 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11511 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11512 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11513 tree lshift;
11514 tree arg00;
11516 if (low0 == low1)
11518 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11520 lshift = build_int_cst (type, -1);
11521 lshift = int_const_binop (code, lshift, arg1, 0);
11523 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
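/* For example, (x >> 3) << 3 folds to x & -8, and for a 32-bit
   unsigned x, (x << 3) >> 3 folds to x & 0x1fffffff.  */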
11527 /* Rewrite an LROTATE_EXPR by a constant into an
11528 RROTATE_EXPR by a new constant. */
11529 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11531 tree tem = build_int_cst (TREE_TYPE (arg1),
11532 TYPE_PRECISION (type));
11533 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11534 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11537 /* If we have a rotate of a bit operation with the rotate count and
11538 the second operand of the bit operation both constant,
11539 permute the two operations. */
11540 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11541 && (TREE_CODE (arg0) == BIT_AND_EXPR
11542 || TREE_CODE (arg0) == BIT_IOR_EXPR
11543 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11544 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11545 return fold_build2 (TREE_CODE (arg0), type,
11546 fold_build2 (code, type,
11547 TREE_OPERAND (arg0, 0), arg1),
11548 fold_build2 (code, type,
11549 TREE_OPERAND (arg0, 1), arg1));
11551 /* Two consecutive rotates adding up to the precision of the
11552 type can be ignored. */
11553 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11554 && TREE_CODE (arg0) == RROTATE_EXPR
11555 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11556 && TREE_INT_CST_HIGH (arg1) == 0
11557 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11558 && ((TREE_INT_CST_LOW (arg1)
11559 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11560 == (unsigned int) TYPE_PRECISION (type)))
11561 return TREE_OPERAND (arg0, 0);
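/* For example, on a 32-bit type a rotate left by 8 becomes a
   rotate right by 24, and a rotate right by 24 of a rotate
   right by 8 folds back to the operand itself.  */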
11563 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11564 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11565 if the latter can be further optimized. */
11566 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11567 && TREE_CODE (arg0) == BIT_AND_EXPR
11568 && TREE_CODE (arg1) == INTEGER_CST
11569 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11571 tree mask = fold_build2 (code, type,
11572 fold_convert (type, TREE_OPERAND (arg0, 1)),
11573 arg1);
11574 tree shift = fold_build2 (code, type,
11575 fold_convert (type, TREE_OPERAND (arg0, 0)),
11576 arg1);
11577 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11578 if (tem)
11579 return tem;
11582 return NULL_TREE;
11584 case MIN_EXPR:
11585 if (operand_equal_p (arg0, arg1, 0))
11586 return omit_one_operand (type, arg0, arg1);
11587 if (INTEGRAL_TYPE_P (type)
11588 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11589 return omit_one_operand (type, arg1, arg0);
11590 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11591 if (tem)
11592 return tem;
11593 goto associate;
11595 case MAX_EXPR:
11596 if (operand_equal_p (arg0, arg1, 0))
11597 return omit_one_operand (type, arg0, arg1);
11598 if (INTEGRAL_TYPE_P (type)
11599 && TYPE_MAX_VALUE (type)
11600 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11601 return omit_one_operand (type, arg1, arg0);
11602 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11603 if (tem)
11604 return tem;
11605 goto associate;
11607 case TRUTH_ANDIF_EXPR:
11608 /* Note that the operands of this must be ints
11609 and their values must be 0 or 1.
11610 ("true" is a fixed value perhaps depending on the language.) */
11611 /* If first arg is constant zero, return it. */
11612 if (integer_zerop (arg0))
11613 return fold_convert (type, arg0);
11614 case TRUTH_AND_EXPR:
11615 /* If either arg is constant true, drop it. */
11616 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11617 return non_lvalue (fold_convert (type, arg1));
11618 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11619 /* Preserve sequence points. */
11620 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11621 return non_lvalue (fold_convert (type, arg0));
11622 /* If second arg is constant zero, result is zero, but first arg
11623 must be evaluated. */
11624 if (integer_zerop (arg1))
11625 return omit_one_operand (type, arg1, arg0);
11626 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11627 case will be handled here. */
11628 if (integer_zerop (arg0))
11629 return omit_one_operand (type, arg0, arg1);
11631 /* !X && X is always false. */
11632 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11633 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11634 return omit_one_operand (type, integer_zero_node, arg1);
11635 /* X && !X is always false. */
11636 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11637 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11638 return omit_one_operand (type, integer_zero_node, arg0);
11640 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11641 means A >= Y && A != MAX, but in this case we know that
11642 A < X <= MAX. */
11644 if (!TREE_SIDE_EFFECTS (arg0)
11645 && !TREE_SIDE_EFFECTS (arg1))
11647 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11648 if (tem && !operand_equal_p (tem, arg0, 0))
11649 return fold_build2 (code, type, tem, arg1);
11651 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11652 if (tem && !operand_equal_p (tem, arg1, 0))
11653 return fold_build2 (code, type, arg0, tem);
11656 truth_andor:
11657 /* We only do these simplifications if we are optimizing. */
11658 if (!optimize)
11659 return NULL_TREE;
11661 /* Check for things like (A || B) && (A || C). We can convert this
11662 to A || (B && C). Note that either operator can be any of the four
11663 truth and/or operations and the transformation will still be
11664 valid. Also note that we only care about order for the
11665 ANDIF and ORIF operators. If B contains side effects, this
11666 might change the truth-value of A. */
11667 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11668 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11669 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11670 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11671 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11672 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11674 tree a00 = TREE_OPERAND (arg0, 0);
11675 tree a01 = TREE_OPERAND (arg0, 1);
11676 tree a10 = TREE_OPERAND (arg1, 0);
11677 tree a11 = TREE_OPERAND (arg1, 1);
11678 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11679 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11680 && (code == TRUTH_AND_EXPR
11681 || code == TRUTH_OR_EXPR));
11683 if (operand_equal_p (a00, a10, 0))
11684 return fold_build2 (TREE_CODE (arg0), type, a00,
11685 fold_build2 (code, type, a01, a11));
11686 else if (commutative && operand_equal_p (a00, a11, 0))
11687 return fold_build2 (TREE_CODE (arg0), type, a00,
11688 fold_build2 (code, type, a01, a10));
11689 else if (commutative && operand_equal_p (a01, a10, 0))
11690 return fold_build2 (TREE_CODE (arg0), type, a01,
11691 fold_build2 (code, type, a00, a11));
11693 /* This case is tricky because we must either have commutative
11694 operators or else A10 must not have side-effects. */
11696 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11697 && operand_equal_p (a01, a11, 0))
11698 return fold_build2 (TREE_CODE (arg0), type,
11699 fold_build2 (code, type, a00, a10),
11700 a01);
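/* For example, (a || b) && (a || c) folds to a || (b && c),
   so A is tested only once.  */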
11703 /* See if we can build a range comparison. */
11704 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11705 return tem;
11707 /* Check for the possibility of merging component references. If our
11708 lhs is another similar operation, try to merge its rhs with our
11709 rhs. Then try to merge our lhs and rhs. */
11710 if (TREE_CODE (arg0) == code
11711 && 0 != (tem = fold_truthop (code, type,
11712 TREE_OPERAND (arg0, 1), arg1)))
11713 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11715 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11716 return tem;
11718 return NULL_TREE;
11720 case TRUTH_ORIF_EXPR:
11721 /* Note that the operands of this must be ints
11722 and their values must be 0 or true.
11723 ("true" is a fixed value perhaps depending on the language.) */
11724 /* If first arg is constant true, return it. */
11725 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11726 return fold_convert (type, arg0);
11727 case TRUTH_OR_EXPR:
11728 /* If either arg is constant zero, drop it. */
11729 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11730 return non_lvalue (fold_convert (type, arg1));
11731 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11732 /* Preserve sequence points. */
11733 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11734 return non_lvalue (fold_convert (type, arg0));
11735 /* If second arg is constant true, result is true, but we must
11736 evaluate first arg. */
11737 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11738 return omit_one_operand (type, arg1, arg0);
11739 /* Likewise for first arg, but note this only occurs here for
11740 TRUTH_OR_EXPR. */
11741 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11742 return omit_one_operand (type, arg0, arg1);
11744 /* !X || X is always true. */
11745 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11746 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11747 return omit_one_operand (type, integer_one_node, arg1);
11748 /* X || !X is always true. */
11749 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11750 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11751 return omit_one_operand (type, integer_one_node, arg0);
11753 goto truth_andor;
11755 case TRUTH_XOR_EXPR:
11756 /* If the second arg is constant zero, drop it. */
11757 if (integer_zerop (arg1))
11758 return non_lvalue (fold_convert (type, arg0));
11759 /* If the second arg is constant true, this is a logical inversion. */
11760 if (integer_onep (arg1))
11762 /* Only call invert_truthvalue if operand is a truth value. */
11763 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11764 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11765 else
11766 tem = invert_truthvalue (arg0);
11767 return non_lvalue (fold_convert (type, tem));
11769 /* Identical arguments cancel to zero. */
11770 if (operand_equal_p (arg0, arg1, 0))
11771 return omit_one_operand (type, integer_zero_node, arg0);
11773 /* !X ^ X is always true. */
11774 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11775 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11776 return omit_one_operand (type, integer_one_node, arg1);
11778 /* X ^ !X is always true. */
11779 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11780 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11781 return omit_one_operand (type, integer_one_node, arg0);
11783 return NULL_TREE;
11785 case EQ_EXPR:
11786 case NE_EXPR:
11787 tem = fold_comparison (code, type, op0, op1);
11788 if (tem != NULL_TREE)
11789 return tem;
11791 /* bool_var != 0 becomes bool_var. */
11792 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11793 && code == NE_EXPR)
11794 return non_lvalue (fold_convert (type, arg0));
11796 /* bool_var == 1 becomes bool_var. */
11797 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11798 && code == EQ_EXPR)
11799 return non_lvalue (fold_convert (type, arg0));
11801 /* bool_var != 1 becomes !bool_var. */
11802 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11803 && code == NE_EXPR)
11804 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11806 /* bool_var == 0 becomes !bool_var. */
11807 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11808 && code == EQ_EXPR)
11809 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11811 /* If this is an equality comparison of the address of two non-weak,
11812 unaliased symbols neither of which are extern (since we do not
11813 have access to attributes for externs), then we know the result. */
11814 if (TREE_CODE (arg0) == ADDR_EXPR
11815 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11816 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11817 && ! lookup_attribute ("alias",
11818 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11819 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11820 && TREE_CODE (arg1) == ADDR_EXPR
11821 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11822 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11823 && ! lookup_attribute ("alias",
11824 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11825 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11827 /* We know that we're looking at the address of two
11828 non-weak, unaliased, static _DECL nodes.
11830 It is both wasteful and incorrect to call operand_equal_p
11831 to compare the two ADDR_EXPR nodes. It is wasteful in that
11832 all we need to do is test pointer equality for the arguments
11833 to the two ADDR_EXPR nodes. It is incorrect to use
11834 operand_equal_p as that function is NOT equivalent to a
11835 C equality test. It can in fact return false for two
11836 objects which would test as equal using the C equality
11837 operator. */
11838 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11839 return constant_boolean_node (equal
11840 ? code == EQ_EXPR : code != EQ_EXPR,
11841 type);
11844 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11845 a MINUS_EXPR of a constant, we can convert it into a comparison with
11846 a revised constant as long as no overflow occurs. */
11847 if (TREE_CODE (arg1) == INTEGER_CST
11848 && (TREE_CODE (arg0) == PLUS_EXPR
11849 || TREE_CODE (arg0) == MINUS_EXPR)
11850 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11851 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11852 ? MINUS_EXPR : PLUS_EXPR,
11853 fold_convert (TREE_TYPE (arg0), arg1),
11854 TREE_OPERAND (arg0, 1), 0))
11855 && !TREE_OVERFLOW (tem))
11856 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11858 /* Similarly for a NEGATE_EXPR. */
11859 if (TREE_CODE (arg0) == NEGATE_EXPR
11860 && TREE_CODE (arg1) == INTEGER_CST
11861 && 0 != (tem = negate_expr (arg1))
11862 && TREE_CODE (tem) == INTEGER_CST
11863 && !TREE_OVERFLOW (tem))
11864 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
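/* For example, x + 3 == 7 folds to x == 4, and -x == 5 folds
   to x == -5, provided the revised constant does not
   overflow.  */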
11866 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11867 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11868 && TREE_CODE (arg1) == INTEGER_CST
11869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11870 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11871 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11872 fold_convert (TREE_TYPE (arg0), arg1),
11873 TREE_OPERAND (arg0, 1)));
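/* For example, (x ^ 5) == 3 folds to x == 6, since
   5 ^ 3 == 6.  */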
11875 /* Transform comparisons of the form X +- C CMP X. */
11876 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11877 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11879 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11880 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11882 tree cst = TREE_OPERAND (arg0, 1);
11884 if (code == EQ_EXPR
11885 && !integer_zerop (cst))
11886 return omit_two_operands (type, boolean_false_node,
11887 TREE_OPERAND (arg0, 0), arg1);
11888 else
11889 return omit_two_operands (type, boolean_true_node,
11890 TREE_OPERAND (arg0, 0), arg1);
11893 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11894 for !=. Don't do this for ordered comparisons due to overflow. */
11895 if (TREE_CODE (arg0) == MINUS_EXPR
11896 && integer_zerop (arg1))
11897 return fold_build2 (code, type,
11898 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11900 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11901 if (TREE_CODE (arg0) == ABS_EXPR
11902 && (integer_zerop (arg1) || real_zerop (arg1)))
11903 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11905 /* If this is an EQ or NE comparison with zero and ARG0 is
11906 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11907 two operations, but the latter can be done in one less insn
11908 on machines that have only two-operand insns or on which a
11909 constant cannot be the first operand. */
11910 if (TREE_CODE (arg0) == BIT_AND_EXPR
11911 && integer_zerop (arg1))
11913 tree arg00 = TREE_OPERAND (arg0, 0);
11914 tree arg01 = TREE_OPERAND (arg0, 1);
11915 if (TREE_CODE (arg00) == LSHIFT_EXPR
11916 && integer_onep (TREE_OPERAND (arg00, 0)))
11918 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11919 arg01, TREE_OPERAND (arg00, 1));
11920 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11921 build_int_cst (TREE_TYPE (arg0), 1));
11922 return fold_build2 (code, type,
11923 fold_convert (TREE_TYPE (arg1), tem), arg1);
11925 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11926 && integer_onep (TREE_OPERAND (arg01, 0)))
11928 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11929 arg00, TREE_OPERAND (arg01, 1));
11930 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11931 build_int_cst (TREE_TYPE (arg0), 1));
11932 return fold_build2 (code, type,
11933 fold_convert (TREE_TYPE (arg1), tem), arg1);
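/* For example, ((1 << n) & x) == 0 becomes ((x >> n) & 1) == 0;
   both need two operations, but the latter avoids shifting a
   constant by a variable amount.  */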
11937 /* If this is an NE or EQ comparison of zero against the result of a
11938 signed MOD operation whose second operand is a power of 2, make
11939 the MOD operation unsigned since it is simpler and equivalent. */
11940 if (integer_zerop (arg1)
11941 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11942 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11943 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11944 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11945 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11946 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11948 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11949 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11950 fold_convert (newtype,
11951 TREE_OPERAND (arg0, 0)),
11952 fold_convert (newtype,
11953 TREE_OPERAND (arg0, 1)));
11955 return fold_build2 (code, type, newmod,
11956 fold_convert (newtype, arg1));
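/* For example, with signed x, x % 4 == 0 becomes
   (unsigned) x % 4 == 0; for a power-of-two modulus the zero
   test does not depend on the sign of the remainder.  */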
11959 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11960 C1 is a valid shift constant, and C2 is a power of two, i.e.
11961 a single bit. */
11962 if (TREE_CODE (arg0) == BIT_AND_EXPR
11963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11964 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11965 == INTEGER_CST
11966 && integer_pow2p (TREE_OPERAND (arg0, 1))
11967 && integer_zerop (arg1))
11969 tree itype = TREE_TYPE (arg0);
11970 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11971 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11973 /* Check for a valid shift count. */
11974 if (TREE_INT_CST_HIGH (arg001) == 0
11975 && TREE_INT_CST_LOW (arg001) < prec)
11977 tree arg01 = TREE_OPERAND (arg0, 1);
11978 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11979 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11980 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11981 can be rewritten as (X & (C2 << C1)) != 0. */
11982 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11984 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11985 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11986 return fold_build2 (code, type, tem, arg1);
11988 /* Otherwise, for signed (arithmetic) shifts,
11989 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11990 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11991 else if (!TYPE_UNSIGNED (itype))
11992 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11993 arg000, build_int_cst (itype, 0));
11994 /* Otherwise, for unsigned (logical) shifts,
11995 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11996 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11997 else
11998 return omit_one_operand (type,
11999 code == EQ_EXPR ? integer_one_node
12000 : integer_zero_node,
12001 arg000);
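/* For example, assuming 32-bit x, ((x >> 2) & 4) != 0 becomes
   (x & 16) != 0; when the shifted constant would overflow, as
   in ((x >> 28) & 0x10) != 0 with signed x, the test reduces
   to x < 0.  */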
12005 /* If this is an NE comparison of zero with an AND of one, remove the
12006 comparison since the AND will give the correct value. */
12007 if (code == NE_EXPR
12008 && integer_zerop (arg1)
12009 && TREE_CODE (arg0) == BIT_AND_EXPR
12010 && integer_onep (TREE_OPERAND (arg0, 1)))
12011 return fold_convert (type, arg0);
12013 /* If we have (A & C) == C where C is a power of 2, convert this into
12014 (A & C) != 0. Similarly for NE_EXPR. */
12015 if (TREE_CODE (arg0) == BIT_AND_EXPR
12016 && integer_pow2p (TREE_OPERAND (arg0, 1))
12017 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12018 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12019 arg0, fold_convert (TREE_TYPE (arg0),
12020 integer_zero_node));
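/* For example, (x & 8) == 8 folds to (x & 8) != 0, the
   canonical form of a single-bit test.  */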
12022 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12023 bit, then fold the expression into A < 0 or A >= 0. */
12024 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12025 if (tem)
12026 return tem;
12028 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12029 Similarly for NE_EXPR. */
12030 if (TREE_CODE (arg0) == BIT_AND_EXPR
12031 && TREE_CODE (arg1) == INTEGER_CST
12032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12034 tree notc = fold_build1 (BIT_NOT_EXPR,
12035 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12036 TREE_OPERAND (arg0, 1));
12037 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12038 arg1, notc);
12039 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12040 if (integer_nonzerop (dandnotc))
12041 return omit_one_operand (type, rslt, arg0);
12044 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12045 Similarly for NE_EXPR. */
12046 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12047 && TREE_CODE (arg1) == INTEGER_CST
12048 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12050 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12051 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12052 TREE_OPERAND (arg0, 1), notd);
12053 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12054 if (integer_nonzerop (candnotd))
12055 return omit_one_operand (type, rslt, arg0);
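/* For example, (x & 3) == 4 folds to false, since 4 has a bit
   outside the mask, and (x | 4) == 3 folds to false, since
   bit 2 of the IOR is always set.  */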
12058 /* Optimize comparisons of strlen vs zero to a compare of the
12059 first character of the string vs zero. To wit,
12060 strlen(ptr) == 0 => *ptr == 0
12061 strlen(ptr) != 0 => *ptr != 0
12062 Other cases should reduce to one of these two (or a constant)
12063 due to the return value of strlen being unsigned. */
12064 if (TREE_CODE (arg0) == CALL_EXPR
12065 && integer_zerop (arg1))
12067 tree fndecl = get_callee_fndecl (arg0);
12069 if (fndecl
12070 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12071 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12072 && call_expr_nargs (arg0) == 1
12073 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12075 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12076 return fold_build2 (code, type, iref,
12077 build_int_cst (TREE_TYPE (iref), 0));
12081 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12082 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12083 if (TREE_CODE (arg0) == RSHIFT_EXPR
12084 && integer_zerop (arg1)
12085 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12087 tree arg00 = TREE_OPERAND (arg0, 0);
12088 tree arg01 = TREE_OPERAND (arg0, 1);
12089 tree itype = TREE_TYPE (arg00);
12090 if (TREE_INT_CST_HIGH (arg01) == 0
12091 && TREE_INT_CST_LOW (arg01)
12092 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12094 if (TYPE_UNSIGNED (itype))
12096 itype = signed_type_for (itype);
12097 arg00 = fold_convert (itype, arg00);
12099 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12100 type, arg00, build_int_cst (itype, 0));
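/* For example, with 32-bit int x, (x >> 31) != 0 folds to
   x < 0 and (x >> 31) == 0 folds to x >= 0; an unsigned
   operand is first converted to the corresponding signed
   type.  */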
12104 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12105 if (integer_zerop (arg1)
12106 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12107 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12108 TREE_OPERAND (arg0, 1));
12110 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12111 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12112 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12113 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12114 build_int_cst (TREE_TYPE (arg1), 0));
12115 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12116 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12117 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12118 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12119 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12120 build_int_cst (TREE_TYPE (arg1), 0));
12122 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12123 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12124 && TREE_CODE (arg1) == INTEGER_CST
12125 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12126 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12127 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12128 TREE_OPERAND (arg0, 1), arg1));
12130 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12131 (X & C) == 0 when C is a single bit. */
12132 if (TREE_CODE (arg0) == BIT_AND_EXPR
12133 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12134 && integer_zerop (arg1)
12135 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12137 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12138 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12139 TREE_OPERAND (arg0, 1));
12140 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12141 type, tem, arg1);
12144 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12145 constant C is a power of two, i.e. a single bit. */
12146 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12147 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12148 && integer_zerop (arg1)
12149 && integer_pow2p (TREE_OPERAND (arg0, 1))
12150 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12151 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12153 tree arg00 = TREE_OPERAND (arg0, 0);
12154 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12155 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12158 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12159 when C is a power of two, i.e. a single bit. */
12160 if (TREE_CODE (arg0) == BIT_AND_EXPR
12161 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12162 && integer_zerop (arg1)
12163 && integer_pow2p (TREE_OPERAND (arg0, 1))
12164 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12165 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12167 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12168 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12169 arg000, TREE_OPERAND (arg0, 1));
12170 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12171 tem, build_int_cst (TREE_TYPE (tem), 0));
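/* For example, (~x & 4) == 0 folds to (x & 4) != 0, and
   ((x & 4) ^ 4) == 0 likewise folds to (x & 4) != 0.  */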
12174 if (integer_zerop (arg1)
12175 && tree_expr_nonzero_p (arg0))
12177 tree res = constant_boolean_node (code==NE_EXPR, type);
12178 return omit_one_operand (type, res, arg0);
12181 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12182 if (TREE_CODE (arg0) == NEGATE_EXPR
12183 && TREE_CODE (arg1) == NEGATE_EXPR)
12184 return fold_build2 (code, type,
12185 TREE_OPERAND (arg0, 0),
12186 TREE_OPERAND (arg1, 0));
12188 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12189 if (TREE_CODE (arg0) == BIT_AND_EXPR
12190 && TREE_CODE (arg1) == BIT_AND_EXPR)
12192 tree arg00 = TREE_OPERAND (arg0, 0);
12193 tree arg01 = TREE_OPERAND (arg0, 1);
12194 tree arg10 = TREE_OPERAND (arg1, 0);
12195 tree arg11 = TREE_OPERAND (arg1, 1);
12196 tree itype = TREE_TYPE (arg0);
12198 if (operand_equal_p (arg01, arg11, 0))
12199 return fold_build2 (code, type,
12200 fold_build2 (BIT_AND_EXPR, itype,
12201 fold_build2 (BIT_XOR_EXPR, itype,
12202 arg00, arg10),
12203 arg01),
12204 build_int_cst (itype, 0));
12206 if (operand_equal_p (arg01, arg10, 0))
12207 return fold_build2 (code, type,
12208 fold_build2 (BIT_AND_EXPR, itype,
12209 fold_build2 (BIT_XOR_EXPR, itype,
12210 arg00, arg11),
12211 arg01),
12212 build_int_cst (itype, 0));
12214 if (operand_equal_p (arg00, arg11, 0))
12215 return fold_build2 (code, type,
12216 fold_build2 (BIT_AND_EXPR, itype,
12217 fold_build2 (BIT_XOR_EXPR, itype,
12218 arg01, arg10),
12219 arg00),
12220 build_int_cst (itype, 0));
12222 if (operand_equal_p (arg00, arg10, 0))
12223 return fold_build2 (code, type,
12224 fold_build2 (BIT_AND_EXPR, itype,
12225 fold_build2 (BIT_XOR_EXPR, itype,
12226 arg01, arg11),
12227 arg00),
12228 build_int_cst (itype, 0));
12231 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12232 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12234 tree arg00 = TREE_OPERAND (arg0, 0);
12235 tree arg01 = TREE_OPERAND (arg0, 1);
12236 tree arg10 = TREE_OPERAND (arg1, 0);
12237 tree arg11 = TREE_OPERAND (arg1, 1);
12238 tree itype = TREE_TYPE (arg0);
12240 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12241 operand_equal_p guarantees no side-effects so we don't need
12242 to use omit_one_operand on Z. */
12243 if (operand_equal_p (arg01, arg11, 0))
12244 return fold_build2 (code, type, arg00, arg10);
12245 if (operand_equal_p (arg01, arg10, 0))
12246 return fold_build2 (code, type, arg00, arg11);
12247 if (operand_equal_p (arg00, arg11, 0))
12248 return fold_build2 (code, type, arg01, arg10);
12249 if (operand_equal_p (arg00, arg10, 0))
12250 return fold_build2 (code, type, arg01, arg11);
12252 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12253 if (TREE_CODE (arg01) == INTEGER_CST
12254 && TREE_CODE (arg11) == INTEGER_CST)
12255 return fold_build2 (code, type,
12256 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12257 fold_build2 (BIT_XOR_EXPR, itype,
12258 arg01, arg11)),
12259 arg10);
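/* For example, (x ^ z) == (y ^ z) folds to x == y, and
   (x ^ 3) == (y ^ 5) folds to (x ^ 6) == y.  */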
12262 /* Attempt to simplify equality/inequality comparisons of complex
12263 values. Only lower the comparison if the result is known or
12264 can be simplified to a single scalar comparison. */
12265 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12266 || TREE_CODE (arg0) == COMPLEX_CST)
12267 && (TREE_CODE (arg1) == COMPLEX_EXPR
12268 || TREE_CODE (arg1) == COMPLEX_CST))
12270 tree real0, imag0, real1, imag1;
12271 tree rcond, icond;
12273 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12275 real0 = TREE_OPERAND (arg0, 0);
12276 imag0 = TREE_OPERAND (arg0, 1);
12278 else
12280 real0 = TREE_REALPART (arg0);
12281 imag0 = TREE_IMAGPART (arg0);
12284 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12286 real1 = TREE_OPERAND (arg1, 0);
12287 imag1 = TREE_OPERAND (arg1, 1);
12289 else
12291 real1 = TREE_REALPART (arg1);
12292 imag1 = TREE_IMAGPART (arg1);
12295 rcond = fold_binary (code, type, real0, real1);
12296 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12298 if (integer_zerop (rcond))
12300 if (code == EQ_EXPR)
12301 return omit_two_operands (type, boolean_false_node,
12302 imag0, imag1);
12303 return fold_build2 (NE_EXPR, type, imag0, imag1);
12305 else
12307 if (code == NE_EXPR)
12308 return omit_two_operands (type, boolean_true_node,
12309 imag0, imag1);
12310 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12314 icond = fold_binary (code, type, imag0, imag1);
12315 if (icond && TREE_CODE (icond) == INTEGER_CST)
12317 if (integer_zerop (icond))
12319 if (code == EQ_EXPR)
12320 return omit_two_operands (type, boolean_false_node,
12321 real0, real1);
12322 return fold_build2 (NE_EXPR, type, real0, real1);
12324 else
12326 if (code == NE_EXPR)
12327 return omit_two_operands (type, boolean_true_node,
12328 real0, real1);
12329 return fold_build2 (EQ_EXPR, type, real0, real1);
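/* For example, when folding shows the real parts equal,
   x == y reduces to a single comparison of the imaginary
   parts; when it shows them unequal, the equality folds to
   false while preserving any side effects.  */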
12334 return NULL_TREE;
12336 case LT_EXPR:
12337 case GT_EXPR:
12338 case LE_EXPR:
12339 case GE_EXPR:
12340 tem = fold_comparison (code, type, op0, op1);
12341 if (tem != NULL_TREE)
12342 return tem;
12344 /* Transform comparisons of the form X +- C CMP X. */
12345 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12346 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12347 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12348 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12349 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12350 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12352 tree arg01 = TREE_OPERAND (arg0, 1);
12353 enum tree_code code0 = TREE_CODE (arg0);
12354 int is_positive;
12356 if (TREE_CODE (arg01) == REAL_CST)
12357 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12358 else
12359 is_positive = tree_int_cst_sgn (arg01);
12361 /* (X - c) > X becomes false. */
12362 if (code == GT_EXPR
12363 && ((code0 == MINUS_EXPR && is_positive >= 0)
12364 || (code0 == PLUS_EXPR && is_positive <= 0)))
12366 if (TREE_CODE (arg01) == INTEGER_CST
12367 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12368 fold_overflow_warning (("assuming signed overflow does not "
12369 "occur when assuming that (X - c) > X "
12370 "is always false"),
12371 WARN_STRICT_OVERFLOW_ALL);
12372 return constant_boolean_node (0, type);
12375 /* Likewise (X + c) < X becomes false. */
12376 if (code == LT_EXPR
12377 && ((code0 == PLUS_EXPR && is_positive >= 0)
12378 || (code0 == MINUS_EXPR && is_positive <= 0)))
12380 if (TREE_CODE (arg01) == INTEGER_CST
12381 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12382 fold_overflow_warning (("assuming signed overflow does not "
12383 "occur when assuming that "
12384 "(X + c) < X is always false"),
12385 WARN_STRICT_OVERFLOW_ALL);
12386 return constant_boolean_node (0, type);
12389 /* Convert (X - c) <= X to true. */
12390 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12391 && code == LE_EXPR
12392 && ((code0 == MINUS_EXPR && is_positive >= 0)
12393 || (code0 == PLUS_EXPR && is_positive <= 0)))
12395 if (TREE_CODE (arg01) == INTEGER_CST
12396 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12397 fold_overflow_warning (("assuming signed overflow does not "
12398 "occur when assuming that "
12399 "(X - c) <= X is always true"),
12400 WARN_STRICT_OVERFLOW_ALL);
12401 return constant_boolean_node (1, type);
12404 /* Convert (X + c) >= X to true. */
12405 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12406 && code == GE_EXPR
12407 && ((code0 == PLUS_EXPR && is_positive >= 0)
12408 || (code0 == MINUS_EXPR && is_positive <= 0)))
12410 if (TREE_CODE (arg01) == INTEGER_CST
12411 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12412 fold_overflow_warning (("assuming signed overflow does not "
12413 "occur when assuming that "
12414 "(X + c) >= X is always true"),
12415 WARN_STRICT_OVERFLOW_ALL);
12416 return constant_boolean_node (1, type);
12419 if (TREE_CODE (arg01) == INTEGER_CST)
12421 /* Convert X + c > X and X - c < X to true for integers. */
12422 if (code == GT_EXPR
12423 && ((code0 == PLUS_EXPR && is_positive > 0)
12424 || (code0 == MINUS_EXPR && is_positive < 0)))
12426 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12427 fold_overflow_warning (("assuming signed overflow does "
12428 "not occur when assuming that "
12429 "(X + c) > X is always true"),
12430 WARN_STRICT_OVERFLOW_ALL);
12431 return constant_boolean_node (1, type);
12434 if (code == LT_EXPR
12435 && ((code0 == MINUS_EXPR && is_positive > 0)
12436 || (code0 == PLUS_EXPR && is_positive < 0)))
12438 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12439 fold_overflow_warning (("assuming signed overflow does "
12440 "not occur when assuming that "
12441 "(X - c) < X is always true"),
12442 WARN_STRICT_OVERFLOW_ALL);
12443 return constant_boolean_node (1, type);
12446 /* Convert X + c <= X and X - c >= X to false for integers. */
12447 if (code == LE_EXPR
12448 && ((code0 == PLUS_EXPR && is_positive > 0)
12449 || (code0 == MINUS_EXPR && is_positive < 0)))
12451 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12452 fold_overflow_warning (("assuming signed overflow does "
12453 "not occur when assuming that "
12454 "(X + c) <= X is always false"),
12455 WARN_STRICT_OVERFLOW_ALL);
12456 return constant_boolean_node (0, type);
12459 if (code == GE_EXPR
12460 && ((code0 == MINUS_EXPR && is_positive > 0)
12461 || (code0 == PLUS_EXPR && is_positive < 0)))
12463 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12464 fold_overflow_warning (("assuming signed overflow does "
12465 "not occur when assuming that "
12466 "(X - c) >= X is always false"),
12467 WARN_STRICT_OVERFLOW_ALL);
 12468 	          return constant_boolean_node (0, type);
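      /* Editorial example (not from the GCC source): for a signed int x
         whose type has undefined overflow, (x + 1) > x folds to 1 and
         (x + 1) <= x folds to 0, each emitting a -Wstrict-overflow
         warning; for an unsigned x neither fold applies, since unsigned
         arithmetic wraps and the comparisons remain meaningful.  */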
12473 /* Comparisons with the highest or lowest possible integer of
12474 the specified precision will have known values. */
12476 tree arg1_type = TREE_TYPE (arg1);
12477 unsigned int width = TYPE_PRECISION (arg1_type);
12479 if (TREE_CODE (arg1) == INTEGER_CST
12480 && width <= 2 * HOST_BITS_PER_WIDE_INT
12481 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12483 HOST_WIDE_INT signed_max_hi;
12484 unsigned HOST_WIDE_INT signed_max_lo;
12485 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12487 if (width <= HOST_BITS_PER_WIDE_INT)
12489 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12490 - 1;
12491 signed_max_hi = 0;
12492 max_hi = 0;
12494 if (TYPE_UNSIGNED (arg1_type))
12496 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12497 min_lo = 0;
12498 min_hi = 0;
12500 else
12502 max_lo = signed_max_lo;
12503 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12504 min_hi = -1;
12507 else
12509 width -= HOST_BITS_PER_WIDE_INT;
12510 signed_max_lo = -1;
12511 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12512 - 1;
12513 max_lo = -1;
12514 min_lo = 0;
12516 if (TYPE_UNSIGNED (arg1_type))
12518 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12519 min_hi = 0;
12521 else
12523 max_hi = signed_max_hi;
12524 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12528 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12529 && TREE_INT_CST_LOW (arg1) == max_lo)
12530 switch (code)
12532 case GT_EXPR:
12533 return omit_one_operand (type, integer_zero_node, arg0);
12535 case GE_EXPR:
12536 return fold_build2 (EQ_EXPR, type, op0, op1);
12538 case LE_EXPR:
12539 return omit_one_operand (type, integer_one_node, arg0);
12541 case LT_EXPR:
12542 return fold_build2 (NE_EXPR, type, op0, op1);
12544 /* The GE_EXPR and LT_EXPR cases above are not normally
12545 reached because of previous transformations. */
12547 default:
12548 break;
12550 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12551 == max_hi
12552 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12553 switch (code)
12555 case GT_EXPR:
12556 arg1 = const_binop (PLUS_EXPR, arg1,
12557 build_int_cst (TREE_TYPE (arg1), 1), 0);
12558 return fold_build2 (EQ_EXPR, type,
12559 fold_convert (TREE_TYPE (arg1), arg0),
12560 arg1);
12561 case LE_EXPR:
12562 arg1 = const_binop (PLUS_EXPR, arg1,
12563 build_int_cst (TREE_TYPE (arg1), 1), 0);
12564 return fold_build2 (NE_EXPR, type,
12565 fold_convert (TREE_TYPE (arg1), arg0),
12566 arg1);
12567 default:
12568 break;
12570 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12571 == min_hi
12572 && TREE_INT_CST_LOW (arg1) == min_lo)
12573 switch (code)
12575 case LT_EXPR:
12576 return omit_one_operand (type, integer_zero_node, arg0);
12578 case LE_EXPR:
12579 return fold_build2 (EQ_EXPR, type, op0, op1);
12581 case GE_EXPR:
12582 return omit_one_operand (type, integer_one_node, arg0);
12584 case GT_EXPR:
12585 return fold_build2 (NE_EXPR, type, op0, op1);
12587 default:
12588 break;
12590 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12591 == min_hi
12592 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12593 switch (code)
12595 case GE_EXPR:
12596 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12597 return fold_build2 (NE_EXPR, type,
12598 fold_convert (TREE_TYPE (arg1), arg0),
12599 arg1);
12600 case LT_EXPR:
12601 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12602 return fold_build2 (EQ_EXPR, type,
12603 fold_convert (TREE_TYPE (arg1), arg0),
12604 arg1);
12605 default:
12606 break;
12609 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12610 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12611 && TYPE_UNSIGNED (arg1_type)
12612 /* We will flip the signedness of the comparison operator
12613 associated with the mode of arg1, so the sign bit is
12614 specified by this mode. Check that arg1 is the signed
12615 max associated with this sign bit. */
12616 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12617 /* signed_type does not work on pointer types. */
12618 && INTEGRAL_TYPE_P (arg1_type))
12620 /* The following case also applies to X < signed_max+1
 12621 	             and X >= signed_max+1 because of previous transformations.  */
12622 if (code == LE_EXPR || code == GT_EXPR)
12624 tree st;
12625 st = signed_type_for (TREE_TYPE (arg1));
12626 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12627 type, fold_convert (st, arg0),
12628 build_int_cst (st, 0));
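      /* Editorial example (not from the GCC source): for a value x of
         type unsigned char (precision 8, maximum 255), x > 255 folds to
         0, x <= 255 folds to 1, x >= 255 becomes x == 255, x < 255
         becomes x != 255, and x > 254 becomes x == 255 after adding 1
         to the constant.  */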
12634 /* If we are comparing an ABS_EXPR with a constant, we can
12635 convert all the cases into explicit comparisons, but they may
12636 well not be faster than doing the ABS and one comparison.
12637 But ABS (X) <= C is a range comparison, which becomes a subtraction
12638 and a comparison, and is probably faster. */
12639 if (code == LE_EXPR
12640 && TREE_CODE (arg1) == INTEGER_CST
12641 && TREE_CODE (arg0) == ABS_EXPR
12642 && ! TREE_SIDE_EFFECTS (arg0)
12643 && (0 != (tem = negate_expr (arg1)))
12644 && TREE_CODE (tem) == INTEGER_CST
12645 && !TREE_OVERFLOW (tem))
12646 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12647 build2 (GE_EXPR, type,
12648 TREE_OPERAND (arg0, 0), tem),
12649 build2 (LE_EXPR, type,
12650 TREE_OPERAND (arg0, 0), arg1));
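      /* Editorial example (not from the GCC source): ABS_EXPR <x> <= 7
         becomes the range test x >= -7 && x <= 7, built here as a
         TRUTH_ANDIF_EXPR of the two comparisons, provided negating the
         constant does not overflow.  */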
12652 /* Convert ABS_EXPR<x> >= 0 to true. */
12653 strict_overflow_p = false;
12654 if (code == GE_EXPR
12655 && (integer_zerop (arg1)
12656 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12657 && real_zerop (arg1)))
12658 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12660 if (strict_overflow_p)
12661 fold_overflow_warning (("assuming signed overflow does not occur "
12662 "when simplifying comparison of "
12663 "absolute value and zero"),
12664 WARN_STRICT_OVERFLOW_CONDITIONAL);
12665 return omit_one_operand (type, integer_one_node, arg0);
12668 /* Convert ABS_EXPR<x> < 0 to false. */
12669 strict_overflow_p = false;
12670 if (code == LT_EXPR
12671 && (integer_zerop (arg1) || real_zerop (arg1))
12672 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12674 if (strict_overflow_p)
12675 fold_overflow_warning (("assuming signed overflow does not occur "
12676 "when simplifying comparison of "
12677 "absolute value and zero"),
12678 WARN_STRICT_OVERFLOW_CONDITIONAL);
12679 return omit_one_operand (type, integer_zero_node, arg0);
12682 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12683 and similarly for >= into !=. */
12684 if ((code == LT_EXPR || code == GE_EXPR)
12685 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12686 && TREE_CODE (arg1) == LSHIFT_EXPR
12687 && integer_onep (TREE_OPERAND (arg1, 0)))
12688 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12689 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12690 TREE_OPERAND (arg1, 1)),
12691 build_int_cst (TREE_TYPE (arg0), 0));
12693 if ((code == LT_EXPR || code == GE_EXPR)
12694 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12695 && CONVERT_EXPR_P (arg1)
12696 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12697 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12698 return
12699 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12700 fold_convert (TREE_TYPE (arg0),
12701 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12702 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12703 1))),
12704 build_int_cst (TREE_TYPE (arg0), 0));
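      /* Editorial example (not from the GCC source): for an unsigned x,
         x < (1 << y) folds to (x >> y) == 0 and x >= (1 << y) folds to
         (x >> y) != 0, trading a shift of the constant for a shift of
         the variable.  */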
12706 return NULL_TREE;
12708 case UNORDERED_EXPR:
12709 case ORDERED_EXPR:
12710 case UNLT_EXPR:
12711 case UNLE_EXPR:
12712 case UNGT_EXPR:
12713 case UNGE_EXPR:
12714 case UNEQ_EXPR:
12715 case LTGT_EXPR:
12716 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12718 t1 = fold_relational_const (code, type, arg0, arg1);
12719 if (t1 != NULL_TREE)
12720 return t1;
12723 /* If the first operand is NaN, the result is constant. */
12724 if (TREE_CODE (arg0) == REAL_CST
12725 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12726 && (code != LTGT_EXPR || ! flag_trapping_math))
12728 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12729 ? integer_zero_node
12730 : integer_one_node;
12731 return omit_one_operand (type, t1, arg1);
12734 /* If the second operand is NaN, the result is constant. */
12735 if (TREE_CODE (arg1) == REAL_CST
12736 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12737 && (code != LTGT_EXPR || ! flag_trapping_math))
12739 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12740 ? integer_zero_node
12741 : integer_one_node;
12742 return omit_one_operand (type, t1, arg0);
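      /* Editorial example (not from the GCC source): if arg0 is a NaN
         constant, UNLT_EXPR (NaN, y) folds to 1 while ORDERED_EXPR
         (NaN, y) folds to 0; LTGT_EXPR is folded the same way only
         when -ftrapping-math is disabled.  */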
12745 /* Simplify unordered comparison of something with itself. */
12746 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12747 && operand_equal_p (arg0, arg1, 0))
12748 return constant_boolean_node (1, type);
12750 if (code == LTGT_EXPR
12751 && !flag_trapping_math
12752 && operand_equal_p (arg0, arg1, 0))
12753 return constant_boolean_node (0, type);
12755 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12757 tree targ0 = strip_float_extensions (arg0);
12758 tree targ1 = strip_float_extensions (arg1);
12759 tree newtype = TREE_TYPE (targ0);
12761 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12762 newtype = TREE_TYPE (targ1);
12764 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12765 return fold_build2 (code, type, fold_convert (newtype, targ0),
12766 fold_convert (newtype, targ1));
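      /* Editorial example (not from the GCC source): with float f1, f2,
         (double) f1 < (double) f2 folds to f1 < f2, since the common
         unextended type float is narrower than double and the
         comparison result is unchanged.  */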
12769 return NULL_TREE;
12771 case COMPOUND_EXPR:
12772 /* When pedantic, a compound expression can be neither an lvalue
12773 nor an integer constant expression. */
12774 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12775 return NULL_TREE;
 12776 	      /* Don't let (0, 0) be a null pointer constant.  */
12777 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12778 : fold_convert (type, arg1);
12779 return pedantic_non_lvalue (tem);
12781 case COMPLEX_EXPR:
12782 if ((TREE_CODE (arg0) == REAL_CST
12783 && TREE_CODE (arg1) == REAL_CST)
12784 || (TREE_CODE (arg0) == INTEGER_CST
12785 && TREE_CODE (arg1) == INTEGER_CST))
12786 return build_complex (type, arg0, arg1);
12787 return NULL_TREE;
12789 case ASSERT_EXPR:
12790 /* An ASSERT_EXPR should never be passed to fold_binary. */
12791 gcc_unreachable ();
12793 default:
12794 return NULL_TREE;
12795 } /* switch (code) */
12798 /* Callback for walk_tree, looking for LABEL_EXPR.
 12799 	   Returns *TP if it is a LABEL_EXPR, otherwise it returns NULL_TREE.
12800 Do not check the sub-tree of GOTO_EXPR. */
12802 static tree
12803 contains_label_1 (tree *tp,
12804 int *walk_subtrees,
12805 void *data ATTRIBUTE_UNUSED)
12807 switch (TREE_CODE (*tp))
12809 case LABEL_EXPR:
12810 return *tp;
12811 case GOTO_EXPR:
12812 *walk_subtrees = 0;
 12813 	      /* FALLTHRU */
12814 default:
12815 return NULL_TREE;
12819 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
 12820 	   accessible from outside the sub-tree.  Returns true if such a
 12821 	   label is found, false otherwise.  */
12823 static bool
12824 contains_label_p (tree st)
12826 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12829 /* Fold a ternary expression of code CODE and type TYPE with operands
12830 OP0, OP1, and OP2. Return the folded expression if folding is
12831 successful. Otherwise, return NULL_TREE. */
12833 tree
12834 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12836 tree tem;
12837 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12838 enum tree_code_class kind = TREE_CODE_CLASS (code);
12840 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12841 && TREE_CODE_LENGTH (code) == 3);
12843 /* Strip any conversions that don't change the mode. This is safe
12844 for every expression, except for a comparison expression because
12845 its signedness is derived from its operands. So, in the latter
12846 case, only strip conversions that don't change the signedness.
12848 Note that this is done as an internal manipulation within the
12849 constant folder, in order to find the simplest representation of
12850 the arguments so that their form can be studied. In any cases,
12851 the appropriate type conversions should be put back in the tree
12852 that will get out of the constant folder. */
12853 if (op0)
12855 arg0 = op0;
12856 STRIP_NOPS (arg0);
12859 if (op1)
12861 arg1 = op1;
12862 STRIP_NOPS (arg1);
12865 switch (code)
12867 case COMPONENT_REF:
12868 if (TREE_CODE (arg0) == CONSTRUCTOR
12869 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12871 unsigned HOST_WIDE_INT idx;
12872 tree field, value;
12873 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12874 if (field == arg1)
12875 return value;
12877 return NULL_TREE;
12879 case COND_EXPR:
12880 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12881 so all simple results must be passed through pedantic_non_lvalue. */
12882 if (TREE_CODE (arg0) == INTEGER_CST)
12884 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12885 tem = integer_zerop (arg0) ? op2 : op1;
12886 /* Only optimize constant conditions when the selected branch
12887 has the same type as the COND_EXPR. This avoids optimizing
12888 away "c ? x : throw", where the throw has a void type.
 12889 	         Avoid throwing away an operand that contains a label.  */
12890 if ((!TREE_SIDE_EFFECTS (unused_op)
12891 || !contains_label_p (unused_op))
12892 && (! VOID_TYPE_P (TREE_TYPE (tem))
12893 || VOID_TYPE_P (type)))
12894 return pedantic_non_lvalue (tem);
12895 return NULL_TREE;
12897 if (operand_equal_p (arg1, op2, 0))
12898 return pedantic_omit_one_operand (type, arg1, arg0);
12900 /* If we have A op B ? A : C, we may be able to convert this to a
12901 simpler expression, depending on the operation and the values
12902 of B and C. Signed zeros prevent all of these transformations,
12903 for reasons given above each one.
12905 Also try swapping the arguments and inverting the conditional. */
12906 if (COMPARISON_CLASS_P (arg0)
12907 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12908 arg1, TREE_OPERAND (arg0, 1))
12909 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12911 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12912 if (tem)
12913 return tem;
12916 if (COMPARISON_CLASS_P (arg0)
12917 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12918 op2,
12919 TREE_OPERAND (arg0, 1))
12920 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12922 tem = fold_truth_not_expr (arg0);
12923 if (tem && COMPARISON_CLASS_P (tem))
12925 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12926 if (tem)
12927 return tem;
12931 /* If the second operand is simpler than the third, swap them
12932 since that produces better jump optimization results. */
12933 if (truth_value_p (TREE_CODE (arg0))
12934 && tree_swap_operands_p (op1, op2, false))
12936 /* See if this can be inverted. If it can't, possibly because
12937 it was a floating-point inequality comparison, don't do
12938 anything. */
12939 tem = fold_truth_not_expr (arg0);
12940 if (tem)
12941 return fold_build3 (code, type, tem, op2, op1);
12944 /* Convert A ? 1 : 0 to simply A. */
12945 if (integer_onep (op1)
12946 && integer_zerop (op2)
12947 /* If we try to convert OP0 to our type, the
12948 call to fold will try to move the conversion inside
12949 a COND, which will recurse. In that case, the COND_EXPR
12950 is probably the best choice, so leave it alone. */
12951 && type == TREE_TYPE (arg0))
12952 return pedantic_non_lvalue (arg0);
12954 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12955 over COND_EXPR in cases such as floating point comparisons. */
12956 if (integer_zerop (op1)
12957 && integer_onep (op2)
12958 && truth_value_p (TREE_CODE (arg0)))
12959 return pedantic_non_lvalue (fold_convert (type,
12960 invert_truthvalue (arg0)));
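      /* Editorial example (not from the GCC source): (x > y) ? 1 : 0
         folds to the comparison x > y itself when the types agree, and
         (x > y) ? 0 : 1 folds to the inverted comparison produced by
         invert_truthvalue.  */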
12962 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12963 if (TREE_CODE (arg0) == LT_EXPR
12964 && integer_zerop (TREE_OPERAND (arg0, 1))
12965 && integer_zerop (op2)
12966 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12968 /* sign_bit_p only checks ARG1 bits within A's precision.
12969 If <sign bit of A> has wider type than A, bits outside
12970 of A's precision in <sign bit of A> need to be checked.
 12971 	         If they are all 0, this optimization needs to be done
 12972 	         in A's unsigned type; if they are all 1, in A's signed type;
 12973 	         otherwise the optimization can't be done.  */
12974 if (TYPE_PRECISION (TREE_TYPE (tem))
12975 < TYPE_PRECISION (TREE_TYPE (arg1))
12976 && TYPE_PRECISION (TREE_TYPE (tem))
12977 < TYPE_PRECISION (type))
12979 unsigned HOST_WIDE_INT mask_lo;
12980 HOST_WIDE_INT mask_hi;
12981 int inner_width, outer_width;
12982 tree tem_type;
12984 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12985 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12986 if (outer_width > TYPE_PRECISION (type))
12987 outer_width = TYPE_PRECISION (type);
12989 if (outer_width > HOST_BITS_PER_WIDE_INT)
12991 mask_hi = ((unsigned HOST_WIDE_INT) -1
12992 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12993 mask_lo = -1;
12995 else
12997 mask_hi = 0;
12998 mask_lo = ((unsigned HOST_WIDE_INT) -1
12999 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13001 if (inner_width > HOST_BITS_PER_WIDE_INT)
13003 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13004 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13005 mask_lo = 0;
13007 else
13008 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13009 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13011 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13012 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13014 tem_type = signed_type_for (TREE_TYPE (tem));
13015 tem = fold_convert (tem_type, tem);
13017 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13018 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13020 tem_type = unsigned_type_for (TREE_TYPE (tem));
13021 tem = fold_convert (tem_type, tem);
13023 else
13024 tem = NULL;
13027 if (tem)
13028 return fold_convert (type,
13029 fold_build2 (BIT_AND_EXPR,
13030 TREE_TYPE (tem), tem,
13031 fold_convert (TREE_TYPE (tem),
13032 arg1)));
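      /* Editorial example (not from the GCC source): for a 32-bit signed
         int a, a < 0 ? (int) 0x80000000 : 0 folds to a & 0x80000000,
         since the selected value is exactly the sign bit of a and both
         types have the same precision.  */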
13035 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13036 already handled above. */
13037 if (TREE_CODE (arg0) == BIT_AND_EXPR
13038 && integer_onep (TREE_OPERAND (arg0, 1))
13039 && integer_zerop (op2)
13040 && integer_pow2p (arg1))
13042 tree tem = TREE_OPERAND (arg0, 0);
13043 STRIP_NOPS (tem);
13044 if (TREE_CODE (tem) == RSHIFT_EXPR
13045 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13046 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13047 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13048 return fold_build2 (BIT_AND_EXPR, type,
13049 TREE_OPERAND (tem, 0), arg1);
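      /* Editorial example (not from the GCC source):
         ((a >> 5) & 1) ? 32 : 0 folds to a & 32, because tree_log2 (32)
         equals the shift count 5.  */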
13052 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13053 is probably obsolete because the first operand should be a
13054 truth value (that's why we have the two cases above), but let's
13055 leave it in until we can confirm this for all front-ends. */
13056 if (integer_zerop (op2)
13057 && TREE_CODE (arg0) == NE_EXPR
13058 && integer_zerop (TREE_OPERAND (arg0, 1))
13059 && integer_pow2p (arg1)
13060 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13061 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13062 arg1, OEP_ONLY_CONST))
13063 return pedantic_non_lvalue (fold_convert (type,
13064 TREE_OPERAND (arg0, 0)));
13066 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13067 if (integer_zerop (op2)
13068 && truth_value_p (TREE_CODE (arg0))
13069 && truth_value_p (TREE_CODE (arg1)))
13070 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13071 fold_convert (type, arg0),
13072 arg1);
13074 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13075 if (integer_onep (op2)
13076 && truth_value_p (TREE_CODE (arg0))
13077 && truth_value_p (TREE_CODE (arg1)))
13079 /* Only perform transformation if ARG0 is easily inverted. */
13080 tem = fold_truth_not_expr (arg0);
13081 if (tem)
13082 return fold_build2 (TRUTH_ORIF_EXPR, type,
13083 fold_convert (type, tem),
13084 arg1);
13087 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13088 if (integer_zerop (arg1)
13089 && truth_value_p (TREE_CODE (arg0))
13090 && truth_value_p (TREE_CODE (op2)))
13092 /* Only perform transformation if ARG0 is easily inverted. */
13093 tem = fold_truth_not_expr (arg0);
13094 if (tem)
13095 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13096 fold_convert (type, tem),
13097 op2);
13100 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13101 if (integer_onep (arg1)
13102 && truth_value_p (TREE_CODE (arg0))
13103 && truth_value_p (TREE_CODE (op2)))
13104 return fold_build2 (TRUTH_ORIF_EXPR, type,
13105 fold_convert (type, arg0),
13106 op2);
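      /* Editorial summary (not from the GCC source) of the four
         truth-value folds above, for truth values a and b:
           a ? b : 0  ->  a && b
           a ? b : 1  ->  !a || b   (if a is easily inverted)
           a ? 0 : b  ->  !a && b   (if a is easily inverted)
           a ? 1 : b  ->  a || b  */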
13108 return NULL_TREE;
13110 case CALL_EXPR:
13111 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13112 of fold_ternary on them. */
13113 gcc_unreachable ();
13115 case BIT_FIELD_REF:
13116 if ((TREE_CODE (arg0) == VECTOR_CST
13117 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13118 && type == TREE_TYPE (TREE_TYPE (arg0)))
13120 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13121 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13123 if (width != 0
13124 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13125 && (idx % width) == 0
13126 && (idx = idx / width)
13127 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13129 tree elements = NULL_TREE;
13131 if (TREE_CODE (arg0) == VECTOR_CST)
13132 elements = TREE_VECTOR_CST_ELTS (arg0);
13133 else
13135 unsigned HOST_WIDE_INT idx;
13136 tree value;
13138 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13139 elements = tree_cons (NULL_TREE, value, elements);
13141 while (idx-- > 0 && elements)
13142 elements = TREE_CHAIN (elements);
13143 if (elements)
13144 return TREE_VALUE (elements);
13145 else
13146 return fold_convert (type, integer_zero_node);
 13150       /* A bit-field-ref that references the full argument can be stripped.  */
13151 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13152 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13153 && integer_zerop (op2))
13154 return fold_convert (type, arg0);
13156 return NULL_TREE;
13158 default:
13159 return NULL_TREE;
13160 } /* switch (code) */
13163 /* Perform constant folding and related simplification of EXPR.
13164 The related simplifications include x*1 => x, x*0 => 0, etc.,
13165 and application of the associative law.
13166 NOP_EXPR conversions may be removed freely (as long as we
13167 are careful not to change the type of the overall expression).
13168 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13169 but we can constant-fold them if they have constant operands. */
13171 #ifdef ENABLE_FOLD_CHECKING
13172 # define fold(x) fold_1 (x)
13173 static tree fold_1 (tree);
13174 static
13175 #endif
13176 tree
13177 fold (tree expr)
13179 const tree t = expr;
13180 enum tree_code code = TREE_CODE (t);
13181 enum tree_code_class kind = TREE_CODE_CLASS (code);
13182 tree tem;
13184 /* Return right away if a constant. */
13185 if (kind == tcc_constant)
13186 return t;
13188 /* CALL_EXPR-like objects with variable numbers of operands are
13189 treated specially. */
13190 if (kind == tcc_vl_exp)
13192 if (code == CALL_EXPR)
13194 tem = fold_call_expr (expr, false);
13195 return tem ? tem : expr;
13197 return expr;
13200 if (IS_EXPR_CODE_CLASS (kind))
13202 tree type = TREE_TYPE (t);
13203 tree op0, op1, op2;
13205 switch (TREE_CODE_LENGTH (code))
13207 case 1:
13208 op0 = TREE_OPERAND (t, 0);
13209 tem = fold_unary (code, type, op0);
13210 return tem ? tem : expr;
13211 case 2:
13212 op0 = TREE_OPERAND (t, 0);
13213 op1 = TREE_OPERAND (t, 1);
13214 tem = fold_binary (code, type, op0, op1);
13215 return tem ? tem : expr;
13216 case 3:
13217 op0 = TREE_OPERAND (t, 0);
13218 op1 = TREE_OPERAND (t, 1);
13219 op2 = TREE_OPERAND (t, 2);
13220 tem = fold_ternary (code, type, op0, op1, op2);
13221 return tem ? tem : expr;
13222 default:
13223 break;
13227 switch (code)
13229 case ARRAY_REF:
13231 tree op0 = TREE_OPERAND (t, 0);
13232 tree op1 = TREE_OPERAND (t, 1);
13234 if (TREE_CODE (op1) == INTEGER_CST
13235 && TREE_CODE (op0) == CONSTRUCTOR
13236 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13238 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13239 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13240 unsigned HOST_WIDE_INT begin = 0;
13242 /* Find a matching index by means of a binary search. */
13243 while (begin != end)
13245 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13246 tree index = VEC_index (constructor_elt, elts, middle)->index;
13248 if (TREE_CODE (index) == INTEGER_CST
13249 && tree_int_cst_lt (index, op1))
13250 begin = middle + 1;
13251 else if (TREE_CODE (index) == INTEGER_CST
13252 && tree_int_cst_lt (op1, index))
13253 end = middle;
13254 else if (TREE_CODE (index) == RANGE_EXPR
13255 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13256 begin = middle + 1;
13257 else if (TREE_CODE (index) == RANGE_EXPR
13258 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13259 end = middle;
13260 else
13261 return VEC_index (constructor_elt, elts, middle)->value;
13265 return t;
13268 case CONST_DECL:
13269 return fold (DECL_INITIAL (t));
13271 default:
13272 return t;
13273 } /* switch (code) */
13276 #ifdef ENABLE_FOLD_CHECKING
13277 #undef fold
13279 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13280 static void fold_check_failed (const_tree, const_tree);
13281 void print_fold_checksum (const_tree);
 13283 /* When --enable-checking=fold, compute a digest of expr before
 13284    and after the actual fold call to verify that fold did not
 13285    accidentally change the original expr.  */
13287 tree
13288 fold (tree expr)
13290 tree ret;
13291 struct md5_ctx ctx;
13292 unsigned char checksum_before[16], checksum_after[16];
13293 htab_t ht;
13295 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13296 md5_init_ctx (&ctx);
13297 fold_checksum_tree (expr, &ctx, ht);
13298 md5_finish_ctx (&ctx, checksum_before);
13299 htab_empty (ht);
13301 ret = fold_1 (expr);
13303 md5_init_ctx (&ctx);
13304 fold_checksum_tree (expr, &ctx, ht);
13305 md5_finish_ctx (&ctx, checksum_after);
13306 htab_delete (ht);
13308 if (memcmp (checksum_before, checksum_after, 16))
13309 fold_check_failed (expr, ret);
13311 return ret;
13314 void
13315 print_fold_checksum (const_tree expr)
13317 struct md5_ctx ctx;
13318 unsigned char checksum[16], cnt;
13319 htab_t ht;
13321 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13322 md5_init_ctx (&ctx);
13323 fold_checksum_tree (expr, &ctx, ht);
13324 md5_finish_ctx (&ctx, checksum);
13325 htab_delete (ht);
13326 for (cnt = 0; cnt < 16; ++cnt)
13327 fprintf (stderr, "%02x", checksum[cnt]);
13328 putc ('\n', stderr);
13331 static void
13332 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13334 internal_error ("fold check: original tree changed by fold");
13337 static void
13338 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13340 const void **slot;
13341 enum tree_code code;
13342 struct tree_function_decl buf;
13343 int i, len;
13345 recursive_label:
13347 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13348 <= sizeof (struct tree_function_decl))
13349 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13350 if (expr == NULL)
13351 return;
13352 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13353 if (*slot != NULL)
13354 return;
13355 *slot = expr;
13356 code = TREE_CODE (expr);
13357 if (TREE_CODE_CLASS (code) == tcc_declaration
13358 && DECL_ASSEMBLER_NAME_SET_P (expr))
13360 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13361 memcpy ((char *) &buf, expr, tree_size (expr));
13362 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13363 expr = (tree) &buf;
13365 else if (TREE_CODE_CLASS (code) == tcc_type
13366 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13367 || TYPE_CACHED_VALUES_P (expr)
13368 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13370 /* Allow these fields to be modified. */
13371 tree tmp;
13372 memcpy ((char *) &buf, expr, tree_size (expr));
13373 expr = tmp = (tree) &buf;
13374 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13375 TYPE_POINTER_TO (tmp) = NULL;
13376 TYPE_REFERENCE_TO (tmp) = NULL;
13377 if (TYPE_CACHED_VALUES_P (tmp))
13379 TYPE_CACHED_VALUES_P (tmp) = 0;
13380 TYPE_CACHED_VALUES (tmp) = NULL;
13383 md5_process_bytes (expr, tree_size (expr), ctx);
13384 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13385 if (TREE_CODE_CLASS (code) != tcc_type
13386 && TREE_CODE_CLASS (code) != tcc_declaration
13387 && code != TREE_LIST
13388 && code != SSA_NAME)
13389 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13390 switch (TREE_CODE_CLASS (code))
13392 case tcc_constant:
13393 switch (code)
13395 case STRING_CST:
13396 md5_process_bytes (TREE_STRING_POINTER (expr),
13397 TREE_STRING_LENGTH (expr), ctx);
13398 break;
13399 case COMPLEX_CST:
13400 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13401 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13402 break;
13403 case VECTOR_CST:
13404 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13405 break;
13406 default:
13407 break;
13409 break;
13410 case tcc_exceptional:
13411 switch (code)
13413 case TREE_LIST:
13414 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13415 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13416 expr = TREE_CHAIN (expr);
13417 goto recursive_label;
13418 break;
13419 case TREE_VEC:
13420 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13421 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13422 break;
13423 default:
13424 break;
13426 break;
13427 case tcc_expression:
13428 case tcc_reference:
13429 case tcc_comparison:
13430 case tcc_unary:
13431 case tcc_binary:
13432 case tcc_statement:
13433 case tcc_vl_exp:
13434 len = TREE_OPERAND_LENGTH (expr);
13435 for (i = 0; i < len; ++i)
13436 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13437 break;
13438 case tcc_declaration:
13439 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13440 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13441 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13443 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13444 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13445 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13446 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13447 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13449 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13450 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13452 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13454 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13455 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13456 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13458 break;
13459 case tcc_type:
13460 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13461 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13462 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13463 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13464 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13465 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13466 if (INTEGRAL_TYPE_P (expr)
13467 || SCALAR_FLOAT_TYPE_P (expr))
13469 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13470 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13472 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13473 if (TREE_CODE (expr) == RECORD_TYPE
13474 || TREE_CODE (expr) == UNION_TYPE
13475 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13476 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13477 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13478 break;
13479 default:
13480 break;
13484 /* Helper function for outputting the checksum of a tree T. When
13485 debugging with gdb, you can "define mynext" to be "next" followed
13486 by "call debug_fold_checksum (op0)", then just trace down till the
13487 outputs differ. */
13489 void
13490 debug_fold_checksum (const_tree t)
13492 int i;
13493 unsigned char checksum[16];
13494 struct md5_ctx ctx;
13495 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13497 md5_init_ctx (&ctx);
13498 fold_checksum_tree (t, &ctx, ht);
13499 md5_finish_ctx (&ctx, checksum);
13500 htab_empty (ht);
13502 for (i = 0; i < 16; i++)
13503 fprintf (stderr, "%d ", checksum[i]);
13505 fprintf (stderr, "\n");
13508 #endif
13510 /* Fold a unary tree expression with code CODE of type TYPE with an
13511 operand OP0. Return a folded expression if successful. Otherwise,
13512 return a tree expression with code CODE of type TYPE with an
13513 operand OP0. */
13515 tree
13516 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13518 tree tem;
13519 #ifdef ENABLE_FOLD_CHECKING
13520 unsigned char checksum_before[16], checksum_after[16];
13521 struct md5_ctx ctx;
13522 htab_t ht;
13524 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13525 md5_init_ctx (&ctx);
13526 fold_checksum_tree (op0, &ctx, ht);
13527 md5_finish_ctx (&ctx, checksum_before);
13528 htab_empty (ht);
13529 #endif
13531 tem = fold_unary (code, type, op0);
13532 if (!tem)
13533 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13535 #ifdef ENABLE_FOLD_CHECKING
13536 md5_init_ctx (&ctx);
13537 fold_checksum_tree (op0, &ctx, ht);
13538 md5_finish_ctx (&ctx, checksum_after);
13539 htab_delete (ht);
13541 if (memcmp (checksum_before, checksum_after, 16))
13542 fold_check_failed (op0, tem);
13543 #endif
13544 return tem;
13547 /* Fold a binary tree expression with code CODE of type TYPE with
13548 operands OP0 and OP1. Return a folded expression if successful.
13549 Otherwise, return a tree expression with code CODE of type TYPE
13550 with operands OP0 and OP1. */
13552 tree
13553 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13554 MEM_STAT_DECL)
13556 tree tem;
13557 #ifdef ENABLE_FOLD_CHECKING
13558 unsigned char checksum_before_op0[16],
13559 checksum_before_op1[16],
13560 checksum_after_op0[16],
13561 checksum_after_op1[16];
13562 struct md5_ctx ctx;
13563 htab_t ht;
13565 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13566 md5_init_ctx (&ctx);
13567 fold_checksum_tree (op0, &ctx, ht);
13568 md5_finish_ctx (&ctx, checksum_before_op0);
13569 htab_empty (ht);
13571 md5_init_ctx (&ctx);
13572 fold_checksum_tree (op1, &ctx, ht);
13573 md5_finish_ctx (&ctx, checksum_before_op1);
13574 htab_empty (ht);
13575 #endif
13577 tem = fold_binary (code, type, op0, op1);
13578 if (!tem)
13579 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13581 #ifdef ENABLE_FOLD_CHECKING
13582 md5_init_ctx (&ctx);
13583 fold_checksum_tree (op0, &ctx, ht);
13584 md5_finish_ctx (&ctx, checksum_after_op0);
13585 htab_empty (ht);
13587 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13588 fold_check_failed (op0, tem);
13590 md5_init_ctx (&ctx);
13591 fold_checksum_tree (op1, &ctx, ht);
13592 md5_finish_ctx (&ctx, checksum_after_op1);
13593 htab_delete (ht);
13595 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13596 fold_check_failed (op1, tem);
13597 #endif
13598 return tem;
13601 /* Fold a ternary tree expression with code CODE of type TYPE with
13602 operands OP0, OP1, and OP2. Return a folded expression if
13603 successful. Otherwise, return a tree expression with code CODE of
13604 type TYPE with operands OP0, OP1, and OP2. */
13606 tree
13607 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13608 MEM_STAT_DECL)
13610 tree tem;
13611 #ifdef ENABLE_FOLD_CHECKING
13612 unsigned char checksum_before_op0[16],
13613 checksum_before_op1[16],
13614 checksum_before_op2[16],
13615 checksum_after_op0[16],
13616 checksum_after_op1[16],
13617 checksum_after_op2[16];
13618 struct md5_ctx ctx;
13619 htab_t ht;
13621 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13622 md5_init_ctx (&ctx);
13623 fold_checksum_tree (op0, &ctx, ht);
13624 md5_finish_ctx (&ctx, checksum_before_op0);
13625 htab_empty (ht);
13627 md5_init_ctx (&ctx);
13628 fold_checksum_tree (op1, &ctx, ht);
13629 md5_finish_ctx (&ctx, checksum_before_op1);
13630 htab_empty (ht);
13632 md5_init_ctx (&ctx);
13633 fold_checksum_tree (op2, &ctx, ht);
13634 md5_finish_ctx (&ctx, checksum_before_op2);
13635 htab_empty (ht);
13636 #endif
13638 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13639 tem = fold_ternary (code, type, op0, op1, op2);
13640 if (!tem)
13641 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13643 #ifdef ENABLE_FOLD_CHECKING
13644 md5_init_ctx (&ctx);
13645 fold_checksum_tree (op0, &ctx, ht);
13646 md5_finish_ctx (&ctx, checksum_after_op0);
13647 htab_empty (ht);
13649 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13650 fold_check_failed (op0, tem);
13652 md5_init_ctx (&ctx);
13653 fold_checksum_tree (op1, &ctx, ht);
13654 md5_finish_ctx (&ctx, checksum_after_op1);
13655 htab_empty (ht);
13657 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13658 fold_check_failed (op1, tem);
13660 md5_init_ctx (&ctx);
13661 fold_checksum_tree (op2, &ctx, ht);
13662 md5_finish_ctx (&ctx, checksum_after_op2);
13663 htab_delete (ht);
13665 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13666 fold_check_failed (op2, tem);
13667 #endif
13668 return tem;
 13671 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
 13672    arguments in ARGARRAY, and a null static chain.
13673 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13674 of type TYPE from the given operands as constructed by build_call_array. */
13676 tree
13677 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13679 tree tem;
13680 #ifdef ENABLE_FOLD_CHECKING
13681 unsigned char checksum_before_fn[16],
13682 checksum_before_arglist[16],
13683 checksum_after_fn[16],
13684 checksum_after_arglist[16];
13685 struct md5_ctx ctx;
13686 htab_t ht;
13687 int i;
13689 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13690 md5_init_ctx (&ctx);
13691 fold_checksum_tree (fn, &ctx, ht);
13692 md5_finish_ctx (&ctx, checksum_before_fn);
13693 htab_empty (ht);
13695 md5_init_ctx (&ctx);
13696 for (i = 0; i < nargs; i++)
13697 fold_checksum_tree (argarray[i], &ctx, ht);
13698 md5_finish_ctx (&ctx, checksum_before_arglist);
13699 htab_empty (ht);
13700 #endif
13702 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13704 #ifdef ENABLE_FOLD_CHECKING
13705 md5_init_ctx (&ctx);
13706 fold_checksum_tree (fn, &ctx, ht);
13707 md5_finish_ctx (&ctx, checksum_after_fn);
13708 htab_empty (ht);
13710 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13711 fold_check_failed (fn, tem);
13713 md5_init_ctx (&ctx);
13714 for (i = 0; i < nargs; i++)
13715 fold_checksum_tree (argarray[i], &ctx, ht);
13716 md5_finish_ctx (&ctx, checksum_after_arglist);
13717 htab_delete (ht);
13719 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13720 fold_check_failed (NULL_TREE, tem);
13721 #endif
13722 return tem;
13725 /* Perform constant folding and related simplification of initializer
 13726    expression EXPR.  These routines behave identically to "fold_buildN" but ignore
13727 potential run-time traps and exceptions that fold must preserve. */
13729 #define START_FOLD_INIT \
13730 int saved_signaling_nans = flag_signaling_nans;\
13731 int saved_trapping_math = flag_trapping_math;\
13732 int saved_rounding_math = flag_rounding_math;\
13733 int saved_trapv = flag_trapv;\
13734 int saved_folding_initializer = folding_initializer;\
13735 flag_signaling_nans = 0;\
13736 flag_trapping_math = 0;\
13737 flag_rounding_math = 0;\
13738 flag_trapv = 0;\
13739 folding_initializer = 1;
13741 #define END_FOLD_INIT \
13742 flag_signaling_nans = saved_signaling_nans;\
13743 flag_trapping_math = saved_trapping_math;\
13744 flag_rounding_math = saved_rounding_math;\
13745 flag_trapv = saved_trapv;\
13746 folding_initializer = saved_folding_initializer;
13748 tree
13749 fold_build1_initializer (enum tree_code code, tree type, tree op)
13751 tree result;
13752 START_FOLD_INIT;
13754 result = fold_build1 (code, type, op);
13756 END_FOLD_INIT;
13757 return result;
13760 tree
13761 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13763 tree result;
13764 START_FOLD_INIT;
13766 result = fold_build2 (code, type, op0, op1);
13768 END_FOLD_INIT;
13769 return result;
13772 tree
13773 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13774 tree op2)
13776 tree result;
13777 START_FOLD_INIT;
13779 result = fold_build3 (code, type, op0, op1, op2);
13781 END_FOLD_INIT;
13782 return result;
13785 tree
13786 fold_build_call_array_initializer (tree type, tree fn,
13787 int nargs, tree *argarray)
13789 tree result;
13790 START_FOLD_INIT;
13792 result = fold_build_call_array (type, fn, nargs, argarray);
13794 END_FOLD_INIT;
13795 return result;
13798 #undef START_FOLD_INIT
13799 #undef END_FOLD_INIT
 13801 /* Determine whether the first argument is a multiple of the second.
 13802    Return 0 if it is not, or if we cannot easily determine that it is.
13804 An example of the sort of thing we care about (at this point; this routine
13805 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13806 fold cases do now) is discovering that
13808 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13810 is a multiple of
13812 SAVE_EXPR (J * 8)
13814 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13816 This code also handles discovering that
13818 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13820 is a multiple of 8 so we don't have to worry about dealing with a
13821 possible remainder.
13823 Note that we *look* inside a SAVE_EXPR only to determine how it was
13824 calculated; it is not safe for fold to do much of anything else with the
13825 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13826 at run time. For example, the latter example above *cannot* be implemented
13827 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13828 evaluation time of the original SAVE_EXPR is not necessarily the same at
13829 the time the new expression is evaluated. The only optimization of this
13830 sort that would be valid is changing
13832 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13834 divided by 8 to
13836 SAVE_EXPR (I) * SAVE_EXPR (J)
13838 (where the same SAVE_EXPR (J) is used in the original and the
13839 transformed version). */
 13841 int
 13842 multiple_of_p (tree type, const_tree top, const_tree bottom)
13844 if (operand_equal_p (top, bottom, 0))
13845 return 1;
13847 if (TREE_CODE (type) != INTEGER_TYPE)
13848 return 0;
13850 switch (TREE_CODE (top))
13852 case BIT_AND_EXPR:
13853 /* Bitwise and provides a power of two multiple. If the mask is
13854 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13855 if (!integer_pow2p (bottom))
13856 return 0;
13857 /* FALLTHRU */
13859 case MULT_EXPR:
13860 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13861 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13863 case PLUS_EXPR:
13864 case MINUS_EXPR:
13865 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13866 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13868 case LSHIFT_EXPR:
13869 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13871 tree op1, t1;
13873 op1 = TREE_OPERAND (top, 1);
13874 /* const_binop may not detect overflow correctly,
13875 so check for it explicitly here. */
13876 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13877 > TREE_INT_CST_LOW (op1)
13878 && TREE_INT_CST_HIGH (op1) == 0
13879 && 0 != (t1 = fold_convert (type,
13880 const_binop (LSHIFT_EXPR,
13881 size_one_node,
13882 op1, 0)))
13883 && !TREE_OVERFLOW (t1))
13884 return multiple_of_p (type, t1, bottom);
13886 return 0;
13888 case NOP_EXPR:
13889 /* Can't handle conversions from non-integral or wider integral type. */
13890 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13891 || (TYPE_PRECISION (type)
13892 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13893 return 0;
13895 /* .. fall through ... */
13897 case SAVE_EXPR:
13898 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13900 case INTEGER_CST:
13901 if (TREE_CODE (bottom) != INTEGER_CST
13902 || integer_zerop (bottom)
13903 || (TYPE_UNSIGNED (type)
13904 && (tree_int_cst_sgn (top) < 0
13905 || tree_int_cst_sgn (bottom) < 0)))
13906 return 0;
13907 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13908 top, bottom, 0));
13910 default:
13911 return 0;
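/* Editorial examples (not from the GCC source):
   multiple_of_p (type, i * (j * 8), j * 8) returns 1 through the
   MULT_EXPR case, since one factor equals BOTTOM; multiple_of_p
   (type, 24, 8) returns 1 through the INTEGER_CST case because
   24 % 8 == 0; and multiple_of_p (type, x + 4, 4) returns 1 only
   if x itself can be shown to be a multiple of 4.  */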
13915 /* Return true if CODE or TYPE is known to be non-negative. */
13917 static bool
13918 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13920 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13921 && truth_value_p (code))
13922 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
 13923        have a signed:1 type (where the values are -1 and 0).  */
13924 return true;
13925 return false;
13928 /* Return true if (CODE OP0) is known to be non-negative. If the return
13929 value is based on the assumption that signed overflow is undefined,
13930 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13931 *STRICT_OVERFLOW_P. */
13933 bool
13934 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13935 bool *strict_overflow_p)
13937 if (TYPE_UNSIGNED (type))
13938 return true;
13940 switch (code)
13942 case ABS_EXPR:
13943 /* We can't return 1 if flag_wrapv is set because
13944 ABS_EXPR<INT_MIN> = INT_MIN. */
13945 if (!INTEGRAL_TYPE_P (type))
13946 return true;
13947 if (TYPE_OVERFLOW_UNDEFINED (type))
13949 *strict_overflow_p = true;
13950 return true;
13952 break;
13954 case NON_LVALUE_EXPR:
13955 case FLOAT_EXPR:
13956 case FIX_TRUNC_EXPR:
13957 return tree_expr_nonnegative_warnv_p (op0,
13958 strict_overflow_p);
13960 case NOP_EXPR:
13962 tree inner_type = TREE_TYPE (op0);
13963 tree outer_type = type;
13965 if (TREE_CODE (outer_type) == REAL_TYPE)
13967 if (TREE_CODE (inner_type) == REAL_TYPE)
13968 return tree_expr_nonnegative_warnv_p (op0,
13969 strict_overflow_p);
13970 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13972 if (TYPE_UNSIGNED (inner_type))
13973 return true;
13974 return tree_expr_nonnegative_warnv_p (op0,
13975 strict_overflow_p);
13978 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13980 if (TREE_CODE (inner_type) == REAL_TYPE)
13981 return tree_expr_nonnegative_warnv_p (op0,
13982 strict_overflow_p);
13983 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13984 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13985 && TYPE_UNSIGNED (inner_type);
13988 break;
13990 default:
13991 return tree_simple_nonnegative_warnv_p (code, type);
 13994   /* We don't know the sign of the expression, so be conservative and return false.  */
13995 return false;
13998 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13999 value is based on the assumption that signed overflow is undefined,
14000 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14001 *STRICT_OVERFLOW_P. */
14003 bool
14004 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14005 tree op1, bool *strict_overflow_p)
14007 if (TYPE_UNSIGNED (type))
14008 return true;
14010 switch (code)
14012 case POINTER_PLUS_EXPR:
14013 case PLUS_EXPR:
14014 if (FLOAT_TYPE_P (type))
14015 return (tree_expr_nonnegative_warnv_p (op0,
14016 strict_overflow_p)
14017 && tree_expr_nonnegative_warnv_p (op1,
14018 strict_overflow_p));
14020 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14021 both unsigned and at least 2 bits shorter than the result. */
14022 if (TREE_CODE (type) == INTEGER_TYPE
14023 && TREE_CODE (op0) == NOP_EXPR
14024 && TREE_CODE (op1) == NOP_EXPR)
14026 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14027 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14028 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14029 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14031 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14032 TYPE_PRECISION (inner2)) + 1;
14033 return prec < TYPE_PRECISION (type);
14036 break;
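      /* Editorial example (not from the GCC source): with 32-bit int,
         (int) (unsigned char) a + (int) (unsigned char) b is at most
         255 + 255 = 510; prec = MAX (8, 8) + 1 = 9 < 32, so the sum is
         known to be nonnegative.  */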
14038 case MULT_EXPR:
14039 if (FLOAT_TYPE_P (type))
14041 /* x * x for floating point x is always non-negative. */
14042 if (operand_equal_p (op0, op1, 0))
14043 return true;
14044 return (tree_expr_nonnegative_warnv_p (op0,
14045 strict_overflow_p)
14046 && tree_expr_nonnegative_warnv_p (op1,
14047 strict_overflow_p));
14050 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
 14051          both unsigned and the sum of their precisions is less than that of the result.  */
14052 if (TREE_CODE (type) == INTEGER_TYPE
14053 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14054 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14056 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14057 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14058 : TREE_TYPE (op0);
14059 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14060 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14061 : TREE_TYPE (op1);
14063 bool unsigned0 = TYPE_UNSIGNED (inner0);
14064 bool unsigned1 = TYPE_UNSIGNED (inner1);
14066 if (TREE_CODE (op0) == INTEGER_CST)
14067 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14069 if (TREE_CODE (op1) == INTEGER_CST)
14070 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14072 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14073 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14075 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14076 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14077 : TYPE_PRECISION (inner0);
14079 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14080 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14081 : TYPE_PRECISION (inner1);
14083 return precision0 + precision1 < TYPE_PRECISION (type);
14086 return false;
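      /* Editorial example (not from the GCC source):
         (int) (unsigned short) a * (int) (unsigned short) b needs
         16 + 16 = 32 bits, which is not less than 32, so it is not
         proven nonnegative (0xffff * 0xffff overflows int); but
         (int) (unsigned char) a * 100 needs only 8 + 7 = 15 bits,
         so it is known nonnegative.  */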
14088 case BIT_AND_EXPR:
14089 case MAX_EXPR:
14090 return (tree_expr_nonnegative_warnv_p (op0,
14091 strict_overflow_p)
14092 || tree_expr_nonnegative_warnv_p (op1,
14093 strict_overflow_p));
14095 case BIT_IOR_EXPR:
14096 case BIT_XOR_EXPR:
14097 case MIN_EXPR:
14098 case RDIV_EXPR:
14099 case TRUNC_DIV_EXPR:
14100 case CEIL_DIV_EXPR:
14101 case FLOOR_DIV_EXPR:
14102 case ROUND_DIV_EXPR:
14103 return (tree_expr_nonnegative_warnv_p (op0,
14104 strict_overflow_p)
14105 && tree_expr_nonnegative_warnv_p (op1,
14106 strict_overflow_p));
14108 case TRUNC_MOD_EXPR:
14109 case CEIL_MOD_EXPR:
14110 case FLOOR_MOD_EXPR:
14111 case ROUND_MOD_EXPR:
14112 return tree_expr_nonnegative_warnv_p (op0,
14113 strict_overflow_p);
14114 default:
14115 return tree_simple_nonnegative_warnv_p (code, type);
 14118   /* We don't know the sign of the expression, so be conservative and return false.  */
14119 return false;
14122 /* Return true if T is known to be non-negative. If the return
14123 value is based on the assumption that signed overflow is undefined,
14124 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14125 *STRICT_OVERFLOW_P. */
14127 bool
14128 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14130 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14131 return true;
14133 switch (TREE_CODE (t))
14135 case INTEGER_CST:
14136 return tree_int_cst_sgn (t) >= 0;
14138 case REAL_CST:
14139 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14141 case FIXED_CST:
14142 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14144 case COND_EXPR:
14145 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14146 strict_overflow_p)
14147 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14148 strict_overflow_p));
14149 default:
14150 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14151 TREE_TYPE (t));
 14153   /* We don't know the sign of `t', so be conservative and return false.  */
14154 return false;

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_CABS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP10):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_POW10):
      CASE_INT_FN (BUILT_IN_FFS):
      CASE_INT_FN (BUILT_IN_PARITY):
      CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

      CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_LCEIL):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LFLOOR):
      CASE_FLT_FN (BUILT_IN_LLCEIL):
      CASE_FLT_FN (BUILT_IN_LLFLOOR):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALB):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIGNBIT):
      CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

      CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

      CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
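
/* Illustrative sketch, not part of the original source: the
   BUILT_IN_POW case above accepts pow (x, 2.0) but rejects pow (x, 2.5)
   and pow (x, 3.0).  The even-integer-valued-real test in isolation,
   using the same REAL_VALUE_TYPE interfaces as the code above (the
   helper name is hypothetical):  */
#if 0
static bool
example_even_integer_real_p (tree arg)
{
  REAL_VALUE_TYPE c, cint;
  HOST_WIDE_INT n;

  if (TREE_CODE (arg) != REAL_CST)
    return false;
  c = TREE_REAL_CST (arg);
  n = real_to_integer (&c);		/* Truncates: 2.5 yields 2.  */
  if ((n & 1) != 0)			/* 3.0 fails here.  */
    return false;
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  return real_identical (&c, &cint);	/* 2.5 fails here: 2.5 != 2.0.  */
}
#endif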

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
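
/* Illustrative sketch, not part of the original source: a typical
   client of tree_expr_nonnegative_p folds ABS_EXPR <x> to x when the
   operand is provably non-negative; the strict-overflow warning
   bookkeeping has already been handled above.  The helper name is
   hypothetical:  */
#if 0
static tree
example_fold_abs (tree type, tree arg0)
{
  if (tree_expr_nonnegative_p (arg0))
    return fold_convert (type, arg0);	/* abs (x) == x when x >= 0.  */
  return NULL_TREE;
}
#endif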

/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  /* MIN of two nonzero operands equals one of them, and is
	     therefore itself nonzero.  */
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
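
/* Illustrative sketch, not part of the original source: for a signed
   type with undefined overflow, the PLUS_EXPR case above concludes
   x + y != 0 from x >= 0, y >= 0 and at least one operand nonzero.
   A hypothetical wrapper:  */
#if 0
static bool
example_sum_nonzero_p (tree x, tree y)
{
  bool strict_overflow_p = false;

  /* Any strict-overflow assumption made below is recorded in
     strict_overflow_p for the caller to act on.  */
  return tree_binary_nonzero_warnv_p (PLUS_EXPR, TREE_TYPE (x),
				      x, y, &strict_overflow_p);
}
#endif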

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
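
/* Illustrative example, not part of the original source: the ADDR_EXPR
   case above is why, at the source level,

       int x;
       extern int w __attribute__ ((weak));

   lets &x != 0 fold to 1, while &w != 0 must be kept: an undefined weak
   symbol may resolve to address zero at link time.  */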

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     + (INDEX - (unsigned char)1)) becomes ((ARRAY + (-(unsigned char)1))
	     + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
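
/* Illustrative example, not part of the original source: at the source
   level this folds constant string element loads such as

       "abc"[1]	     => 'b'	(ARRAY_REF path)
       *("abc" + 2)  => 'c'	(INDIRECT_REF path)

   provided the index is a constant inside the string bounds and the
   element mode is a single-byte integer mode.  */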

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
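
/* Illustrative example, not part of the original source: with a
   constant NaN operand the code above yields

       NaN == x  => 0		NaN != x  => 1
       NaN <  x  => 0 only if !flag_trapping_math; otherwise the
		    comparison is left unfolded so the invalid-operand
		    exception is preserved at run time.  */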

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return
     (for a plain return) or the right-hand side of the MODIFY_EXPR inside
     it.  If that operand has no side effects, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);

	  /* Fold only when the element index lies strictly within the
	     vector; an index equal to the number of subparts would read
	     one element past the end.  */
	  if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
				part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
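
/* Illustrative example, not part of the original source: in source
   terms, given

       typedef float v4sf __attribute__ ((vector_size (16)));
       v4sf v4;
       _Complex double z;

   the cases above produce

       *(float *)&v4	    => BIT_FIELD_REF <v4, 32, 0>
       ((float *)&v4)[1]    => BIT_FIELD_REF <v4, 32, 32>
       *(double *)&z	    => __real__ z
       ((double *)&z)[1]    => __imag__ z  */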

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
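
/* Illustrative example, not part of the original source: for a
   power-of-two DIVISOR the constant path above clears the low bits and
   then adds DIVISOR, e.g. round_up (21, 8): 21 & ~7 = 16, then
   16 + 8 = 24.  An exact multiple such as 24 is caught earlier by the
   (low & (divisor - 1)) == 0 test.  The non-constant path builds the
   equivalent expression (value + 7) & -8.  */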

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
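
/* Illustrative example, not part of the original source: given

       int a[10];

   with 32-bit int, ptr_difference_const (&a[7], &a[2], &diff) splits
   both addresses to the common core `a' with bit positions 224 and 64
   and no variable offsets, so the call succeeds and stores
   (224 - 64) / 8 == 20 in *diff.  */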

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
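
/* Illustrative sketch, not part of the original source: a caller that
   only needs the magnitude of an operand, such as a fabs folder, can do

       tree stripped = fold_strip_sign_ops (arg);
       if (stripped)
	 arg = stripped;

   turning fabs (-x * y) into fabs (x * y) and fabs (copysign (x, y))
   into fabs (x).  */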