/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
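
/* Illustrative example (not part of the original file): with 8-bit
   operands for brevity, a = 0x70, b = 0x50, sum = 0xC0: A and B agree
   in sign (~(a ^ b) has the sign bit set) while A and SUM differ
   (a ^ sum has the sign bit set), so the AND is negative and the
   macro reports the signed overflow of 0x70 + 0x50.  */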
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
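
/* Example (illustrative): with HOST_BITS_PER_WIDE_INT == 64 we get
   BASE == 2^32, so for x == 0x0000000500000007, LOWPART (x) == 7,
   HIGHPART (x) == 5, and x == LOWPART (x) + HIGHPART (x) * BASE.  */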
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
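
/* encode and decode are inverses: decoding the four words written by
   encode reproduces the original LOW and HI pieces exactly, since each
   word holds only HOST_BITS_PER_WIDE_INT / 2 significant bits.  */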
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

bool
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
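
/* Example (illustrative): fitting L1 == 0x1FF, H1 == 0 to an 8-bit
   signed type first masks to 0xFF, then sign extends (bit 7 is set)
   to the double-word representation of -1; since the result differs
   from the argument, the function returns true (overflow).  */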
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   the value to be within range of TYPE (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
   OVERFLOWED is nonzero,
   or OVERFLOWABLE is >0 and signed overflow occurs,
   or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
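
/* Usage sketch (hypothetical values): folding 255 + 1 in an 8-bit
   unsigned type reaches force_fit_type_double (type, 256, 0, 1, false);
   fit_double_type wraps the value to 0 and reports overflow, but the
   type is not sign extended and OVERFLOWABLE is > 0, so the shared
   node for 0 is returned with no TREE_OVERFLOW set.  */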
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
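
/* The (l < l1) term is the carry out of the low word: the unsigned
   sum l1 + l2 wrapped around iff the result is smaller than an
   addend, in which case 1 must propagate into the high word.  */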
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
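
/* Example (illustrative): the most negative double-word value has
   L1 == 0 and only the sign bit of H1 set; -H1 wraps back to H1, so
   (*hv & h1) < 0 holds and the overflow is reported.  Every other
   value negates cleanly through one of the two paths above.  */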
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
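
/* Note (illustrative): the eight half-words in PROD hold the exact
   unsigned product; for the signed case the top half is corrected by
   subtracting the other operand once per negative input (the usual
   two's complement adjustment) before checking that it is a pure sign
   extension of the low half.  */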
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
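
/* Note the split shifts above, e.g. (x >> (w - count - 1)) >> 1: they
   keep every shift amount strictly below the word width, since a
   full-width shift is undefined behavior in C (relevant when
   COUNT == 0).  */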
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
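
/* Rounding example (illustrative): dividing -7 by 2 gives trial
   quotient -3 and remainder -1.  TRUNC_DIV_EXPR keeps -3,
   FLOOR_DIV_EXPR adjusts to -4, CEIL_DIV_EXPR keeps -3 (the ratio is
   negative), and ROUND_DIV_EXPR sees 2 * |rem| == |den| and rounds
   away from zero to -4.  */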
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
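
/* Example (illustrative): for INTEGER_CST arguments 12 and 4 this
   returns the constant 3; for 13 and 4 the remainder is nonzero and
   NULL_TREE is returned.  */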
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
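
/* Typical usage (sketch, per the comment above about the loop
   iteration estimator): a caller brackets its folding with
     fold_defer_overflow_warnings ();
     ... calls to fold () ...
     fold_undefer_overflow_warnings (result_was_used, stmt, 0);
   so a deferred warning fires only if the folded result is used.  */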
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
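
/* Example: in a 32-bit signed type only INT_MIN fails this test,
   because its VAL equals (unsigned HOST_WIDE_INT) 1 << 31; every
   other value may be negated without overflow.  */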
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
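
/* Example (illustrative): splitting IN == x + 7 with CODE ==
   PLUS_EXPR stores the literal 7 in *LITP, leaves *CONP null, and
   returns x as the variable part; with NEGATE_P the literal moves to
   *MINUS_LITP and x is negated.  */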
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
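
/* Example (illustrative): int_const_binop (PLUS_EXPR, 200, 100, 0) in
   an 8-bit unsigned type wraps to 44 through force_fit_type_double;
   with NOTRUNC nonzero the untruncated value 300 is kept and only the
   overflow flags are propagated.  */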
1799 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1800 constant. We assume ARG1 and ARG2 have the same data type, or at least
1801 are the same kind of constant and the same machine mode. Return zero if
1802 combining the constants is not allowed in the current operating mode.
1804 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1806 static tree
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 /* Sanity check for the recursive cases. */
1810 if (!arg1 || !arg2)
1811 return NULL_TREE;
1813 STRIP_NOPS (arg1);
1814 STRIP_NOPS (arg2);
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1819 if (TREE_CODE (arg1) == REAL_CST)
1821 enum machine_mode mode;
1822 REAL_VALUE_TYPE d1;
1823 REAL_VALUE_TYPE d2;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1826 bool inexact;
1827 tree t, type;
1829 /* The following codes are handled by real_arithmetic. */
1830 switch (code)
1832 case PLUS_EXPR:
1833 case MINUS_EXPR:
1834 case MULT_EXPR:
1835 case RDIV_EXPR:
1836 case MIN_EXPR:
1837 case MAX_EXPR:
1838 break;
1840 default:
1841 return NULL_TREE;
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1850 /* Don't perform operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1854 return NULL_TREE;
1856 /* Don't perform operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1861 return NULL_TREE;
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1866 return arg1;
1867 else if (REAL_VALUE_ISNAN (d2))
1868 return arg2;
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1880 return NULL_TREE;
1882 /* Don't constant fold this floating point operation if the
1883 result may dependent upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1888 && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1890 return NULL_TREE;
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1895 return t;
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1903 tree t, type;
1904 int sat_p;
1905 bool overflow_p;
1907 /* The following codes are handled by fixed_arithmetic. */
1908 switch (code)
1910 case PLUS_EXPR:
1911 case MINUS_EXPR:
1912 case MULT_EXPR:
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1915 break;
1917 case LSHIFT_EXPR:
1918 case RSHIFT_EXPR:
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1921 f2.mode = SImode;
1922 break;
1924 default:
1925 return NULL_TREE;
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 return t;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1951 tree real, imag;
1953 switch (code)
1955 case PLUS_EXPR:
1956 case MINUS_EXPR:
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1959 break;
1961 case MULT_EXPR:
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1965 notrunc);
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1969 notrunc);
1970 break;
1972 case RDIV_EXPR:
1974 tree magsquared
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1978 notrunc);
1979 tree t1
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1983 notrunc);
1984 tree t2
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1988 notrunc);
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
1996 break;
1998 default:
1999 return NULL_TREE;
2002 if (real && imag)
2003 return build_complex (type, real, imag);
2006 return NULL_TREE;
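
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the COMPLEX_CST cases above are ordinary complex arithmetic applied
   recursively to the real and imaginary parts.  A minimal standalone
   equivalent over C doubles, with hypothetical names:  */

struct cplx { double re, im; };

static struct cplx
cplx_mul (struct cplx x, struct cplx y)
{
  /* (a+bi)*(c+di) = (ac - bd) + (ad + bc)i, matching the MINUS_EXPR
     and PLUS_EXPR of products built in the MULT_EXPR case.  */
  struct cplx r;
  r.re = x.re * y.re - x.im * y.im;
  r.im = x.re * y.im + x.im * y.re;
  return r;
}

static struct cplx
cplx_div (struct cplx x, struct cplx y)
{
  /* The RDIV_EXPR case: divide by |y|^2, the "magsquared" above.  */
  double magsq = y.re * y.re + y.im * y.im;
  struct cplx r;
  r.re = (x.re * y.re + x.im * y.im) / magsq;
  r.im = (x.im * y.re - x.re * y.im) / magsq;
  return r;
}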
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2012 tree
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2023 tree
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2032 TREE_TYPE (arg1)));
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2041 return arg1;
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2043 return arg0;
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 return arg0;
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2053 return arg1;
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2067 tree
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2071 tree ctype;
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2081 ctype = ssizetype;
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2084 else
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2102 else
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2105 arg1, arg0)));
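
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the constant case above never lets the unsigned subtraction wrap; it
   subtracts the smaller operand from the larger and negates in the
   signed type.  The same idea over size_t, with a hypothetical name
   (the original additionally relies on sizetype invariants to rule out
   overflow in the conversion):  */

#include <stddef.h>

static ptrdiff_t
size_diff (size_t a, size_t b)
{
  if (a == b)
    return 0;                        /* special-case a result of zero */
  else if (b < a)
    return (ptrdiff_t) (a - b);      /* a - b cannot wrap here */
  else
    return -(ptrdiff_t) (b - a);     /* subtract the other way, negate */
}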
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2111 static tree
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2114 tree t;
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 or to a sizetype with the same signedness
2124 when the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2142 return t;
2145 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2146 to an integer type. */
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2151 int overflow = 0;
2152 tree t;
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2167 switch (code)
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2173 default:
2174 gcc_unreachable ();
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2180 overflow = 1;
2181 high = 0;
2182 low = 0;
2185 /* See if R is less than the lower bound or greater than the
2186 upper bound. */
2188 if (! overflow)
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2194 overflow = 1;
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2200 if (! overflow)
2202 tree ut = TYPE_MAX_VALUE (type);
2203 if (ut)
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2208 overflow = 1;
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2215 if (! overflow)
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2220 return t;
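
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the saturation described in the comment above, written out for a
   double -> int conversion; sat_double_to_int is a hypothetical name.  */

#include <limits.h>
#include <math.h>

static int
sat_double_to_int (double x)
{
  if (isnan (x))
    return 0;                  /* NaN maps to zero */
  if (x < (double) INT_MIN)
    return INT_MIN;            /* below range: saturate at the minimum */
  if (x > (double) INT_MAX)
    return INT_MAX;            /* above range: saturate at the maximum */
  return (int) x;              /* in range: FIX_TRUNC_EXPR truncation */
}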
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2226 static tree
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2229 tree t;
2230 double_int temp, temp_trunc;
2231 unsigned int mode;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2248 else
2250 temp.low = 0;
2251 temp.high = 0;
2252 temp_trunc.low = 0;
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0:
2257 if the fractional bits are not all zero, add 1 to temp. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2261 double_int one;
2262 one.low = 1;
2263 one.high = 0;
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2270 (temp.high < 0
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2275 return t;
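
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the rounding step above, for a signed Q-format integer; assumes the
   usual arithmetic right shift on negative values.  */

static int
fixed_to_int_trunc (int fx, int fbit)
{
  int i = fx >> fbit;                    /* rounds toward -infinity */
  if (fx < 0 && i * (1 << fbit) != fx)   /* fractional bits were lost? */
    i += 1;                              /* adjust to truncate toward 0,
                                            e.g. -3.5 -> -3, not -4 */
  return i;
}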
2278 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2279 to another floating point type. */
2281 static tree
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2285 tree t;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2291 return t;
2294 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2295 to a floating point type. */
2297 static tree
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2301 tree t;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2309 return t;
2312 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2313 to another fixed-point type. */
2315 static tree
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2319 tree t;
2320 bool overflow_p;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 return t;
2337 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2338 to a fixed-point type. */
2340 static tree
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 return t;
2364 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2365 to a fixed-point type. */
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 return t;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2393 static tree
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2397 return arg1;
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2408 else if (TREE_CODE (type) == REAL_TYPE)
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2426 return NULL_TREE;
2429 /* Construct a vector of zero elements of vector type TYPE. */
2431 static tree
2432 build_zero_vector (tree type)
2434 tree elem, list;
2435 int i, units;
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2440 list = NULL_TREE;
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2446 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2448 bool
2449 fold_convertible_p (const_tree type, const_tree arg)
2451 tree orig = TREE_TYPE (arg);
2453 if (type == orig)
2454 return true;
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2459 return false;
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2462 return true;
2464 switch (TREE_CODE (type))
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2468 case OFFSET_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2471 return true;
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2475 case REAL_TYPE:
2476 case FIXED_POINT_TYPE:
2477 case COMPLEX_TYPE:
2478 case VECTOR_TYPE:
2479 case VOID_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2482 default:
2483 return false;
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2490 tree
2491 fold_convert (tree type, tree arg)
2493 tree orig = TREE_TYPE (arg);
2494 tree tem;
2496 if (type == orig)
2497 return arg;
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2507 switch (TREE_CODE (type))
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2511 case OFFSET_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2516 return tem;
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2530 case REAL_TYPE:
2531 if (TREE_CODE (arg) == INTEGER_CST)
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2535 return tem;
2537 else if (TREE_CODE (arg) == REAL_CST)
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2541 return tem;
2543 else if (TREE_CODE (arg) == FIXED_CST)
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2547 return tem;
2550 switch (TREE_CODE (orig))
2552 case INTEGER_TYPE:
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2557 case REAL_TYPE:
2558 return fold_build1 (NOP_EXPR, type, arg);
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2563 case COMPLEX_TYPE:
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2567 default:
2568 gcc_unreachable ();
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2577 return tem;
2580 switch (TREE_CODE (orig))
2582 case FIXED_POINT_TYPE:
2583 case INTEGER_TYPE:
2584 case ENUMERAL_TYPE:
2585 case BOOLEAN_TYPE:
2586 case REAL_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2589 case COMPLEX_TYPE:
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2593 default:
2594 gcc_unreachable ();
2597 case COMPLEX_TYPE:
2598 switch (TREE_CODE (orig))
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 case REAL_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2608 case COMPLEX_TYPE:
2610 tree rpart, ipart;
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2627 default:
2628 gcc_unreachable ();
2631 case VECTOR_TYPE:
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2639 case VOID_TYPE:
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2642 return tem;
2643 return fold_build1 (NOP_EXPR, type, tem);
2645 default:
2646 gcc_unreachable ();
2650 /* Return false if expr can be assumed not to be an lvalue, true
2651 otherwise. */
2653 static bool
2654 maybe_lvalue_p (const_tree x)
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2659 case VAR_DECL:
2660 case PARM_DECL:
2661 case RESULT_DECL:
2662 case LABEL_DECL:
2663 case FUNCTION_DECL:
2664 case SSA_NAME:
2666 case COMPONENT_REF:
2667 case INDIRECT_REF:
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2670 case ARRAY_REF:
2671 case ARRAY_RANGE_REF:
2672 case BIT_FIELD_REF:
2673 case OBJ_TYPE_REF:
2675 case REALPART_EXPR:
2676 case IMAGPART_EXPR:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2679 case SAVE_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2682 case COMPOUND_EXPR:
2683 case MODIFY_EXPR:
2684 case GIMPLE_MODIFY_STMT:
2685 case TARGET_EXPR:
2686 case COND_EXPR:
2687 case BIND_EXPR:
2688 case MIN_EXPR:
2689 case MAX_EXPR:
2690 break;
2692 default:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2695 break;
2696 return false;
2699 return true;
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2704 tree
2705 non_lvalue (tree x)
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2708 us. */
2709 if (in_gimple_form)
2710 return x;
2712 if (! maybe_lvalue_p (x))
2713 return x;
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2725 static tree
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2730 else
2731 return x;
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a
2737 HONOR_NANS flag: if reversing the comparison is unsafe, return ERROR_MARK. */
2739 enum tree_code
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2743 return ERROR_MARK;
2745 switch (code)
2747 case EQ_EXPR:
2748 return NE_EXPR;
2749 case NE_EXPR:
2750 return EQ_EXPR;
2751 case GT_EXPR:
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2753 case GE_EXPR:
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2755 case LT_EXPR:
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2757 case LE_EXPR:
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2759 case LTGT_EXPR:
2760 return UNEQ_EXPR;
2761 case UNEQ_EXPR:
2762 return LTGT_EXPR;
2763 case UNGT_EXPR:
2764 return LE_EXPR;
2765 case UNGE_EXPR:
2766 return LT_EXPR;
2767 case UNLT_EXPR:
2768 return GE_EXPR;
2769 case UNLE_EXPR:
2770 return GT_EXPR;
2771 case ORDERED_EXPR:
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2775 default:
2776 gcc_unreachable ();
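
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   why HONOR_NANS forces the UN* codes above: with a NaN operand, a
   comparison and its naive inverse are both false.  */

#include <math.h>

static int
inverse_needs_unge (double x, double y)
{
  /* If x is NaN, (x < y) and (x >= y) are both false, so the logical
     inverse of LT_EXPR must be UNGE_EXPR, not GE_EXPR.  */
  return !(x < y) == (isunordered (x, y) || x >= y);   /* always 1 */
}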
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2783 enum tree_code
2784 swap_tree_comparison (enum tree_code code)
2786 switch (code)
2788 case EQ_EXPR:
2789 case NE_EXPR:
2790 case ORDERED_EXPR:
2791 case UNORDERED_EXPR:
2792 case LTGT_EXPR:
2793 case UNEQ_EXPR:
2794 return code;
2795 case GT_EXPR:
2796 return LT_EXPR;
2797 case GE_EXPR:
2798 return LE_EXPR;
2799 case LT_EXPR:
2800 return GT_EXPR;
2801 case LE_EXPR:
2802 return GE_EXPR;
2803 case UNGT_EXPR:
2804 return UNLT_EXPR;
2805 case UNGE_EXPR:
2806 return UNLE_EXPR;
2807 case UNLT_EXPR:
2808 return UNGT_EXPR;
2809 case UNLE_EXPR:
2810 return UNGE_EXPR;
2811 default:
2812 gcc_unreachable ();
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2824 switch (code)
2826 case LT_EXPR:
2827 return COMPCODE_LT;
2828 case EQ_EXPR:
2829 return COMPCODE_EQ;
2830 case LE_EXPR:
2831 return COMPCODE_LE;
2832 case GT_EXPR:
2833 return COMPCODE_GT;
2834 case NE_EXPR:
2835 return COMPCODE_NE;
2836 case GE_EXPR:
2837 return COMPCODE_GE;
2838 case ORDERED_EXPR:
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2842 case UNLT_EXPR:
2843 return COMPCODE_UNLT;
2844 case UNEQ_EXPR:
2845 return COMPCODE_UNEQ;
2846 case UNLE_EXPR:
2847 return COMPCODE_UNLE;
2848 case UNGT_EXPR:
2849 return COMPCODE_UNGT;
2850 case LTGT_EXPR:
2851 return COMPCODE_LTGT;
2852 case UNGE_EXPR:
2853 return COMPCODE_UNGE;
2854 default:
2855 gcc_unreachable ();
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2866 switch (code)
2868 case COMPCODE_LT:
2869 return LT_EXPR;
2870 case COMPCODE_EQ:
2871 return EQ_EXPR;
2872 case COMPCODE_LE:
2873 return LE_EXPR;
2874 case COMPCODE_GT:
2875 return GT_EXPR;
2876 case COMPCODE_NE:
2877 return NE_EXPR;
2878 case COMPCODE_GE:
2879 return GE_EXPR;
2880 case COMPCODE_ORD:
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2884 case COMPCODE_UNLT:
2885 return UNLT_EXPR;
2886 case COMPCODE_UNEQ:
2887 return UNEQ_EXPR;
2888 case COMPCODE_UNLE:
2889 return UNLE_EXPR;
2890 case COMPCODE_UNGT:
2891 return UNGT_EXPR;
2892 case COMPCODE_LTGT:
2893 return LTGT_EXPR;
2894 case COMPCODE_UNGE:
2895 return UNGE_EXPR;
2896 default:
2897 gcc_unreachable ();
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2907 tree
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2917 switch (code)
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2921 break;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2925 break;
2927 default:
2928 return NULL_TREE;
2931 if (!honor_nans)
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2963 rtrap = false;
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2967 if (rtrap && !ltrap
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2969 return NULL_TREE;
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2973 return NULL_TREE;
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2980 else
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
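
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   with the bit encoding, ANDing or ORing two comparisons on identical
   operands is just a bitwise operation on their compcodes, e.g.
   COMPCODE_LT | COMPCODE_EQ is 1 | 2 == 3 == COMPCODE_LE:  */

static int
lt_or_eq (int a, int b)
{
  /* (a < b || a == b) therefore folds to a single a <= b, while
     (a < b && a > b) gives COMPCODE_LT & COMPCODE_GT == 0, i.e.
     constant false (NaN handling permitting).  */
  return a <= b;
}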
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2987 static int
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3022 int
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3027 return 0;
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3041 return 0;
3043 /* If both types don't have the same precision, then it is not safe
3044 to strip NOPs. */
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3046 return 0;
3048 STRIP_NOPS (arg0);
3049 STRIP_NOPS (arg1);
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3073 return 0;
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3085 return 1;
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3092 case INTEGER_CST:
3093 return tree_int_cst_equal (arg0, arg1);
3095 case FIXED_CST:
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3099 case REAL_CST:
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3102 return 1;
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3110 return 1;
3112 return 0;
3114 case VECTOR_CST:
3116 tree v1, v2;
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3120 while (v1 && v2)
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3123 flags))
3124 return 0;
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3129 return v1 == v2;
3132 case COMPLEX_CST:
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3134 flags)
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3136 flags));
3138 case STRING_CST:
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3144 case ADDR_EXPR:
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3147 default:
3148 break;
3151 if (flags & OEP_ONLY_CONST)
3152 return 0;
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, they both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3167 case tcc_unary:
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3171 CASE_CONVERT:
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3175 return 0;
3176 break;
3177 default:
3178 break;
3181 return OP_SAME (0);
3184 case tcc_comparison:
3185 case tcc_binary:
3186 if (OP_SAME (0) && OP_SAME (1))
3187 return 1;
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3196 case tcc_reference:
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3201 return 0;
3203 switch (TREE_CODE (arg0))
3205 case INDIRECT_REF:
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3208 case REALPART_EXPR:
3209 case IMAGPART_EXPR:
3210 return OP_SAME (0);
3212 case ARRAY_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 Compare the array index by value if it is constant first as we
3216 may have different types but same value here. */
3217 return (OP_SAME (0)
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3220 || OP_SAME (1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3224 case COMPONENT_REF:
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3228 && OP_SAME (1)
3229 && OP_SAME_WITH_NULL (2);
3231 case BIT_FIELD_REF:
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3234 default:
3235 return 0;
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3241 case ADDR_EXPR:
3242 case TRUTH_NOT_EXPR:
3243 return OP_SAME (0);
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3249 case TRUTH_AND_EXPR:
3250 case TRUTH_OR_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3253 return 1;
3255 /* Otherwise take into account this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3261 case COND_EXPR:
3262 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3264 default:
3265 return 0;
3268 case tcc_vl_exp:
3269 switch (TREE_CODE (arg0))
3271 case CALL_EXPR:
3272 /* If the CALL_EXPRs call different functions, then they
3273 clearly cannot be equal. */
3274 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3275 flags))
3276 return 0;
3279 unsigned int cef = call_expr_flags (arg0);
3280 if (flags & OEP_PURE_SAME)
3281 cef &= ECF_CONST | ECF_PURE;
3282 else
3283 cef &= ECF_CONST;
3284 if (!cef)
3285 return 0;
3288 /* Now see if all the arguments are the same. */
3290 const_call_expr_arg_iterator iter0, iter1;
3291 const_tree a0, a1;
3292 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3293 a1 = first_const_call_expr_arg (arg1, &iter1);
3294 a0 && a1;
3295 a0 = next_const_call_expr_arg (&iter0),
3296 a1 = next_const_call_expr_arg (&iter1))
3297 if (! operand_equal_p (a0, a1, flags))
3298 return 0;
3300 /* If we get here and both argument lists are exhausted
3301 then the CALL_EXPRs are equal. */
3302 return ! (a0 || a1);
3304 default:
3305 return 0;
3308 case tcc_declaration:
3309 /* Consider __builtin_sqrt equal to sqrt. */
3310 return (TREE_CODE (arg0) == FUNCTION_DECL
3311 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3312 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3313 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3315 default:
3316 return 0;
3319 #undef OP_SAME
3320 #undef OP_SAME_WITH_NULL
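
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the REAL_CST case above compares bit-identity rather than C
   equality, because -0.0 == 0.0 even though the two values are
   distinguishable:  */

#include <math.h>

static int
zeros_equal_but_distinguishable (void)
{
  double pz = 0.0, nz = -0.0;
  /* C's == says they match, yet 1.0/pz is +inf and 1.0/nz is -inf;
     hence REAL_VALUES_IDENTICAL plus the HONOR_SIGNED_ZEROS check.  */
  return pz == nz && signbit (pz) != signbit (nz);   /* returns 1 */
}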
3323 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3324 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3326 When in doubt, return 0. */
3328 static int
3329 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3331 int unsignedp1, unsignedpo;
3332 tree primarg0, primarg1, primother;
3333 unsigned int correct_width;
3335 if (operand_equal_p (arg0, arg1, 0))
3336 return 1;
3338 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3339 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3340 return 0;
3342 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3343 and see if the inner values are the same. This removes any
3344 signedness comparison, which doesn't matter here. */
3345 primarg0 = arg0, primarg1 = arg1;
3346 STRIP_NOPS (primarg0);
3347 STRIP_NOPS (primarg1);
3348 if (operand_equal_p (primarg0, primarg1, 0))
3349 return 1;
3351 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3352 actual comparison operand, ARG0.
3354 First throw away any conversions to wider types
3355 already present in the operands. */
3357 primarg1 = get_narrower (arg1, &unsignedp1);
3358 primother = get_narrower (other, &unsignedpo);
3360 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3361 if (unsignedp1 == unsignedpo
3362 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3363 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3365 tree type = TREE_TYPE (arg0);
3367 /* Make sure shorter operand is extended the right way
3368 to match the longer operand. */
3369 primarg1 = fold_convert (signed_or_unsigned_type_for
3370 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3372 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3373 return 1;
3376 return 0;
3379 /* See if ARG is an expression that is either a comparison or is performing
3380 arithmetic on comparisons. The comparisons must only be comparing
3381 two different values, which will be stored in *CVAL1 and *CVAL2; if
3382 they are nonzero it means that some operands have already been found.
3383 No variables may be used anywhere else in the expression except in the
3384 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3385 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3387 If this is true, return 1. Otherwise, return zero. */
3389 static int
3390 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3392 enum tree_code code = TREE_CODE (arg);
3393 enum tree_code_class class = TREE_CODE_CLASS (code);
3395 /* We can handle some of the tcc_expression cases here. */
3396 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3397 class = tcc_unary;
3398 else if (class == tcc_expression
3399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3400 || code == COMPOUND_EXPR))
3401 class = tcc_binary;
3403 else if (class == tcc_expression && code == SAVE_EXPR
3404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3406 /* If we've already found a CVAL1 or CVAL2, this expression is
3407 too complex to handle. */
3408 if (*cval1 || *cval2)
3409 return 0;
3411 class = tcc_unary;
3412 *save_p = 1;
3415 switch (class)
3417 case tcc_unary:
3418 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3420 case tcc_binary:
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3423 cval1, cval2, save_p));
3425 case tcc_constant:
3426 return 1;
3428 case tcc_expression:
3429 if (code == COND_EXPR)
3430 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3431 cval1, cval2, save_p)
3432 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3433 cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3435 cval1, cval2, save_p));
3436 return 0;
3438 case tcc_comparison:
3439 /* First see if we can handle the first operand, then the second. For
3440 the second operand, we know *CVAL1 can't be zero. It must be that
3441 one side of the comparison is each of the values; test for the
3442 case where this isn't true by failing if the two operands
3443 are the same. */
3445 if (operand_equal_p (TREE_OPERAND (arg, 0),
3446 TREE_OPERAND (arg, 1), 0))
3447 return 0;
3449 if (*cval1 == 0)
3450 *cval1 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 0);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3457 else
3458 return 0;
3460 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3462 else if (*cval2 == 0)
3463 *cval2 = TREE_OPERAND (arg, 1);
3464 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3466 else
3467 return 0;
3469 return 1;
3471 default:
3472 return 0;
3476 /* ARG is a tree that is known to contain just arithmetic operations and
3477 comparisons. Evaluate the operations in the tree substituting NEW0 for
3478 any occurrence of OLD0 as an operand of a comparison and likewise for
3479 NEW1 and OLD1. */
3481 static tree
3482 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3484 tree type = TREE_TYPE (arg);
3485 enum tree_code code = TREE_CODE (arg);
3486 enum tree_code_class class = TREE_CODE_CLASS (code);
3488 /* We can handle some of the tcc_expression cases here. */
3489 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3490 class = tcc_unary;
3491 else if (class == tcc_expression
3492 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3493 class = tcc_binary;
3495 switch (class)
3497 case tcc_unary:
3498 return fold_build1 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1));
3502 case tcc_binary:
3503 return fold_build2 (code, type,
3504 eval_subst (TREE_OPERAND (arg, 0),
3505 old0, new0, old1, new1),
3506 eval_subst (TREE_OPERAND (arg, 1),
3507 old0, new0, old1, new1));
3509 case tcc_expression:
3510 switch (code)
3512 case SAVE_EXPR:
3513 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3515 case COMPOUND_EXPR:
3516 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3518 case COND_EXPR:
3519 return fold_build3 (code, type,
3520 eval_subst (TREE_OPERAND (arg, 0),
3521 old0, new0, old1, new1),
3522 eval_subst (TREE_OPERAND (arg, 1),
3523 old0, new0, old1, new1),
3524 eval_subst (TREE_OPERAND (arg, 2),
3525 old0, new0, old1, new1));
3526 default:
3527 break;
3529 /* Fall through - ??? */
3531 case tcc_comparison:
3533 tree arg0 = TREE_OPERAND (arg, 0);
3534 tree arg1 = TREE_OPERAND (arg, 1);
3536 /* We need to check both for exact equality and tree equality. The
3537 former will be true if the operand has a side-effect. In that
3538 case, we know the operand occurred exactly once. */
3540 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3541 arg0 = new0;
3542 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3543 arg0 = new1;
3545 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3546 arg1 = new0;
3547 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3548 arg1 = new1;
3550 return fold_build2 (code, type, arg0, arg1);
3553 default:
3554 return arg;
3558 /* Return a tree for the case when the result of an expression is RESULT
3559 converted to TYPE and OMITTED was previously an operand of the expression
3560 but is now not needed (e.g., we folded OMITTED * 0).
3562 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3563 the conversion of RESULT to TYPE. */
3565 tree
3566 omit_one_operand (tree type, tree result, tree omitted)
3568 tree t = fold_convert (type, result);
3570 /* If the resulting operand is an empty statement, just return the omitted
3571 statement cast to void. */
3572 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3573 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3575 if (TREE_SIDE_EFFECTS (omitted))
3576 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3578 return non_lvalue (t);
3581 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3583 static tree
3584 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3586 tree t = fold_convert (type, result);
3588 /* If the resulting operand is an empty statement, just return the omitted
3589 statement cast to void. */
3590 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3591 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3593 if (TREE_SIDE_EFFECTS (omitted))
3594 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3596 return pedantic_non_lvalue (t);
3599 /* Return a tree for the case when the result of an expression is RESULT
3600 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3601 of the expression but are now not needed.
3603 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3604 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3605 evaluated before OMITTED2. Otherwise, if neither has side effects,
3606 just do the conversion of RESULT to TYPE. */
3608 tree
3609 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3611 tree t = fold_convert (type, result);
3613 if (TREE_SIDE_EFFECTS (omitted2))
3614 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3615 if (TREE_SIDE_EFFECTS (omitted1))
3616 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3618 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
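
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   what these helpers produce at the C level: a fold such as
   f () * 0 -> 0 must still evaluate f () for its side effects, so the
   result is the moral equivalent of a comma expression.  f is a
   hypothetical function.  */

extern int f (void);

static int
fold_mul_by_zero (void)
{
  return (f (), 0);   /* COMPOUND_EXPR of the omitted operand and 0 */
}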
3622 /* Return a simplified tree node for the truth-negation of ARG. This
3623 never alters ARG itself. We assume that ARG is an operation that
3624 returns a truth value (0 or 1).
3626 FIXME: one would think we would fold the result, but it causes
3627 problems with the dominator optimizer. */
3629 tree
3630 fold_truth_not_expr (tree arg)
3632 tree type = TREE_TYPE (arg);
3633 enum tree_code code = TREE_CODE (arg);
3635 /* If this is a comparison, we can simply invert it, except for
3636 floating-point non-equality comparisons, in which case we just
3637 enclose a TRUTH_NOT_EXPR around what we have. */
3639 if (TREE_CODE_CLASS (code) == tcc_comparison)
3641 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3642 if (FLOAT_TYPE_P (op_type)
3643 && flag_trapping_math
3644 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3645 && code != NE_EXPR && code != EQ_EXPR)
3646 return NULL_TREE;
3647 else
3649 code = invert_tree_comparison (code,
3650 HONOR_NANS (TYPE_MODE (op_type)));
3651 if (code == ERROR_MARK)
3652 return NULL_TREE;
3653 else
3654 return build2 (code, type,
3655 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3659 switch (code)
3661 case INTEGER_CST:
3662 return constant_boolean_node (integer_zerop (arg), type);
3664 case TRUTH_AND_EXPR:
3665 return build2 (TRUTH_OR_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3669 case TRUTH_OR_EXPR:
3670 return build2 (TRUTH_AND_EXPR, type,
3671 invert_truthvalue (TREE_OPERAND (arg, 0)),
3672 invert_truthvalue (TREE_OPERAND (arg, 1)));
3674 case TRUTH_XOR_EXPR:
3675 /* Here we can invert either operand. We invert the first operand
3676 unless the second operand is a TRUTH_NOT_EXPR in which case our
3677 result is the XOR of the first operand with the inside of the
3678 negation of the second operand. */
3680 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3681 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3682 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3683 else
3684 return build2 (TRUTH_XOR_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 TREE_OPERAND (arg, 1));
3688 case TRUTH_ANDIF_EXPR:
3689 return build2 (TRUTH_ORIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_ORIF_EXPR:
3694 return build2 (TRUTH_ANDIF_EXPR, type,
3695 invert_truthvalue (TREE_OPERAND (arg, 0)),
3696 invert_truthvalue (TREE_OPERAND (arg, 1)));
3698 case TRUTH_NOT_EXPR:
3699 return TREE_OPERAND (arg, 0);
3701 case COND_EXPR:
3703 tree arg1 = TREE_OPERAND (arg, 1);
3704 tree arg2 = TREE_OPERAND (arg, 2);
3705 /* A COND_EXPR may have a throw as one operand, which
3706 then has void type. Just leave void operands
3707 as they are. */
3708 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3709 VOID_TYPE_P (TREE_TYPE (arg1))
3710 ? arg1 : invert_truthvalue (arg1),
3711 VOID_TYPE_P (TREE_TYPE (arg2))
3712 ? arg2 : invert_truthvalue (arg2));
3715 case COMPOUND_EXPR:
3716 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3717 invert_truthvalue (TREE_OPERAND (arg, 1)));
3719 case NON_LVALUE_EXPR:
3720 return invert_truthvalue (TREE_OPERAND (arg, 0));
3722 case NOP_EXPR:
3723 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3724 return build1 (TRUTH_NOT_EXPR, type, arg);
3726 case CONVERT_EXPR:
3727 case FLOAT_EXPR:
3728 return build1 (TREE_CODE (arg), type,
3729 invert_truthvalue (TREE_OPERAND (arg, 0)));
3731 case BIT_AND_EXPR:
3732 if (!integer_onep (TREE_OPERAND (arg, 1)))
3733 break;
3734 return build2 (EQ_EXPR, type, arg,
3735 build_int_cst (type, 0));
3737 case SAVE_EXPR:
3738 return build1 (TRUTH_NOT_EXPR, type, arg);
3740 case CLEANUP_POINT_EXPR:
3741 return build1 (CLEANUP_POINT_EXPR, type,
3742 invert_truthvalue (TREE_OPERAND (arg, 0)));
3744 default:
3745 break;
3748 return NULL_TREE;
3751 /* Return a simplified tree node for the truth-negation of ARG. This
3752 never alters ARG itself. We assume that ARG is an operation that
3753 returns a truth value (0 or 1).
3755 FIXME: one would think we would fold the result, but it causes
3756 problems with the dominator optimizer. */
3758 tree
3759 invert_truthvalue (tree arg)
3761 tree tem;
3763 if (TREE_CODE (arg) == ERROR_MARK)
3764 return arg;
3766 tem = fold_truth_not_expr (arg);
3767 if (!tem)
3768 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3770 return tem;
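
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the TRUTH_AND/TRUTH_OR cases above are De Morgan's laws, pushing the
   negation down to the operands:  */

static int
demorgan_holds (int a, int b)
{
  /* !(a && b) == (!a || !b) and !(a || b) == (!a && !b); the folder
     builds the right-hand forms instead of wrapping a TRUTH_NOT_EXPR.  */
  return (!(a && b) == (!a || !b)) && (!(a || b) == (!a && !b));
}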
3773 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3774 operands are another bit-wise operation with a common input. If so,
3775 distribute the bit operations to save an operation and possibly two if
3776 constants are involved. For example, convert
3777 (A | B) & (A | C) into A | (B & C)
3778 Further simplification will occur if B and C are constants.
3780 If this optimization cannot be done, 0 will be returned. */
3782 static tree
3783 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3785 tree common;
3786 tree left, right;
3788 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3789 || TREE_CODE (arg0) == code
3790 || (TREE_CODE (arg0) != BIT_AND_EXPR
3791 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3792 return 0;
3794 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3796 common = TREE_OPERAND (arg0, 0);
3797 left = TREE_OPERAND (arg0, 1);
3798 right = TREE_OPERAND (arg1, 1);
3800 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3802 common = TREE_OPERAND (arg0, 0);
3803 left = TREE_OPERAND (arg0, 1);
3804 right = TREE_OPERAND (arg1, 0);
3806 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3808 common = TREE_OPERAND (arg0, 1);
3809 left = TREE_OPERAND (arg0, 0);
3810 right = TREE_OPERAND (arg1, 1);
3812 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3814 common = TREE_OPERAND (arg0, 1);
3815 left = TREE_OPERAND (arg0, 0);
3816 right = TREE_OPERAND (arg1, 0);
3818 else
3819 return 0;
3821 return fold_build2 (TREE_CODE (arg0), type, common,
3822 fold_build2 (code, type, left, right));
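
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the rewrite performed above, on concrete unsigned values:  */

static unsigned
distributed_and (unsigned a, unsigned b, unsigned c)
{
  /* (a | b) & (a | c) == a | (b & c); one operation is saved, and if
     b and c are constants the inner b & c folds away entirely.  */
  return a | (b & c);
}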
3825 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3826 with code CODE. This optimization is unsafe. */
3827 static tree
3828 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3830 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3831 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3833 /* (A / C) +- (B / C) -> (A +- B) / C. */
3834 if (mul0 == mul1
3835 && operand_equal_p (TREE_OPERAND (arg0, 1),
3836 TREE_OPERAND (arg1, 1), 0))
3837 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3838 fold_build2 (code, type,
3839 TREE_OPERAND (arg0, 0),
3840 TREE_OPERAND (arg1, 0)),
3841 TREE_OPERAND (arg0, 1));
3843 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3844 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3845 TREE_OPERAND (arg1, 0), 0)
3846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3847 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3849 REAL_VALUE_TYPE r0, r1;
3850 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3851 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3852 if (!mul0)
3853 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3854 if (!mul1)
3855 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3856 real_arithmetic (&r0, code, &r0, &r1);
3857 return fold_build2 (MULT_EXPR, type,
3858 TREE_OPERAND (arg0, 0),
3859 build_real (type, r0));
3862 return NULL_TREE;
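
/* Illustrative sketch (editor's annotation, not part of fold-const.c):
   the first pattern above at the C level, and why it is "unsafe":
   (a / c) + (b / c) rounds each quotient separately, while the
   combined form rounds the sum first, so the results can differ in
   the last ulp.  */

static double
distributed_div (double a, double b, double c)
{
  return (a + b) / c;   /* replaces (a / c) + (b / c); saves a divide */
}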
3865 /* Subroutine for fold_truthop: decode a field reference.
3867 If EXP is a comparison reference, we return the innermost reference.
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3875 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3876 otherwise it is not changed.
3878 *PUNSIGNEDP is set to the signedness of the field.
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
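/* Worked example (hypothetical struct, not from the original source):
   given

     struct s { unsigned int a : 3; unsigned int b : 5; } x;

   decoding the EXP "x.b & 0x7" would return the underlying object,
   set *PBITSIZE to 5 and *PBITPOS to the field's bit offset, hand the
   0x7 back in *PAND_MASK, and leave in *PMASK that 0x7 merged with the
   all-ones mask covering the field's 5 bits.  */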
3888 static tree
3889 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3894 tree outer_type = 0;
3895 tree and_mask = 0;
3896 tree mask, inner, offset;
3897 tree unsigned_type;
3898 unsigned int precision;
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 return 0;
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3912 STRIP_NOPS (exp);
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3920 return 0;
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3928 return 0;
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3940 mask = build_int_cst_type (unsigned_type, -1);
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3946 if (and_mask != 0)
3947 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3948 fold_convert (unsigned_type, and_mask), mask);
3950 *pmask = mask;
3951 *pand_mask = and_mask;
3952 return inner;
3955 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3956 represents the sign bit of EXP's type. If EXP represents a sign
3957 or zero extension, also test VAL against the unextended type.
3958 The return value is the (sub)expression whose sign bit is VAL,
3959 or NULL_TREE otherwise. */
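/* Worked example (assuming a 32-bit int): for "int x", sign_bit_p (x, val)
   returns x only when VAL, viewed in the 32 bits of the type, is
   0x80000000; for "(int) (short) x" it also recurses and accepts the
   16-bit sign bit 0x8000 of the unextended SHORT type.  */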
3961 static tree
3962 sign_bit_p (tree exp, const_tree val)
3964 unsigned HOST_WIDE_INT mask_lo, lo;
3965 HOST_WIDE_INT mask_hi, hi;
3966 int width;
3967 tree t;
3969 /* Tree EXP must have an integral type. */
3970 t = TREE_TYPE (exp);
3971 if (! INTEGRAL_TYPE_P (t))
3972 return NULL_TREE;
3974 /* Tree VAL must be an integer constant. */
3975 if (TREE_CODE (val) != INTEGER_CST
3976 || TREE_OVERFLOW (val))
3977 return NULL_TREE;
3979 width = TYPE_PRECISION (t);
3980 if (width > HOST_BITS_PER_WIDE_INT)
3982 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3983 lo = 0;
3985 mask_hi = ((unsigned HOST_WIDE_INT) -1
3986 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3987 mask_lo = -1;
3989 else
3991 hi = 0;
3992 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3994 mask_hi = 0;
3995 mask_lo = ((unsigned HOST_WIDE_INT) -1
3996 >> (HOST_BITS_PER_WIDE_INT - width));
3999 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4000 treat VAL as if it were unsigned. */
4001 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4002 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4003 return exp;
4005 /* Handle extension from a narrower type. */
4006 if (TREE_CODE (exp) == NOP_EXPR
4007 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4008 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4010 return NULL_TREE;
4013 /* Subroutine for fold_truthop: determine if an operand is simple enough
4014 to be evaluated unconditionally. */
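/* For instance, in "a != 0 && b != 0" the right-hand test may be
   evaluated unconditionally (letting TRUTH_ANDIF_EXPR become
   TRUTH_AND_EXPR) when b is a constant, an SSA name, or a
   non-volatile, non-addressable local, since loading such an operand
   has no observable side effects.  Globals are rejected for the
   reasons given in the comments below.  */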
4016 static int
4017 simple_operand_p (const_tree exp)
4019 /* Strip any conversions that don't change the machine mode. */
4020 STRIP_NOPS (exp);
4022 return (CONSTANT_CLASS_P (exp)
4023 || TREE_CODE (exp) == SSA_NAME
4024 || (DECL_P (exp)
4025 && ! TREE_ADDRESSABLE (exp)
4026 && ! TREE_THIS_VOLATILE (exp)
4027 && ! DECL_NONLOCAL (exp)
4028 /* Don't regard global variables as simple. They may be
4029 allocated in ways unknown to the compiler (shared memory,
4030 #pragma weak, etc). */
4031 && ! TREE_PUBLIC (exp)
4032 && ! DECL_EXTERNAL (exp)
4033 /* Loading a static variable is unduly expensive, but global
4034 registers aren't expensive. */
4035 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4038 /* The following functions are subroutines to fold_range_test and allow it to
4039 try to change a logical combination of comparisons into a range test.
4041 For example, both
4042 X == 2 || X == 3 || X == 4 || X == 5
4043 and
4044 X >= 2 && X <= 5
4045 are converted to
4046 (unsigned) (X - 2) <= 3
4048 We describe each set of comparisons as being either inside or outside
4049 a range, using a variable named like IN_P, and then describe the
4050 range with a lower and upper bound. If one of the bounds is omitted,
4051 it represents either the highest or lowest value of the type.
4053 In the comments below, we represent a range by two numbers in brackets
4054 preceded by a "+" to designate being inside that range, or a "-" to
4055 designate being outside that range, so the condition can be inverted by
4056 flipping the prefix. An omitted bound is represented by a "-". For
4057 example, "- [-, 10]" means being outside the range starting at the lowest
4058 possible value and ending at 10, in other words, being greater than 10.
4059 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4060 always false.
4062 We set up things so that the missing bounds are handled in a consistent
4063 manner so neither a missing bound nor "true" and "false" need to be
4064 handled using a special case. */
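/* Worked example of the encoding: "X >= 2 && X <= 5" is the range
   + [2, 5].  Subtracting the low bound maps it to X - 2 in + [0, 3],
   and since values of X below 2 wrap to large unsigned numbers, the
   single test (unsigned) (X - 2) <= 3 replaces both comparisons.
   Flipping the prefix gives the complement: - [2, 5] is
   (unsigned) (X - 2) > 3.  */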
4066 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4067 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4068 and UPPER1_P are nonzero if the respective argument is an upper bound
4069 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4070 must be specified for a comparison. ARG1 will be converted to ARG0's
4071 type if both are specified. */
4073 static tree
4074 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4075 tree arg1, int upper1_p)
4077 tree tem;
4078 int result;
4079 int sgn0, sgn1;
4081 /* If neither arg represents infinity, do the normal operation.
4082 Else, if not a comparison, return infinity. Else handle the special
4083 comparison rules. Note that most of the cases below won't occur, but
4084 are handled for consistency. */
4086 if (arg0 != 0 && arg1 != 0)
4088 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4089 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4090 STRIP_NOPS (tem);
4091 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4094 if (TREE_CODE_CLASS (code) != tcc_comparison)
4095 return 0;
4097 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4098 for neither. In real maths, we cannot assume open ended ranges are
4099 the same. But, this is computer arithmetic, where numbers are finite.
4100 We can therefore model any missing bound as a value Z whose
4101 magnitude exceeds any representable number. This permits
4102 us to treat two missing bounds on the same side as equal. */
4103 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4104 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4105 switch (code)
4107 case EQ_EXPR:
4108 result = sgn0 == sgn1;
4109 break;
4110 case NE_EXPR:
4111 result = sgn0 != sgn1;
4112 break;
4113 case LT_EXPR:
4114 result = sgn0 < sgn1;
4115 break;
4116 case LE_EXPR:
4117 result = sgn0 <= sgn1;
4118 break;
4119 case GT_EXPR:
4120 result = sgn0 > sgn1;
4121 break;
4122 case GE_EXPR:
4123 result = sgn0 >= sgn1;
4124 break;
4125 default:
4126 gcc_unreachable ();
4129 return constant_boolean_node (result, type);
4132 /* Given EXP, a logical expression, set the range it is testing into
4133 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4134 actually being tested. *PLOW and *PHIGH will be made of the same
4135 type as the returned expression. If EXP is not a comparison, we
4136 will most likely not be returning a useful value and range. Set
4137 *STRICT_OVERFLOW_P to true if the return value is only valid
4138 because signed overflow is undefined; otherwise, do not change
4139 *STRICT_OVERFLOW_P. */
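/* Worked example (hypothetical EXP): for EXP == "x > 4" with a signed
   x, the GT_EXPR case below records - [-, 4]: the function returns x,
   sets *PIN_P to 0, stores a null *PLOW (the missing, lowest bound) and
   sets *PHIGH to 4, i.e. "x lies outside [minimum, 4]".  */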
4141 static tree
4142 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4143 bool *strict_overflow_p)
4145 enum tree_code code;
4146 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4147 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4148 int in_p, n_in_p;
4149 tree low, high, n_low, n_high;
4151 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4152 and see if we can refine the range. Some of the cases below may not
4153 happen, but it doesn't seem worth worrying about this. We "continue"
4154 the outer loop when we've changed something; otherwise we "break"
4155 the switch, which will "break" the while. */
4157 in_p = 0;
4158 low = high = build_int_cst (TREE_TYPE (exp), 0);
4160 while (1)
4162 code = TREE_CODE (exp);
4163 exp_type = TREE_TYPE (exp);
4165 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4167 if (TREE_OPERAND_LENGTH (exp) > 0)
4168 arg0 = TREE_OPERAND (exp, 0);
4169 if (TREE_CODE_CLASS (code) == tcc_comparison
4170 || TREE_CODE_CLASS (code) == tcc_unary
4171 || TREE_CODE_CLASS (code) == tcc_binary)
4172 arg0_type = TREE_TYPE (arg0);
4173 if (TREE_CODE_CLASS (code) == tcc_binary
4174 || TREE_CODE_CLASS (code) == tcc_comparison
4175 || (TREE_CODE_CLASS (code) == tcc_expression
4176 && TREE_OPERAND_LENGTH (exp) > 1))
4177 arg1 = TREE_OPERAND (exp, 1);
4180 switch (code)
4182 case TRUTH_NOT_EXPR:
4183 in_p = ! in_p, exp = arg0;
4184 continue;
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 /* We can only do something if the range is testing for zero
4189 and if the second operand is an integer constant. Note that
4190 saying something is "in" the range we make is done by
4191 complementing IN_P, since IN_P is set for the initial case of
4192 being not equal to zero; "out" leaves it alone.
4193 if (low == 0 || high == 0
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4196 break;
4198 switch (code)
4200 case NE_EXPR: /* - [c, c] */
4201 low = high = arg1;
4202 break;
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4205 break;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4208 break;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4211 break;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4214 break;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4217 break;
4218 default:
4219 gcc_unreachable ();
4222 /* If this is an unsigned comparison, we also know that EXP is
4223 greater than or equal to zero. We base the range tests we make
4224 on that fact, so we record it here so we can parse existing
4225 range tests. We test arg0_type since often the return type
4226 of, e.g. EQ_EXPR, is boolean. */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4230 in_p, low, high, 1,
4231 build_int_cst (arg0_type, 0),
4232 NULL_TREE))
4233 break;
4235 in_p = n_in_p, low = n_low, high = n_high;
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound
4239 minus 1. */
4240 if (high == 0 && low && ! integer_zerop (low))
4242 in_p = ! in_p;
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 integer_one_node, 0);
4245 low = build_int_cst (arg0_type, 0);
4249 exp = arg0;
4250 continue;
4252 case NEGATE_EXPR:
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4256 0, high, 1);
4257 n_high = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4259 0, low, 0);
4260 low = n_low, high = n_high;
4261 exp = arg0;
4262 continue;
4264 case BIT_NOT_EXPR:
4265 /* ~ X -> -X - 1 */
4266 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4267 build_int_cst (exp_type, 1));
4268 continue;
4270 case PLUS_EXPR: case MINUS_EXPR:
4271 if (TREE_CODE (arg1) != INTEGER_CST)
4272 break;
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4278 break;
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, low, 0, arg1, 0);
4286 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4287 arg0_type, high, 1, arg1, 0);
4288 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4289 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4290 break;
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4293 *strict_overflow_p = true;
4295 /* Check for an unsigned range which has wrapped around the maximum
4296 value thus making n_high < n_low, and normalize it. */
4297 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4299 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4300 integer_one_node, 0);
4301 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4302 integer_one_node, 0);
4304 /* If the range is of the form +/- [ x+1, x ], we won't
4305 be able to normalize it. But then, it represents the
4306 whole range or the empty set, so make it
4307 +/- [ -, - ]. */
4308 if (tree_int_cst_equal (n_low, low)
4309 && tree_int_cst_equal (n_high, high))
4310 low = high = 0;
4311 else
4312 in_p = ! in_p;
4314 else
4315 low = n_low, high = n_high;
4317 exp = arg0;
4318 continue;
4320 CASE_CONVERT: case NON_LVALUE_EXPR:
4321 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4322 break;
4324 if (! INTEGRAL_TYPE_P (arg0_type)
4325 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4326 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4327 break;
4329 n_low = low, n_high = high;
4331 if (n_low != 0)
4332 n_low = fold_convert (arg0_type, n_low);
4334 if (n_high != 0)
4335 n_high = fold_convert (arg0_type, n_high);
4338 /* If we're converting arg0 from an unsigned type to exp's
4339 signed type, we will be doing the comparison as unsigned.
4340 The tests above have already verified that LOW and HIGH
4341 are both positive.
4343 So we have to ensure that we will handle large unsigned
4344 values the same way that the current signed bounds treat
4345 negative values. */
4347 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4349 tree high_positive;
4350 tree equiv_type;
4351 /* For fixed-point modes, we need to pass the saturating flag
4352 as the 2nd parameter. */
4353 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4354 equiv_type = lang_hooks.types.type_for_mode
4355 (TYPE_MODE (arg0_type),
4356 TYPE_SATURATING (arg0_type));
4357 else
4358 equiv_type = lang_hooks.types.type_for_mode
4359 (TYPE_MODE (arg0_type), 1);
4361 /* A range without an upper bound is, naturally, unbounded.
4362 Since convert would have cropped a very large value, use
4363 the max value for the destination type. */
4364 high_positive
4365 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4366 : TYPE_MAX_VALUE (arg0_type);
4368 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4369 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4370 fold_convert (arg0_type,
4371 high_positive),
4372 build_int_cst (arg0_type, 1));
4374 /* If the low bound is specified, "and" the range with the
4375 range for which the original unsigned value will be
4376 positive. */
4377 if (low != 0)
4379 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4380 1, n_low, n_high, 1,
4381 fold_convert (arg0_type,
4382 integer_zero_node),
4383 high_positive))
4384 break;
4386 in_p = (n_in_p == in_p);
4388 else
4390 /* Otherwise, "or" the range with the range of the input
4391 that will be interpreted as negative. */
4392 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4393 0, n_low, n_high, 1,
4394 fold_convert (arg0_type,
4395 integer_zero_node),
4396 high_positive))
4397 break;
4399 in_p = (in_p != n_in_p);
4403 exp = arg0;
4404 low = n_low, high = n_high;
4405 continue;
4407 default:
4408 break;
4411 break;
4414 /* If EXP is a constant, we can evaluate whether this is true or false. */
4415 if (TREE_CODE (exp) == INTEGER_CST)
4417 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4418 exp, 0, low, 0))
4419 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4420 exp, 1, high, 1)));
4421 low = high = 0;
4422 exp = 0;
4425 *pin_p = in_p, *plow = low, *phigh = high;
4426 return exp;
4429 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4430 type, TYPE, return an expression to test if EXP is in (or out of, depending
4431 on IN_P) the range. Return 0 if the test couldn't be created. */
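/* Worked example (hypothetical operands): for IN_P == 1, LOW == 2,
   HIGH == 5 and a signed EXP, the subtraction path at the end forces a
   wrap-around (unsigned) type and builds

     (unsigned) (exp - 2) <= 3

   while IN_P == 0 builds the same check and inverts it.  The earlier
   special cases handle one-sided ranges and the singleton LOW == HIGH,
   which becomes exp == LOW.  */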
4433 static tree
4434 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4436 tree etype = TREE_TYPE (exp);
4437 tree value;
4439 #ifdef HAVE_canonicalize_funcptr_for_compare
4440 /* Disable this optimization for function pointer expressions
4441 on targets that require function pointer canonicalization. */
4442 if (HAVE_canonicalize_funcptr_for_compare
4443 && TREE_CODE (etype) == POINTER_TYPE
4444 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4445 return NULL_TREE;
4446 #endif
4448 if (! in_p)
4450 value = build_range_check (type, exp, 1, low, high);
4451 if (value != 0)
4452 return invert_truthvalue (value);
4454 return 0;
4457 if (low == 0 && high == 0)
4458 return build_int_cst (type, 1);
4460 if (low == 0)
4461 return fold_build2 (LE_EXPR, type, exp,
4462 fold_convert (etype, high));
4464 if (high == 0)
4465 return fold_build2 (GE_EXPR, type, exp,
4466 fold_convert (etype, low));
4468 if (operand_equal_p (low, high, 0))
4469 return fold_build2 (EQ_EXPR, type, exp,
4470 fold_convert (etype, low));
4472 if (integer_zerop (low))
4474 if (! TYPE_UNSIGNED (etype))
4476 etype = unsigned_type_for (etype);
4477 high = fold_convert (etype, high);
4478 exp = fold_convert (etype, exp);
4480 return build_range_check (type, exp, 1, 0, high);
4483 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4484 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4486 unsigned HOST_WIDE_INT lo;
4487 HOST_WIDE_INT hi;
4488 int prec;
4490 prec = TYPE_PRECISION (etype);
4491 if (prec <= HOST_BITS_PER_WIDE_INT)
4493 hi = 0;
4494 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4496 else
4498 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4499 lo = (unsigned HOST_WIDE_INT) -1;
4502 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4504 if (TYPE_UNSIGNED (etype))
4506 etype = signed_type_for (etype);
4507 exp = fold_convert (etype, exp);
4509 return fold_build2 (GT_EXPR, type, exp,
4510 build_int_cst (etype, 0));
4514 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4515 This requires wrap-around arithmetic for the type of the expression. */
4516 switch (TREE_CODE (etype))
4518 case INTEGER_TYPE:
4519 /* There is no requirement that LOW be within the range of ETYPE
4520 if the latter is a subtype. It must, however, be within the base
4521 type of ETYPE. So be sure we do the subtraction in that type. */
4522 if (TREE_TYPE (etype))
4523 etype = TREE_TYPE (etype);
4524 break;
4526 case ENUMERAL_TYPE:
4527 case BOOLEAN_TYPE:
4528 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4529 TYPE_UNSIGNED (etype));
4530 break;
4532 default:
4533 break;
4536 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4537 if (TREE_CODE (etype) == INTEGER_TYPE
4538 && !TYPE_OVERFLOW_WRAPS (etype))
4540 tree utype, minv, maxv;
4542 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4543 for the type in question, as we rely on this here. */
4544 utype = unsigned_type_for (etype);
4545 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4546 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4547 integer_one_node, 1);
4548 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4550 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4551 minv, 1, maxv, 1)))
4552 etype = utype;
4553 else
4554 return 0;
4557 high = fold_convert (etype, high);
4558 low = fold_convert (etype, low);
4559 exp = fold_convert (etype, exp);
4561 value = const_binop (MINUS_EXPR, high, low, 0);
4564 if (POINTER_TYPE_P (etype))
4566 if (value != 0 && !TREE_OVERFLOW (value))
4568 low = fold_convert (sizetype, low);
4569 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4570 return build_range_check (type,
4571 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4572 1, build_int_cst (etype, 0), value);
4574 return 0;
4577 if (value != 0 && !TREE_OVERFLOW (value))
4578 return build_range_check (type,
4579 fold_build2 (MINUS_EXPR, etype, exp, low),
4580 1, build_int_cst (etype, 0), value);
4582 return 0;
4585 /* Return the predecessor of VAL in its type, handling the infinite case. */
4587 static tree
4588 range_predecessor (tree val)
4590 tree type = TREE_TYPE (val);
4592 if (INTEGRAL_TYPE_P (type)
4593 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4594 return 0;
4595 else
4596 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4599 /* Return the successor of VAL in its type, handling the infinite case. */
4601 static tree
4602 range_successor (tree val)
4604 tree type = TREE_TYPE (val);
4606 if (INTEGRAL_TYPE_P (type)
4607 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4608 return 0;
4609 else
4610 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4613 /* Given two ranges, see if we can merge them into one. Return 1 if we
4614 can, 0 if we can't. Set the output range into the specified parameters. */
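/* Worked examples (hypothetical ranges): merging + [2, 5] with
   + [4, 9] yields the intersection + [4, 5]; merging + [2, 5] with
   - [4, 9] yields + [2, 3]; two "out" ranges combine only when they
   overlap, touch, or jointly exclude everything but one middle
   interval, as handled case by case below.  */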
4616 static int
4617 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4618 tree high0, int in1_p, tree low1, tree high1)
4620 int no_overlap;
4621 int subset;
4622 int temp;
4623 tree tem;
4624 int in_p;
4625 tree low, high;
4626 int lowequal = ((low0 == 0 && low1 == 0)
4627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4628 low0, 0, low1, 0)));
4629 int highequal = ((high0 == 0 && high1 == 0)
4630 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4631 high0, 1, high1, 1)));
4633 /* Make range 0 be the range that starts first, or ends last if they
4634 start at the same value; swap them if necessary. */
4635 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4636 low0, 0, low1, 0))
4637 || (lowequal
4638 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4639 high1, 1, high0, 1))))
4641 temp = in0_p, in0_p = in1_p, in1_p = temp;
4642 tem = low0, low0 = low1, low1 = tem;
4643 tem = high0, high0 = high1, high1 = tem;
4646 /* Now flag two cases, whether the ranges are disjoint or whether the
4647 second range is totally subsumed in the first. Note that the tests
4648 below are simplified by the ones above. */
4649 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4650 high0, 1, low1, 0));
4651 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4652 high1, 1, high0, 1));
4654 /* We now have four cases, depending on whether we are including or
4655 excluding the two ranges. */
4656 if (in0_p && in1_p)
4658 /* If they don't overlap, the result is false. If the second range
4659 is a subset it is the result. Otherwise, the range is from the start
4660 of the second to the end of the first. */
4661 if (no_overlap)
4662 in_p = 0, low = high = 0;
4663 else if (subset)
4664 in_p = 1, low = low1, high = high1;
4665 else
4666 in_p = 1, low = low1, high = high0;
4669 else if (in0_p && ! in1_p)
4671 /* If they don't overlap, the result is the first range. If they are
4672 equal, the result is false. If the second range is a subset of the
4673 first, and the ranges begin at the same place, we go from just after
4674 the end of the second range to the end of the first. If the second
4675 range is not a subset of the first, or if it is a subset and both
4676 ranges end at the same place, the range starts at the start of the
4677 first range and ends just before the second range.
4678 Otherwise, we can't describe this as a single range. */
4679 if (no_overlap)
4680 in_p = 1, low = low0, high = high0;
4681 else if (lowequal && highequal)
4682 in_p = 0, low = high = 0;
4683 else if (subset && lowequal)
4685 low = range_successor (high1);
4686 high = high0;
4687 in_p = 1;
4688 if (low == 0)
4690 /* We are in the weird situation where high0 > high1 but
4691 high1 has no successor. Punt. */
4692 return 0;
4695 else if (! subset || highequal)
4697 low = low0;
4698 high = range_predecessor (low1);
4699 in_p = 1;
4700 if (high == 0)
4702 /* low0 < low1 but low1 has no predecessor. Punt. */
4703 return 0;
4706 else
4707 return 0;
4710 else if (! in0_p && in1_p)
4712 /* If they don't overlap, the result is the second range. If the second
4713 is a subset of the first, the result is false. Otherwise,
4714 the range starts just after the first range and ends at the
4715 end of the second. */
4716 if (no_overlap)
4717 in_p = 1, low = low1, high = high1;
4718 else if (subset || highequal)
4719 in_p = 0, low = high = 0;
4720 else
4722 low = range_successor (high0);
4723 high = high1;
4724 in_p = 1;
4725 if (low == 0)
4727 /* high1 > high0 but high0 has no successor. Punt. */
4728 return 0;
4733 else
4735 /* The case where we are excluding both ranges. Here the complex case
4736 is if they don't overlap. In that case, the only time we have a
4737 range is if they are adjacent. If the second is a subset of the
4738 first, the result is the first. Otherwise, the range to exclude
4739 starts at the beginning of the first range and ends at the end of the
4740 second. */
4741 if (no_overlap)
4743 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4744 range_successor (high0),
4745 1, low1, 0)))
4746 in_p = 0, low = low0, high = high1;
4747 else
4749 /* Canonicalize - [min, x] into - [-, x]. */
4750 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4751 switch (TREE_CODE (TREE_TYPE (low0)))
4753 case ENUMERAL_TYPE:
4754 if (TYPE_PRECISION (TREE_TYPE (low0))
4755 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4756 break;
4757 /* FALLTHROUGH */
4758 case INTEGER_TYPE:
4759 if (tree_int_cst_equal (low0,
4760 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4761 low0 = 0;
4762 break;
4763 case POINTER_TYPE:
4764 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4765 && integer_zerop (low0))
4766 low0 = 0;
4767 break;
4768 default:
4769 break;
4772 /* Canonicalize - [x, max] into - [x, -]. */
4773 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4774 switch (TREE_CODE (TREE_TYPE (high1)))
4776 case ENUMERAL_TYPE:
4777 if (TYPE_PRECISION (TREE_TYPE (high1))
4778 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4779 break;
4780 /* FALLTHROUGH */
4781 case INTEGER_TYPE:
4782 if (tree_int_cst_equal (high1,
4783 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4784 high1 = 0;
4785 break;
4786 case POINTER_TYPE:
4787 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4788 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4789 high1, 1,
4790 integer_one_node, 1)))
4791 high1 = 0;
4792 break;
4793 default:
4794 break;
4797 /* The ranges might be also adjacent between the maximum and
4798 minimum values of the given type. For
4799 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4800 return + [x + 1, y - 1]. */
4801 if (low0 == 0 && high1 == 0)
4803 low = range_successor (high0);
4804 high = range_predecessor (low1);
4805 if (low == 0 || high == 0)
4806 return 0;
4808 in_p = 1;
4810 else
4811 return 0;
4814 else if (subset)
4815 in_p = 0, low = low0, high = high0;
4816 else
4817 in_p = 0, low = low0, high = high1;
4820 *pin_p = in_p, *plow = low, *phigh = high;
4821 return 1;
4825 /* Subroutine of fold, looking inside expressions of the form
4826 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4827 of the COND_EXPR. This function is being used also to optimize
4828 A op B ? C : A, by reversing the comparison first.
4830 Return a folded expression whose code is not a COND_EXPR
4831 anymore, or NULL_TREE if no folding opportunity is found. */
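/* Illustrative sketch (hypothetical sources): the cases below turn,
   for example,

     x >= 0 ? x : -x    into an ABS_EXPR of x,
     x < y ? x : y      into a MIN_EXPR of x and y,
     x == 2 ? x : 3     into x == 2 ? 2 : 3,

   each guarded by the signed-zero, NaN and lvalue conditions spelled
   out in the comments that follow.  */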
4833 static tree
4834 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4836 enum tree_code comp_code = TREE_CODE (arg0);
4837 tree arg00 = TREE_OPERAND (arg0, 0);
4838 tree arg01 = TREE_OPERAND (arg0, 1);
4839 tree arg1_type = TREE_TYPE (arg1);
4840 tree tem;
4842 STRIP_NOPS (arg1);
4843 STRIP_NOPS (arg2);
4845 /* If we have A op 0 ? A : -A, consider applying the following
4846 transformations:
4848 A == 0? A : -A same as -A
4849 A != 0? A : -A same as A
4850 A >= 0? A : -A same as abs (A)
4851 A > 0? A : -A same as abs (A)
4852 A <= 0? A : -A same as -abs (A)
4853 A < 0? A : -A same as -abs (A)
4855 None of these transformations work for modes with signed
4856 zeros. If A is +/-0, the first two transformations will
4857 change the sign of the result (from +0 to -0, or vice
4858 versa). The last four will fix the sign of the result,
4859 even though the original expressions could be positive or
4860 negative, depending on the sign of A.
4862 Note that all these transformations are correct if A is
4863 NaN, since the two alternatives (A and -A) are also NaNs. */
4864 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4865 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4866 ? real_zerop (arg01)
4867 : integer_zerop (arg01))
4868 && ((TREE_CODE (arg2) == NEGATE_EXPR
4869 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4870 /* In the case that A is of the form X-Y, '-A' (arg2) may
4871 have already been folded to Y-X, check for that. */
4872 || (TREE_CODE (arg1) == MINUS_EXPR
4873 && TREE_CODE (arg2) == MINUS_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg1, 0),
4875 TREE_OPERAND (arg2, 1), 0)
4876 && operand_equal_p (TREE_OPERAND (arg1, 1),
4877 TREE_OPERAND (arg2, 0), 0))))
4878 switch (comp_code)
4880 case EQ_EXPR:
4881 case UNEQ_EXPR:
4882 tem = fold_convert (arg1_type, arg1);
4883 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4884 case NE_EXPR:
4885 case LTGT_EXPR:
4886 return pedantic_non_lvalue (fold_convert (type, arg1));
4887 case UNGE_EXPR:
4888 case UNGT_EXPR:
4889 if (flag_trapping_math)
4890 break;
4891 /* Fall through. */
4892 case GE_EXPR:
4893 case GT_EXPR:
4894 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4895 arg1 = fold_convert (signed_type_for
4896 (TREE_TYPE (arg1)), arg1);
4897 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4898 return pedantic_non_lvalue (fold_convert (type, tem));
4899 case UNLE_EXPR:
4900 case UNLT_EXPR:
4901 if (flag_trapping_math)
4902 break;
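/* Fall through.  */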
4903 case LE_EXPR:
4904 case LT_EXPR:
4905 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4906 arg1 = fold_convert (signed_type_for
4907 (TREE_TYPE (arg1)), arg1);
4908 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4909 return negate_expr (fold_convert (type, tem));
4910 default:
4911 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4912 break;
4915 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4916 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4917 both transformations are correct when A is NaN: A != 0
4918 is then true, and A == 0 is false. */
4920 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4921 && integer_zerop (arg01) && integer_zerop (arg2))
4923 if (comp_code == NE_EXPR)
4924 return pedantic_non_lvalue (fold_convert (type, arg1));
4925 else if (comp_code == EQ_EXPR)
4926 return build_int_cst (type, 0);
4929 /* Try some transformations of A op B ? A : B.
4931 A == B? A : B same as B
4932 A != B? A : B same as A
4933 A >= B? A : B same as max (A, B)
4934 A > B? A : B same as max (B, A)
4935 A <= B? A : B same as min (A, B)
4936 A < B? A : B same as min (B, A)
4938 As above, these transformations don't work in the presence
4939 of signed zeros. For example, if A and B are zeros of
4940 opposite sign, the first two transformations will change
4941 the sign of the result. In the last four, the original
4942 expressions give different results for (A=+0, B=-0) and
4943 (A=-0, B=+0), but the transformed expressions do not.
4945 The first two transformations are correct if either A or B
4946 is a NaN. In the first transformation, the condition will
4947 be false, and B will indeed be chosen. In the case of the
4948 second transformation, the condition A != B will be true,
4949 and A will be chosen.
4951 The conversions to max() and min() are not correct if B is
4952 a number and A is not. The conditions in the original
4953 expressions will be false, so all four give B. The min()
4954 and max() versions would give a NaN instead. */
4955 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4956 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4957 /* Avoid these transformations if the COND_EXPR may be used
4958 as an lvalue in the C++ front-end. PR c++/19199. */
4959 && (in_gimple_form
4960 || (strcmp (lang_hooks.name, "GNU C++") != 0
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4972 comp_type = type;
4973 comp_op0 = arg1;
4974 comp_op1 = arg2;
4977 switch (comp_code)
4979 case EQ_EXPR:
4980 return pedantic_non_lvalue (fold_convert (type, arg2));
4981 case NE_EXPR:
4982 return pedantic_non_lvalue (fold_convert (type, arg1));
4983 case LE_EXPR:
4984 case LT_EXPR:
4985 case UNLE_EXPR:
4986 case UNLT_EXPR:
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4993 comp_op0 = fold_convert (comp_type, comp_op0);
4994 comp_op1 = fold_convert (comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4998 return pedantic_non_lvalue (fold_convert (type, tem));
5000 break;
5001 case GE_EXPR:
5002 case GT_EXPR:
5003 case UNGE_EXPR:
5004 case UNGT_EXPR:
5005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5007 comp_op0 = fold_convert (comp_type, comp_op0);
5008 comp_op1 = fold_convert (comp_type, comp_op1);
5009 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5010 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5011 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5012 return pedantic_non_lvalue (fold_convert (type, tem));
5014 break;
5015 case UNEQ_EXPR:
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5017 return pedantic_non_lvalue (fold_convert (type, arg2));
5018 break;
5019 case LTGT_EXPR:
5020 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5021 return pedantic_non_lvalue (fold_convert (type, arg1));
5022 break;
5023 default:
5024 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5025 break;
5029 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5030 we might still be able to simplify this. For example,
5031 if C1 is one less or one more than C2, this might have started
5032 out as a MIN or MAX and been transformed by this function.
5033 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5035 if (INTEGRAL_TYPE_P (type)
5036 && TREE_CODE (arg01) == INTEGER_CST
5037 && TREE_CODE (arg2) == INTEGER_CST)
5038 switch (comp_code)
5040 case EQ_EXPR:
5041 /* We can replace A with C1 in this case. */
5042 arg1 = fold_convert (type, arg01);
5043 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5045 case LT_EXPR:
5046 /* If C1 is C2 + 1, this is min(A, C2). */
5047 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5048 OEP_ONLY_CONST)
5049 && operand_equal_p (arg01,
5050 const_binop (PLUS_EXPR, arg2,
5051 build_int_cst (type, 1), 0),
5052 OEP_ONLY_CONST))
5053 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5054 type,
5055 fold_convert (type, arg1),
5056 arg2));
5057 break;
5059 case LE_EXPR:
5060 /* If C1 is C2 - 1, this is min(A, C2). */
5061 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5062 OEP_ONLY_CONST)
5063 && operand_equal_p (arg01,
5064 const_binop (MINUS_EXPR, arg2,
5065 build_int_cst (type, 1), 0),
5066 OEP_ONLY_CONST))
5067 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5068 type,
5069 fold_convert (type, arg1),
5070 arg2));
5071 break;
5073 case GT_EXPR:
5074 /* If C1 is C2 - 1, this is max(A, C2). */
5075 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5076 OEP_ONLY_CONST)
5077 && operand_equal_p (arg01,
5078 const_binop (MINUS_EXPR, arg2,
5079 build_int_cst (type, 1), 0),
5080 OEP_ONLY_CONST))
5081 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5082 type,
5083 fold_convert (type, arg1),
5084 arg2));
5085 break;
5087 case GE_EXPR:
5088 /* If C1 is C2 + 1, this is max(A, C2). */
5089 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5090 OEP_ONLY_CONST)
5091 && operand_equal_p (arg01,
5092 const_binop (PLUS_EXPR, arg2,
5093 build_int_cst (type, 1), 0),
5094 OEP_ONLY_CONST))
5095 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5096 type,
5097 fold_convert (type, arg1),
5098 arg2));
5099 break;
5100 case NE_EXPR:
5101 break;
5102 default:
5103 gcc_unreachable ();
5106 return NULL_TREE;
5111 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5112 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5113 #endif
5115 /* EXP is some logical combination of boolean tests. See if we can
5116 merge it into some range test. Return the new tree if so. */
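/* Illustrative sketch: for "ch >= '0' && ch <= '9'", make_range turns
   each operand into a range, merge_ranges combines them into
   + ['0', '9'], and build_range_check emits roughly

     (unsigned) (ch - '0') <= 9

   modulo the exact types chosen.  */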
5118 static tree
5119 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5121 int or_op = (code == TRUTH_ORIF_EXPR
5122 || code == TRUTH_OR_EXPR);
5123 int in0_p, in1_p, in_p;
5124 tree low0, low1, low, high0, high1, high;
5125 bool strict_overflow_p = false;
5126 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5127 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5128 tree tem;
5129 const char * const warnmsg = G_("assuming signed overflow does not occur "
5130 "when simplifying range test");
5132 /* If this is an OR operation, invert both sides; we will invert
5133 again at the end. */
5134 if (or_op)
5135 in0_p = ! in0_p, in1_p = ! in1_p;
5137 /* If both expressions are the same, if we can merge the ranges, and we
5138 can build the range test, return it or it inverted. If one of the
5139 ranges is always true or always false, consider it to be the same
5140 expression as the other. */
5141 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5142 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5143 in1_p, low1, high1)
5144 && 0 != (tem = (build_range_check (type,
5145 lhs != 0 ? lhs
5146 : rhs != 0 ? rhs : integer_zero_node,
5147 in_p, low, high))))
5149 if (strict_overflow_p)
5150 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5151 return or_op ? invert_truthvalue (tem) : tem;
5154 /* On machines where the branch cost is expensive, if this is a
5155 short-circuited branch and the underlying object on both sides
5156 is the same, make a non-short-circuit operation. */
5157 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5158 && lhs != 0 && rhs != 0
5159 && (code == TRUTH_ANDIF_EXPR
5160 || code == TRUTH_ORIF_EXPR)
5161 && operand_equal_p (lhs, rhs, 0))
5163 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5164 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5165 which case we can't do this. */
5166 if (simple_operand_p (lhs))
5167 return build2 (code == TRUTH_ANDIF_EXPR
5168 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5169 type, op0, op1);
5171 else if (lang_hooks.decls.global_bindings_p () == 0
5172 && ! CONTAINS_PLACEHOLDER_P (lhs))
5174 tree common = save_expr (lhs);
5176 if (0 != (lhs = build_range_check (type, common,
5177 or_op ? ! in0_p : in0_p,
5178 low0, high0))
5179 && (0 != (rhs = build_range_check (type, common,
5180 or_op ? ! in1_p : in1_p,
5181 low1, high1))))
5183 if (strict_overflow_p)
5184 fold_overflow_warning (warnmsg,
5185 WARN_STRICT_OVERFLOW_COMPARISON);
5186 return build2 (code == TRUTH_ANDIF_EXPR
5187 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5188 type, lhs, rhs);
5193 return 0;
5196 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
5197 P-bit value. Arrange things so the extra bits will be set to zero if
5198 and only if C is sign-extended to its full width. If MASK is nonzero,
5199 it is an INTEGER_CST that should be AND'ed with the extra bits. */
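/* Worked example (assuming a 32-bit mode): in a signed 4-bit field,
   C == 10 (binary 1010) really denotes -6.  The shifts below isolate
   the sign bit, smear it across the upper 28 bits, and the final XOR
   produces 0xfffffffa, i.e. -6 sign-extended; a C with a clear sign
   bit comes back unchanged.  */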
5201 static tree
5202 unextend (tree c, int p, int unsignedp, tree mask)
5204 tree type = TREE_TYPE (c);
5205 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5206 tree temp;
5208 if (p == modesize || unsignedp)
5209 return c;
5211 /* We work by getting just the sign bit into the low-order bit, then
5212 into the high-order bit, then sign-extend. We then XOR that value
5213 with C. */
5214 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5215 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5217 /* We must use a signed type in order to get an arithmetic right shift.
5218 However, we must also avoid introducing accidental overflows, so that
5219 a subsequent call to integer_zerop will work. Hence we must
5220 do the type conversion here. At this point, the constant is either
5221 zero or one, and the conversion to a signed type can never overflow.
5222 We could get an overflow if this conversion is done anywhere else. */
5223 if (TYPE_UNSIGNED (type))
5224 temp = fold_convert (signed_type_for (type), temp);
5226 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5227 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5228 if (mask != 0)
5229 temp = const_binop (BIT_AND_EXPR, temp,
5230 fold_convert (TREE_TYPE (c), mask), 0);
5231 /* If necessary, convert the type back to match the type of C. */
5232 if (TYPE_UNSIGNED (type))
5233 temp = fold_convert (type, temp);
5235 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5238 /* Find ways of folding logical expressions of LHS and RHS:
5239 Try to merge two comparisons to the same innermost item.
5240 Look for range tests like "ch >= '0' && ch <= '9'".
5241 Look for combinations of simple terms on machines with expensive branches
5242 and evaluate the RHS unconditionally.
5244 For example, if we have p->a == 2 && p->b == 4 and we can make an
5245 object large enough to span both A and B, we can do this with a comparison
5246 against the object ANDed with the a mask.
5248 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5249 operations to do this with one comparison.
5251 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5252 function and the one above.
5254 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5255 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5257 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5258 two operands.
5260 We return the simplified tree or 0 if no optimization is possible. */
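/* Illustrative sketch (hypothetical struct, layout-dependent): for

     struct s { unsigned int a : 4; unsigned int b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can become a single load, mask and
   compare of the containing byte, roughly

     (*(unsigned char *) p & 0xff) == C

   where C packs 2 and 4 at the fields' bit positions; the packing is
   endian-dependent, hence the BYTES_BIG_ENDIAN adjustments below.  */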
5262 static tree
5263 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5265 /* If this is the "or" of two comparisons, we can do something if
5266 the comparisons are NE_EXPR. If this is the "and", we can do something
5267 if the comparisons are EQ_EXPR. I.e.,
5268 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5270 WANTED_CODE is this operation code. For single bit fields, we can
5271 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5272 comparison for one-bit fields. */
5274 enum tree_code wanted_code;
5275 enum tree_code lcode, rcode;
5276 tree ll_arg, lr_arg, rl_arg, rr_arg;
5277 tree ll_inner, lr_inner, rl_inner, rr_inner;
5278 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5279 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5280 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5281 HOST_WIDE_INT lnbitsize, lnbitpos;
5282 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5283 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5284 enum machine_mode lnmode;
5285 tree ll_mask, lr_mask, rl_mask, rr_mask;
5286 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5287 tree l_const, r_const;
5288 tree lntype, result;
5289 int first_bit, end_bit;
5290 int volatilep;
5291 tree orig_lhs = lhs, orig_rhs = rhs;
5292 enum tree_code orig_code = code;
5294 /* Start by getting the comparison codes. Fail if anything is volatile.
5295 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5296 it were surrounded with a NE_EXPR. */
5298 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5299 return 0;
5301 lcode = TREE_CODE (lhs);
5302 rcode = TREE_CODE (rhs);
5304 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5306 lhs = build2 (NE_EXPR, truth_type, lhs,
5307 build_int_cst (TREE_TYPE (lhs), 0));
5308 lcode = NE_EXPR;
5311 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5313 rhs = build2 (NE_EXPR, truth_type, rhs,
5314 build_int_cst (TREE_TYPE (rhs), 0));
5315 rcode = NE_EXPR;
5318 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5319 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5320 return 0;
5322 ll_arg = TREE_OPERAND (lhs, 0);
5323 lr_arg = TREE_OPERAND (lhs, 1);
5324 rl_arg = TREE_OPERAND (rhs, 0);
5325 rr_arg = TREE_OPERAND (rhs, 1);
5327 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5328 if (simple_operand_p (ll_arg)
5329 && simple_operand_p (lr_arg))
5331 tree result;
5332 if (operand_equal_p (ll_arg, rl_arg, 0)
5333 && operand_equal_p (lr_arg, rr_arg, 0))
5335 result = combine_comparisons (code, lcode, rcode,
5336 truth_type, ll_arg, lr_arg);
5337 if (result)
5338 return result;
5340 else if (operand_equal_p (ll_arg, rr_arg, 0)
5341 && operand_equal_p (lr_arg, rl_arg, 0))
5343 result = combine_comparisons (code, lcode,
5344 swap_tree_comparison (rcode),
5345 truth_type, ll_arg, lr_arg);
5346 if (result)
5347 return result;
5351 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5354 /* If the RHS can be evaluated unconditionally and its operands are
5355 simple, it wins to evaluate the RHS unconditionally on machines
5356 with expensive branches. In this case, this isn't a comparison
5357 that can be merged. Avoid doing this if the RHS is a floating-point
5358 comparison since those can trap. */
5360 if (BRANCH_COST >= 2
5361 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5362 && simple_operand_p (rl_arg)
5363 && simple_operand_p (rr_arg))
5365 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5366 if (code == TRUTH_OR_EXPR
5367 && lcode == NE_EXPR && integer_zerop (lr_arg)
5368 && rcode == NE_EXPR && integer_zerop (rr_arg)
5369 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5370 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5371 return build2 (NE_EXPR, truth_type,
5372 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5373 ll_arg, rl_arg),
5374 build_int_cst (TREE_TYPE (ll_arg), 0));
5376 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5377 if (code == TRUTH_AND_EXPR
5378 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5379 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5380 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5381 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5382 return build2 (EQ_EXPR, truth_type,
5383 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5384 ll_arg, rl_arg),
5385 build_int_cst (TREE_TYPE (ll_arg), 0));
5387 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5389 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5390 return build2 (code, truth_type, lhs, rhs);
5391 return NULL_TREE;
5395 /* See if the comparisons can be merged. Then get all the parameters for
5396 each side. */
5398 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5399 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5400 return 0;
5402 volatilep = 0;
5403 ll_inner = decode_field_reference (ll_arg,
5404 &ll_bitsize, &ll_bitpos, &ll_mode,
5405 &ll_unsignedp, &volatilep, &ll_mask,
5406 &ll_and_mask);
5407 lr_inner = decode_field_reference (lr_arg,
5408 &lr_bitsize, &lr_bitpos, &lr_mode,
5409 &lr_unsignedp, &volatilep, &lr_mask,
5410 &lr_and_mask);
5411 rl_inner = decode_field_reference (rl_arg,
5412 &rl_bitsize, &rl_bitpos, &rl_mode,
5413 &rl_unsignedp, &volatilep, &rl_mask,
5414 &rl_and_mask);
5415 rr_inner = decode_field_reference (rr_arg,
5416 &rr_bitsize, &rr_bitpos, &rr_mode,
5417 &rr_unsignedp, &volatilep, &rr_mask,
5418 &rr_and_mask);
5420 /* The inner operation on the lhs of each comparison must be the
5421 same if we are to be able to do anything.
5422 Then see if we have constants. If not, the same must be true for
5423 the rhs's. */
5424 if (volatilep || ll_inner == 0 || rl_inner == 0
5425 || ! operand_equal_p (ll_inner, rl_inner, 0))
5426 return 0;
5428 if (TREE_CODE (lr_arg) == INTEGER_CST
5429 && TREE_CODE (rr_arg) == INTEGER_CST)
5430 l_const = lr_arg, r_const = rr_arg;
5431 else if (lr_inner == 0 || rr_inner == 0
5432 || ! operand_equal_p (lr_inner, rr_inner, 0))
5433 return 0;
5434 else
5435 l_const = r_const = 0;
5437 /* If either comparison code is not correct for our logical operation,
5438 fail. However, we can convert a one-bit comparison against zero into
5439 the opposite comparison against that bit being set in the field. */
5441 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5442 if (lcode != wanted_code)
5444 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5446 /* Make the left operand unsigned, since we are only interested
5447 in the value of one bit. Otherwise we are doing the wrong
5448 thing below. */
5449 ll_unsignedp = 1;
5450 l_const = ll_mask;
5452 else
5453 return 0;
5456 /* This is analogous to the code for l_const above. */
5457 if (rcode != wanted_code)
5459 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5461 rl_unsignedp = 1;
5462 r_const = rl_mask;
5464 else
5465 return 0;
5468 /* See if we can find a mode that contains both fields being compared on
5469 the left. If we can't, fail. Otherwise, update all constants and masks
5470 to be relative to a field of that size. */
5471 first_bit = MIN (ll_bitpos, rl_bitpos);
5472 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5473 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5474 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5475 volatilep);
5476 if (lnmode == VOIDmode)
5477 return 0;
5479 lnbitsize = GET_MODE_BITSIZE (lnmode);
5480 lnbitpos = first_bit & ~ (lnbitsize - 1);
5481 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5482 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5484 if (BYTES_BIG_ENDIAN)
5486 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5487 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5490 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5491 size_int (xll_bitpos), 0);
5492 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5493 size_int (xrl_bitpos), 0);
5495 if (l_const)
5497 l_const = fold_convert (lntype, l_const);
5498 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5499 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5500 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5501 fold_build1 (BIT_NOT_EXPR,
5502 lntype, ll_mask),
5503 0)))
5505 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5507 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5510 if (r_const)
5512 r_const = fold_convert (lntype, r_const);
5513 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5514 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5515 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5516 fold_build1 (BIT_NOT_EXPR,
5517 lntype, rl_mask),
5518 0)))
5520 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5522 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5526 /* Handle the case of comparisons with constants. If there is something in
5527 common between the masks, those bits of the constants must be the same.
5528 If not, the condition is always false. Test for this to avoid generating
5529 incorrect code below. */
5530 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5531 if (! integer_zerop (result)
5532 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5533 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5535 if (wanted_code == NE_EXPR)
5537 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5538 return constant_boolean_node (true, truth_type);
5540 else
5542 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5543 return constant_boolean_node (false, truth_type);
5547 return NULL_TREE;
5550 /* Optimize a comparison with code CODE of a MIN_EXPR or MAX_EXPR (OP0)
5551 against a constant (OP1). */
5553 static tree
5554 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5556 tree arg0 = op0;
5557 enum tree_code op_code;
5558 tree comp_const;
5559 tree minmax_const;
5560 int consts_equal, consts_lt;
5561 tree inner;
5563 STRIP_SIGN_NOPS (arg0);
5565 op_code = TREE_CODE (arg0);
5566 minmax_const = TREE_OPERAND (arg0, 1);
5567 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5568 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5569 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5570 inner = TREE_OPERAND (arg0, 0);
5572 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5573 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5574 || TREE_CODE (comp_const) != INTEGER_CST
5575 || TREE_OVERFLOW (comp_const)
5576 || TREE_CODE (minmax_const) != INTEGER_CST
5577 || TREE_OVERFLOW (minmax_const))
5578 return NULL_TREE;
5580 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5581 and GT_EXPR, doing the rest with recursive calls using logical
5582 simplifications. */
5583 switch (code)
5585 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5587 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5588 type, op0, op1);
5589 if (tem)
5590 return invert_truthvalue (tem);
5591 return NULL_TREE;
5594 case GE_EXPR:
5595 return
5596 fold_build2 (TRUTH_ORIF_EXPR, type,
5597 optimize_minmax_comparison
5598 (EQ_EXPR, type, arg0, comp_const),
5599 optimize_minmax_comparison
5600 (GT_EXPR, type, arg0, comp_const));
5602 case EQ_EXPR:
5603 if (op_code == MAX_EXPR && consts_equal)
5604 /* MAX (X, 0) == 0 -> X <= 0 */
5605 return fold_build2 (LE_EXPR, type, inner, comp_const);
5607 else if (op_code == MAX_EXPR && consts_lt)
5608 /* MAX (X, 0) == 5 -> X == 5 */
5609 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5611 else if (op_code == MAX_EXPR)
5612 /* MAX (X, 0) == -1 -> false */
5613 return omit_one_operand (type, integer_zero_node, inner);
5615 else if (consts_equal)
5616 /* MIN (X, 0) == 0 -> X >= 0 */
5617 return fold_build2 (GE_EXPR, type, inner, comp_const);
5619 else if (consts_lt)
5620 /* MIN (X, 0) == 5 -> false */
5621 return omit_one_operand (type, integer_zero_node, inner);
5623 else
5624 /* MIN (X, 0) == -1 -> X == -1 */
5625 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5627 case GT_EXPR:
5628 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5629 /* MAX (X, 0) > 0 -> X > 0
5630 MAX (X, 0) > 5 -> X > 5 */
5631 return fold_build2 (GT_EXPR, type, inner, comp_const);
5633 else if (op_code == MAX_EXPR)
5634 /* MAX (X, 0) > -1 -> true */
5635 return omit_one_operand (type, integer_one_node, inner);
5637 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5638 /* MIN (X, 0) > 0 -> false
5639 MIN (X, 0) > 5 -> false */
5640 return omit_one_operand (type, integer_zero_node, inner);
5642 else
5643 /* MIN (X, 0) > -1 -> X > -1 */
5644 return fold_build2 (GT_EXPR, type, inner, comp_const);
5646 default:
5647 return NULL_TREE;
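/* Illustrative sketch of the folds above (a hypothetical standalone
   test, kept under #if 0 -- not built as part of this file):  */
#if 0
#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int
main (void)
{
  int x;
  for (x = -3; x <= 3; x++)
    {
      assert ((MAX (x, 0) == 0) == (x <= 0)); /* MAX (X, 0) == 0 -> X <= 0 */
      assert ((MAX (x, 0) > 0) == (x > 0));   /* MAX (X, 0) > 0  -> X > 0  */
      assert ((MIN (x, 0) == 0) == (x >= 0)); /* MIN (X, 0) == 0 -> X >= 0 */
    }
  return 0;
}
#endif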
5651 /* T is an integer expression that is being multiplied, divided, or taken a
5652 modulus (CODE says which and what kind of divide or modulus) by a
5653 constant C. See if we can eliminate that operation by folding it with
5654 other operations already in T. WIDE_TYPE, if non-null, is a type that
5655 should be used for the computation if wider than our type.
5657 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5658 (X * 2) + (Y * 4). We must, however, be assured that either the original
5659 expression would not overflow or that overflow is undefined for the type
5660 in the language in question.
5662 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5663 the machine has a multiply-accumulate insn or that this is part of an
5664 addressing calculation.
5666 If we return a non-null expression, it is an equivalent form of the
5667 original computation, but need not be in the original type.
5669 We set *STRICT_OVERFLOW_P to true if the return value depends on
5670 signed overflow being undefined. Otherwise we do not change
5671 *STRICT_OVERFLOW_P. */
5673 static tree
5674 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5675 bool *strict_overflow_p)
5677 /* To avoid exponential search depth, refuse to allow recursion past
5678 three levels. Beyond that (1) it's highly unlikely that we'll find
5679 something interesting and (2) we've probably processed it before
5680 when we built the inner expression. */
5682 static int depth;
5683 tree ret;
5685 if (depth > 3)
5686 return NULL;
5688 depth++;
5689 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5690 depth--;
5692 return ret;
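/* A minimal standalone sketch (hypothetical, under #if 0) checking the
   sample identities from the comment above on concrete values, assuming
   no overflow occurs:  */
#if 0
#include <assert.h>

int
main (void)
{
  long x = 5, y = 7;
  /* ((X * 8) + (Y * 16)) / 4  ->  (X * 2) + (Y * 4).  */
  assert (((x * 8) + (y * 16)) / 4 == (x * 2) + (y * 4));
  /* (X + 7) * 4  ->  X * 4 + 28.  */
  assert ((x + 7) * 4 == x * 4 + 28);
  return 0;
}
#endif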
5695 static tree
5696 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5697 bool *strict_overflow_p)
5699 tree type = TREE_TYPE (t);
5700 enum tree_code tcode = TREE_CODE (t);
5701 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5702 > GET_MODE_SIZE (TYPE_MODE (type)))
5703 ? wide_type : type);
5704 tree t1, t2;
5705 int same_p = tcode == code;
5706 tree op0 = NULL_TREE, op1 = NULL_TREE;
5707 bool sub_strict_overflow_p;
5709 /* Don't deal with constants of zero here; they confuse the code below. */
5710 if (integer_zerop (c))
5711 return NULL_TREE;
5713 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5714 op0 = TREE_OPERAND (t, 0);
5716 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5717 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5719 /* Note that we need not handle conditional operations here since fold
5720 already handles those cases. So just do arithmetic here. */
5721 switch (tcode)
5723 case INTEGER_CST:
5724 /* For a constant, we can always simplify if we are a multiply
5725 or (for divide and modulus) if it is a multiple of our constant. */
5726 if (code == MULT_EXPR
5727 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5728 return const_binop (code, fold_convert (ctype, t),
5729 fold_convert (ctype, c), 0);
5730 break;
5732 CASE_CONVERT: case NON_LVALUE_EXPR:
5733 /* If op0 is an expression ... */
5734 if ((COMPARISON_CLASS_P (op0)
5735 || UNARY_CLASS_P (op0)
5736 || BINARY_CLASS_P (op0)
5737 || VL_EXP_CLASS_P (op0)
5738 || EXPRESSION_CLASS_P (op0))
5739 /* ... and has wrapping overflow, and its type is smaller
5740 than ctype, then we cannot pass through as widening. */
5741 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5742 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5743 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5744 && (TYPE_PRECISION (ctype)
5745 > TYPE_PRECISION (TREE_TYPE (op0))))
5746 /* ... or this is a truncation (t is narrower than op0),
5747 then we cannot pass through this narrowing. */
5748 || (TYPE_PRECISION (type)
5749 < TYPE_PRECISION (TREE_TYPE (op0)))
5750 /* ... or signedness changes for division or modulus,
5751 then we cannot pass through this conversion. */
5752 || (code != MULT_EXPR
5753 && (TYPE_UNSIGNED (ctype)
5754 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5755 /* ... or has undefined overflow while the converted to
5756 type has not, we cannot do the operation in the inner type
5757 as that would introduce undefined overflow. */
5758 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5759 && !TYPE_OVERFLOW_UNDEFINED (type))))
5760 break;
5762 /* Pass the constant down and see if we can make a simplification. If
5763 we can, replace this expression with the inner simplification for
5764 possible later conversion to our or some other type. */
5765 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5766 && TREE_CODE (t2) == INTEGER_CST
5767 && !TREE_OVERFLOW (t2)
5768 && (0 != (t1 = extract_muldiv (op0, t2, code,
5769 code == MULT_EXPR
5770 ? ctype : NULL_TREE,
5771 strict_overflow_p))))
5772 return t1;
5773 break;
5775 case ABS_EXPR:
5776 /* If widening the type changes it from signed to unsigned, then we
5777 must avoid building ABS_EXPR itself as unsigned. */
5778 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5780 tree cstype = (*signed_type_for) (ctype);
5781 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5782 != 0)
5784 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5785 return fold_convert (ctype, t1);
5787 break;
5789 /* If the constant is negative, we cannot simplify this. */
5790 if (tree_int_cst_sgn (c) == -1)
5791 break;
5792 /* FALLTHROUGH */
5793 case NEGATE_EXPR:
5794 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5795 != 0)
5796 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5797 break;
5799 case MIN_EXPR: case MAX_EXPR:
5800 /* If widening the type changes the signedness, then we can't perform
5801 this optimization as that changes the result. */
5802 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5803 break;
5805 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5806 sub_strict_overflow_p = false;
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5808 &sub_strict_overflow_p)) != 0
5809 && (t2 = extract_muldiv (op1, c, code, wide_type,
5810 &sub_strict_overflow_p)) != 0)
5812 if (tree_int_cst_sgn (c) < 0)
5813 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5814 if (sub_strict_overflow_p)
5815 *strict_overflow_p = true;
5816 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5817 fold_convert (ctype, t2));
5819 break;
5821 case LSHIFT_EXPR: case RSHIFT_EXPR:
5822 /* If the second operand is constant, this is a multiplication
5823 or floor division by a power of two, so we can treat it that
5824 way unless the multiplier or divisor overflows. Signed
5825 left-shift overflow is implementation-defined rather than
5826 undefined in C90, so do not convert signed left shift into
5827 multiplication. */
5828 if (TREE_CODE (op1) == INTEGER_CST
5829 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5830 /* const_binop may not detect overflow correctly,
5831 so check for it explicitly here. */
5832 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5833 && TREE_INT_CST_HIGH (op1) == 0
5834 && 0 != (t1 = fold_convert (ctype,
5835 const_binop (LSHIFT_EXPR,
5836 size_one_node,
5837 op1, 0)))
5838 && !TREE_OVERFLOW (t1))
5839 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5840 ? MULT_EXPR : FLOOR_DIV_EXPR,
5841 ctype, fold_convert (ctype, op0), t1),
5842 c, code, wide_type, strict_overflow_p);
5843 break;
5845 case PLUS_EXPR: case MINUS_EXPR:
5846 /* See if we can eliminate the operation on both sides. If we can, we
5847 can return a new PLUS or MINUS. If we can't, the only remaining
5848 cases where we can do anything are if the second operand is a
5849 constant. */
5850 sub_strict_overflow_p = false;
5851 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5852 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5853 if (t1 != 0 && t2 != 0
5854 && (code == MULT_EXPR
5855 /* If not multiplication, we can only do this if both operands
5856 are divisible by c. */
5857 || (multiple_of_p (ctype, op0, c)
5858 && multiple_of_p (ctype, op1, c))))
5860 if (sub_strict_overflow_p)
5861 *strict_overflow_p = true;
5862 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5863 fold_convert (ctype, t2));
5866 /* If this was a subtraction, negate OP1 and set it to be an addition.
5867 This simplifies the logic below. */
5868 if (tcode == MINUS_EXPR)
5869 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5871 if (TREE_CODE (op1) != INTEGER_CST)
5872 break;
5874 /* If either OP1 or C is negative, this optimization is not safe for
5875 some of the division and remainder types while for others we need
5876 to change the code. */
5877 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5879 if (code == CEIL_DIV_EXPR)
5880 code = FLOOR_DIV_EXPR;
5881 else if (code == FLOOR_DIV_EXPR)
5882 code = CEIL_DIV_EXPR;
5883 else if (code != MULT_EXPR
5884 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5885 break;
5888 /* If it's a multiply or a division/modulus operation of a multiple
5889 of our constant, do the operation and verify it doesn't overflow. */
5890 if (code == MULT_EXPR
5891 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5893 op1 = const_binop (code, fold_convert (ctype, op1),
5894 fold_convert (ctype, c), 0);
5895 /* We allow the constant to overflow with wrapping semantics. */
5896 if (op1 == 0
5897 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5898 break;
5900 else
5901 break;
5903 /* If we have an unsigned type that is not a sizetype, we cannot widen
5904 the operation since it will change the result if the original
5905 computation overflowed. */
5906 if (TYPE_UNSIGNED (ctype)
5907 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5908 && ctype != type)
5909 break;
5911 /* If we were able to eliminate our operation from the first side,
5912 apply our operation to the second side and reform the PLUS. */
5913 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5916 /* The last case is if we are a multiply. In that case, we can
5917 apply the distributive law to commute the multiply and addition
5918 if the multiplication of the constants doesn't overflow. */
5919 if (code == MULT_EXPR)
5920 return fold_build2 (tcode, ctype,
5921 fold_build2 (code, ctype,
5922 fold_convert (ctype, op0),
5923 fold_convert (ctype, c)),
5924 op1);
5926 break;
5928 case MULT_EXPR:
5929 /* We have a special case here if we are doing something like
5930 (C * 8) % 4 since we know that's zero. */
5931 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5932 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5933 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5934 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5935 return omit_one_operand (type, integer_zero_node, op0);
5937 /* ... fall through ... */
5939 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5940 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5941 /* If we can extract our operation from the LHS, do so and return a
5942 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5943 do something only if the second operand is a constant. */
5944 if (same_p
5945 && (t1 = extract_muldiv (op0, c, code, wide_type,
5946 strict_overflow_p)) != 0)
5947 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5948 fold_convert (ctype, op1));
5949 else if (tcode == MULT_EXPR && code == MULT_EXPR
5950 && (t1 = extract_muldiv (op1, c, code, wide_type,
5951 strict_overflow_p)) != 0)
5952 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5953 fold_convert (ctype, t1));
5954 else if (TREE_CODE (op1) != INTEGER_CST)
5955 return 0;
5957 /* If these are the same operation types, we can associate them
5958 assuming no overflow. */
5959 if (tcode == code
5960 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5961 fold_convert (ctype, c), 1))
5962 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5963 TREE_INT_CST_HIGH (t1),
5964 (TYPE_UNSIGNED (ctype)
5965 && tcode != MULT_EXPR) ? -1 : 1,
5966 TREE_OVERFLOW (t1)))
5967 && !TREE_OVERFLOW (t1))
5968 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5970 /* If these operations "cancel" each other, we have the main
5971 optimizations of this pass, which occur when either constant is a
5972 multiple of the other, in which case we replace this with either an
5973 operation of CODE or TCODE.
5975 If we have an unsigned type that is not a sizetype, we cannot do
5976 this since it will change the result if the original computation
5977 overflowed. */
5978 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5979 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5980 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5981 || (tcode == MULT_EXPR
5982 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5983 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5984 && code != MULT_EXPR)))
5986 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5989 *strict_overflow_p = true;
5990 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5991 fold_convert (ctype,
5992 const_binop (TRUNC_DIV_EXPR,
5993 op1, c, 0)));
5995 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6002 c, op1, 0)));
6005 break;
6007 default:
6008 break;
6011 return 0;
6014 /* Return a node which has the indicated constant VALUE (either 0 or
6015 1), and is of the indicated TYPE. */
6017 tree
6018 constant_boolean_node (int value, tree type)
6020 if (type == integer_type_node)
6021 return value ? integer_one_node : integer_zero_node;
6022 else if (type == boolean_type_node)
6023 return value ? boolean_true_node : boolean_false_node;
6024 else
6025 return build_int_cst (type, value);
6029 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6030 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6031 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6032 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6033 COND is the first argument to CODE; otherwise (as in the example
6034 given here), it is the second argument. TYPE is the type of the
6035 original expression. Return NULL_TREE if no simplification is
6036 possible. */
6038 static tree
6039 fold_binary_op_with_conditional_arg (enum tree_code code,
6040 tree type, tree op0, tree op1,
6041 tree cond, tree arg, int cond_first_p)
6043 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6044 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6045 tree test, true_value, false_value;
6046 tree lhs = NULL_TREE;
6047 tree rhs = NULL_TREE;
6049 /* This transformation is only worthwhile if we don't have to wrap
6050 arg in a SAVE_EXPR, and the operation can be simplified on at least
6051 one of the branches once it's pushed inside the COND_EXPR. */
6052 if (!TREE_CONSTANT (arg))
6053 return NULL_TREE;
6055 if (TREE_CODE (cond) == COND_EXPR)
6057 test = TREE_OPERAND (cond, 0);
6058 true_value = TREE_OPERAND (cond, 1);
6059 false_value = TREE_OPERAND (cond, 2);
6060 /* If this operand is a throw-expression, then it does not make
6061 sense to try to perform a logical or arithmetic operation
6062 involving it. */
6063 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6064 lhs = true_value;
6065 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6066 rhs = false_value;
6068 else
6070 tree testtype = TREE_TYPE (cond);
6071 test = cond;
6072 true_value = constant_boolean_node (true, testtype);
6073 false_value = constant_boolean_node (false, testtype);
6076 arg = fold_convert (arg_type, arg);
6077 if (lhs == 0)
6079 true_value = fold_convert (cond_type, true_value);
6080 if (cond_first_p)
6081 lhs = fold_build2 (code, type, true_value, arg);
6082 else
6083 lhs = fold_build2 (code, type, arg, true_value);
6085 if (rhs == 0)
6087 false_value = fold_convert (cond_type, false_value);
6088 if (cond_first_p)
6089 rhs = fold_build2 (code, type, false_value, arg);
6090 else
6091 rhs = fold_build2 (code, type, arg, false_value);
6094 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6095 return fold_convert (type, test);
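/* Standalone sketch (hypothetical, under #if 0) of the transform above
   on plain C values:  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 10, b = 1, x = 1, y = 2;
  /* a + (b ? x : y)  ==  b ? (a + x) : (a + y).  */
  assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
  /* a + (x < y)  ==  (x < y) ? (a + 1) : (a + 0).  */
  assert (a + (x < y) == ((x < y) ? (a + 1) : (a + 0)));
  return 0;
}
#endif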
6099 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6101 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6102 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6103 ADDEND is the same as X.
6105 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6106 and finite. The problematic cases are when X is zero, and its mode
6107 has signed zeros. In the case of rounding towards -infinity,
6108 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6109 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6111 bool
6112 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6114 if (!real_zerop (addend))
6115 return false;
6117 /* Don't allow the fold with -fsignaling-nans. */
6118 if (HONOR_SNANS (TYPE_MODE (type)))
6119 return false;
6121 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6123 return true;
6125 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6126 if (TREE_CODE (addend) == REAL_CST
6127 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6128 negate = !negate;
6130 /* The mode has signed zeros, and we have to honor their sign.
6131 In this situation, there is only one case we can return true for.
6132 X - 0 is the same as X unless rounding towards -infinity is
6133 supported. */
6134 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
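/* The signed-zero cases above can be observed directly (hypothetical
   standalone sketch, assuming IEEE arithmetic and the default
   round-to-nearest mode):  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  /* -0.0 + 0.0 yields +0.0, so X + 0.0 is not the same as X here.  */
  assert (signbit (x) && !signbit (x + 0.0));
  /* X - 0.0 preserves the sign of zero in this rounding mode.  */
  assert (signbit (x - 0.0));
  return 0;
}
#endif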
6137 /* Subroutine of fold() that checks comparisons of built-in math
6138 functions against real constants.
6140 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6141 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6142 is the type of the result and ARG0 and ARG1 are the operands of the
6143 comparison. ARG1 must be a TREE_REAL_CST.
6145 The function returns the constant folded tree if a simplification
6146 can be made, and NULL_TREE otherwise. */
6148 static tree
6149 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6150 tree type, tree arg0, tree arg1)
6152 REAL_VALUE_TYPE c;
6154 if (BUILTIN_SQRT_P (fcode))
6156 tree arg = CALL_EXPR_ARG (arg0, 0);
6157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6159 c = TREE_REAL_CST (arg1);
6160 if (REAL_VALUE_NEGATIVE (c))
6162 /* sqrt(x) < y and sqrt(x) == y are always false, if y is negative. */
6163 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6164 return omit_one_operand (type, integer_zero_node, arg);
6166 /* sqrt(x) > y is always true, if y is negative and we
6167 don't care about NaNs, i.e. negative values of x. */
6168 if (code == NE_EXPR || !HONOR_NANS (mode))
6169 return omit_one_operand (type, integer_one_node, arg);
6171 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6172 return fold_build2 (GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg), dconst0));
6175 else if (code == GT_EXPR || code == GE_EXPR)
6177 REAL_VALUE_TYPE c2;
6179 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6180 real_convert (&c2, mode, &c2);
6182 if (REAL_VALUE_ISINF (c2))
6184 /* sqrt(x) > y is x == +Inf, when y is very large. */
6185 if (HONOR_INFINITIES (mode))
6186 return fold_build2 (EQ_EXPR, type, arg,
6187 build_real (TREE_TYPE (arg), c2));
6189 /* sqrt(x) > y is always false, when y is very large
6190 and we don't care about infinities. */
6191 return omit_one_operand (type, integer_zero_node, arg);
6194 /* sqrt(x) > c is the same as x > c*c. */
6195 return fold_build2 (code, type, arg,
6196 build_real (TREE_TYPE (arg), c2));
6198 else if (code == LT_EXPR || code == LE_EXPR)
6200 REAL_VALUE_TYPE c2;
6202 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6203 real_convert (&c2, mode, &c2);
6205 if (REAL_VALUE_ISINF (c2))
6207 /* sqrt(x) < y is always true, when y is a very large
6208 value and we don't care about NaNs or Infinities. */
6209 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6210 return omit_one_operand (type, integer_one_node, arg);
6212 /* sqrt(x) < y is x != +Inf when y is very large and we
6213 don't care about NaNs. */
6214 if (! HONOR_NANS (mode))
6215 return fold_build2 (NE_EXPR, type, arg,
6216 build_real (TREE_TYPE (arg), c2));
6218 /* sqrt(x) < y is x >= 0 when y is very large and we
6219 don't care about Infinities. */
6220 if (! HONOR_INFINITIES (mode))
6221 return fold_build2 (GE_EXPR, type, arg,
6222 build_real (TREE_TYPE (arg), dconst0));
6224 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6225 if (lang_hooks.decls.global_bindings_p () != 0
6226 || CONTAINS_PLACEHOLDER_P (arg))
6227 return NULL_TREE;
6229 arg = save_expr (arg);
6230 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6231 fold_build2 (GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6233 dconst0)),
6234 fold_build2 (NE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6236 c2)));
6239 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2 (code, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6244 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6245 if (lang_hooks.decls.global_bindings_p () == 0
6246 && ! CONTAINS_PLACEHOLDER_P (arg))
6248 arg = save_expr (arg);
6249 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6250 fold_build2 (GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6252 dconst0)),
6253 fold_build2 (code, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 c2)));
6260 return NULL_TREE;
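/* Sketch of the sqrt comparisons above on sample values (hypothetical,
   under #if 0; near the boundary the rounding of c*c matters, so this
   only spot-checks values well away from it):  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = 10.0, c = 3.0;
  /* sqrt(x) > c is x > c*c for nonnegative c, ignoring NaNs.  */
  assert ((sqrt (x) > c) == (x > c * c));
  /* sqrt(x) > y is always true for negative y and nonnegative x.  */
  assert (sqrt (x) > -1.0);
  return 0;
}
#endif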
6263 /* Subroutine of fold() that optimizes comparisons against Infinities,
6264 either +Inf or -Inf.
6266 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6267 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6268 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6270 The function returns the constant folded tree if a simplification
6271 can be made, and NULL_TREE otherwise. */
6273 static tree
6274 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6276 enum machine_mode mode;
6277 REAL_VALUE_TYPE max;
6278 tree temp;
6279 bool neg;
6281 mode = TYPE_MODE (TREE_TYPE (arg0));
6283 /* For negative infinity swap the sense of the comparison. */
6284 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6285 if (neg)
6286 code = swap_tree_comparison (code);
6288 switch (code)
6290 case GT_EXPR:
6291 /* x > +Inf is always false, if we ignore sNaNs. */
6292 if (HONOR_SNANS (mode))
6293 return NULL_TREE;
6294 return omit_one_operand (type, integer_zero_node, arg0);
6296 case LE_EXPR:
6297 /* x <= +Inf is always true, if we don't care about NaNs. */
6298 if (! HONOR_NANS (mode))
6299 return omit_one_operand (type, integer_one_node, arg0);
6301 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6302 if (lang_hooks.decls.global_bindings_p () == 0
6303 && ! CONTAINS_PLACEHOLDER_P (arg0))
6305 arg0 = save_expr (arg0);
6306 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6308 break;
6310 case EQ_EXPR:
6311 case GE_EXPR:
6312 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6313 real_maxval (&max, neg, mode);
6314 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6315 arg0, build_real (TREE_TYPE (arg0), max));
6317 case LT_EXPR:
6318 /* x < +Inf is always equal to x <= DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6323 case NE_EXPR:
6324 /* x != +Inf is always equal to !(x > DBL_MAX). */
6325 real_maxval (&max, neg, mode);
6326 if (! HONOR_NANS (mode))
6327 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6330 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6334 default:
6335 break;
6338 return NULL_TREE;
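/* The DBL_MAX rewrites above, spot-checked (hypothetical standalone
   sketch, assuming IEEE doubles):  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double inf = INFINITY, y = 42.0;
  /* x >= +Inf is x > DBL_MAX, and x < +Inf is x <= DBL_MAX.  */
  assert ((inf >= INFINITY) == (inf > DBL_MAX));
  assert ((y >= INFINITY) == (y > DBL_MAX));
  assert ((y < INFINITY) == (y <= DBL_MAX));
  return 0;
}
#endif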
6341 /* Subroutine of fold() that optimizes comparisons of a division by
6342 a nonzero integer constant against an integer constant, i.e.
6343 X/C1 op C2.
6345 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6346 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6347 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6349 The function returns the constant folded tree if a simplification
6350 can be made, and NULL_TREE otherwise. */
6352 static tree
6353 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6355 tree prod, tmp, hi, lo;
6356 tree arg00 = TREE_OPERAND (arg0, 0);
6357 tree arg01 = TREE_OPERAND (arg0, 1);
6358 unsigned HOST_WIDE_INT lpart;
6359 HOST_WIDE_INT hpart;
6360 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6361 bool neg_overflow;
6362 int overflow;
6364 /* We have to do this the hard way to detect unsigned overflow.
6365 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6366 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6367 TREE_INT_CST_HIGH (arg01),
6368 TREE_INT_CST_LOW (arg1),
6369 TREE_INT_CST_HIGH (arg1),
6370 &lpart, &hpart, unsigned_p);
6371 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6372 -1, overflow);
6373 neg_overflow = false;
6375 if (unsigned_p)
6377 tmp = int_const_binop (MINUS_EXPR, arg01,
6378 build_int_cst (TREE_TYPE (arg01), 1), 0);
6379 lo = prod;
6381 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6382 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6383 TREE_INT_CST_HIGH (prod),
6384 TREE_INT_CST_LOW (tmp),
6385 TREE_INT_CST_HIGH (tmp),
6386 &lpart, &hpart, unsigned_p);
6387 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6388 -1, overflow | TREE_OVERFLOW (prod));
6390 else if (tree_int_cst_sgn (arg01) >= 0)
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1), 0);
6394 switch (tree_int_cst_sgn (arg1))
6396 case -1:
6397 neg_overflow = true;
6398 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6399 hi = prod;
6400 break;
6402 case 0:
6403 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6404 hi = tmp;
6405 break;
6407 case 1:
6408 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6409 lo = prod;
6410 break;
6412 default:
6413 gcc_unreachable ();
6416 else
6418 /* A negative divisor reverses the relational operators. */
6419 code = swap_tree_comparison (code);
6421 tmp = int_const_binop (PLUS_EXPR, arg01,
6422 build_int_cst (TREE_TYPE (arg01), 1), 0);
6423 switch (tree_int_cst_sgn (arg1))
6425 case -1:
6426 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6427 lo = prod;
6428 break;
6430 case 0:
6431 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6432 lo = tmp;
6433 break;
6435 case 1:
6436 neg_overflow = true;
6437 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6438 hi = prod;
6439 break;
6441 default:
6442 gcc_unreachable ();
6446 switch (code)
6448 case EQ_EXPR:
6449 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6450 return omit_one_operand (type, integer_zero_node, arg00);
6451 if (TREE_OVERFLOW (hi))
6452 return fold_build2 (GE_EXPR, type, arg00, lo);
6453 if (TREE_OVERFLOW (lo))
6454 return fold_build2 (LE_EXPR, type, arg00, hi);
6455 return build_range_check (type, arg00, 1, lo, hi);
6457 case NE_EXPR:
6458 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6459 return omit_one_operand (type, integer_one_node, arg00);
6460 if (TREE_OVERFLOW (hi))
6461 return fold_build2 (LT_EXPR, type, arg00, lo);
6462 if (TREE_OVERFLOW (lo))
6463 return fold_build2 (GT_EXPR, type, arg00, hi);
6464 return build_range_check (type, arg00, 0, lo, hi);
6466 case LT_EXPR:
6467 if (TREE_OVERFLOW (lo))
6469 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6470 return omit_one_operand (type, tmp, arg00);
6472 return fold_build2 (LT_EXPR, type, arg00, lo);
6474 case LE_EXPR:
6475 if (TREE_OVERFLOW (hi))
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand (type, tmp, arg00);
6480 return fold_build2 (LE_EXPR, type, arg00, hi);
6482 case GT_EXPR:
6483 if (TREE_OVERFLOW (hi))
6485 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6486 return omit_one_operand (type, tmp, arg00);
6488 return fold_build2 (GT_EXPR, type, arg00, hi);
6490 case GE_EXPR:
6491 if (TREE_OVERFLOW (lo))
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand (type, tmp, arg00);
6496 return fold_build2 (GE_EXPR, type, arg00, lo);
6498 default:
6499 break;
6502 return NULL_TREE;
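/* Worked instance of the range check built above (hypothetical sketch):
   for unsigned X, X/4 == 3 has lo = prod = 4*3 = 12 and
   hi = lo + (4 - 1) = 15.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned x;
  for (x = 0; x < 64; x++)
    assert ((x / 4 == 3) == (x >= 12 && x <= 15));
  return 0;
}
#endif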
6506 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6507 equality/inequality test, then return a simplified form of the test
6508 using a sign test. Otherwise return NULL_TREE. TYPE is the desired
6509 result type. */
6511 static tree
6512 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6513 tree result_type)
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6530 tree stype = signed_type_for (TREE_TYPE (arg00));
6531 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6532 result_type, fold_convert (stype, arg00),
6533 build_int_cst (stype, 0));
6537 return NULL_TREE;
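/* Sketch of the sign test above (hypothetical, assuming the usual
   two's-complement representation):  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int a;
  for (a = -3; a <= 3; a++)
    /* (A & sign-bit) != 0  ->  A < 0.  */
    assert (((a & INT_MIN) != 0) == (a < 0));
  return 0;
}
#endif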
6540 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6541 equality/inequality test, then return a simplified form of
6542 the test using shifts and logical operations. Otherwise return
6543 NULL. TYPE is the desired result type. */
6545 tree
6546 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6547 tree result_type)
6549 /* If this is testing a single bit, we can optimize the test. */
6550 if ((code == NE_EXPR || code == EQ_EXPR)
6551 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6552 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6554 tree inner = TREE_OPERAND (arg0, 0);
6555 tree type = TREE_TYPE (arg0);
6556 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6557 enum machine_mode operand_mode = TYPE_MODE (type);
6558 int ops_unsigned;
6559 tree signed_type, unsigned_type, intermediate_type;
6560 tree tem, one;
6562 /* First, see if we can fold the single bit test into a sign-bit
6563 test. */
6564 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6565 result_type);
6566 if (tem)
6567 return tem;
6569 /* Otherwise we have (A & C) != 0 where C is a single bit,
6570 convert that into ((A >> C2) & 1), where C2 = log2(C).
6571 Similarly for (A & C) == 0. */
6573 /* If INNER is a right shift by a constant and the shift count plus
6574 BITNUM does not overflow, adjust BITNUM and INNER. */
6575 if (TREE_CODE (inner) == RSHIFT_EXPR
6576 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6577 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6578 && bitnum < TYPE_PRECISION (type)
6579 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6580 bitnum - TYPE_PRECISION (type)))
6582 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6583 inner = TREE_OPERAND (inner, 0);
6586 /* If we are going to be able to omit the AND below, we must do our
6587 operations as unsigned. If we must use the AND, we have a choice.
6588 Normally unsigned is faster, but for some machines signed is. */
6589 #ifdef LOAD_EXTEND_OP
6590 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6591 && !flag_syntax_only) ? 0 : 1;
6592 #else
6593 ops_unsigned = 1;
6594 #endif
6596 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6597 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6598 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6599 inner = fold_convert (intermediate_type, inner);
6601 if (bitnum != 0)
6602 inner = build2 (RSHIFT_EXPR, intermediate_type,
6603 inner, size_int (bitnum));
6605 one = build_int_cst (intermediate_type, 1);
6607 if (code == EQ_EXPR)
6608 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6610 /* Put the AND last so it can combine with more things. */
6611 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6613 /* Make sure to return the proper type. */
6614 inner = fold_convert (result_type, inner);
6616 return inner;
6618 return NULL_TREE;
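/* The shift-and-mask form built above, spot-checked (hypothetical
   sketch) for C = 1 << 2:  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a;
  for (a = 0; a < 16; a++)
    /* (A & C) != 0  ->  (A >> C2) & 1 with C2 = log2(C) = 2.  */
    assert (((a & 4) != 0) == ((a >> 2) & 1));
  return 0;
}
#endif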
6621 /* Check whether we are allowed to reorder operands arg0 and arg1,
6622 such that the evaluation of arg1 occurs before arg0. */
6624 static bool
6625 reorder_operands_p (const_tree arg0, const_tree arg1)
6627 if (! flag_evaluation_order)
6628 return true;
6629 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6630 return true;
6631 return ! TREE_SIDE_EFFECTS (arg0)
6632 && ! TREE_SIDE_EFFECTS (arg1);
6635 /* Test whether it is preferable to swap two operands, ARG0 and
6636 ARG1, for example because ARG0 is an integer constant and ARG1
6637 isn't. If REORDER is true, only recommend swapping if we can
6638 evaluate the operands in reverse order. */
6640 bool
6641 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6643 STRIP_SIGN_NOPS (arg0);
6644 STRIP_SIGN_NOPS (arg1);
6646 if (TREE_CODE (arg1) == INTEGER_CST)
6647 return 0;
6648 if (TREE_CODE (arg0) == INTEGER_CST)
6649 return 1;
6651 if (TREE_CODE (arg1) == REAL_CST)
6652 return 0;
6653 if (TREE_CODE (arg0) == REAL_CST)
6654 return 1;
6656 if (TREE_CODE (arg1) == FIXED_CST)
6657 return 0;
6658 if (TREE_CODE (arg0) == FIXED_CST)
6659 return 1;
6661 if (TREE_CODE (arg1) == COMPLEX_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == COMPLEX_CST)
6664 return 1;
6666 if (TREE_CONSTANT (arg1))
6667 return 0;
6668 if (TREE_CONSTANT (arg0))
6669 return 1;
6671 if (optimize_size)
6672 return 0;
6674 if (reorder && flag_evaluation_order
6675 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6676 return 0;
6678 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6679 for commutative and comparison operators. Ensuring a canonical
6680 form allows the optimizers to find additional redundancies without
6681 having to explicitly check for both orderings. */
6682 if (TREE_CODE (arg0) == SSA_NAME
6683 && TREE_CODE (arg1) == SSA_NAME
6684 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6685 return 1;
6687 /* Put SSA_NAMEs last. */
6688 if (TREE_CODE (arg1) == SSA_NAME)
6689 return 0;
6690 if (TREE_CODE (arg0) == SSA_NAME)
6691 return 1;
6693 /* Put variables last. */
6694 if (DECL_P (arg1))
6695 return 0;
6696 if (DECL_P (arg0))
6697 return 1;
6699 return 0;
6702 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6703 ARG0 is extended to a wider type. */
6705 static tree
6706 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6708 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6709 tree arg1_unw;
6710 tree shorter_type, outer_type;
6711 tree min, max;
6712 bool above, below;
6714 if (arg0_unw == arg0)
6715 return NULL_TREE;
6716 shorter_type = TREE_TYPE (arg0_unw);
6718 #ifdef HAVE_canonicalize_funcptr_for_compare
6719 /* Disable this optimization if we're casting a function pointer
6720 type on targets that require function pointer canonicalization. */
6721 if (HAVE_canonicalize_funcptr_for_compare
6722 && TREE_CODE (shorter_type) == POINTER_TYPE
6723 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6724 return NULL_TREE;
6725 #endif
6727 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6728 return NULL_TREE;
6730 arg1_unw = get_unwidened (arg1, NULL_TREE);
6732 /* If possible, express the comparison in the shorter mode. */
6733 if ((code == EQ_EXPR || code == NE_EXPR
6734 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6735 && (TREE_TYPE (arg1_unw) == shorter_type
6736 || (TYPE_PRECISION (shorter_type)
6737 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6738 || ((TYPE_PRECISION (shorter_type)
6739 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6745 && int_fits_type_p (arg1_unw, shorter_type))))
6746 return fold_build2 (code, type, arg0_unw,
6747 fold_convert (shorter_type, arg1_unw));
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
6752 return NULL_TREE;
6754 /* If we are comparing with an integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 max, arg1_unw));
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 arg1_unw, min));
6765 switch (code)
6767 case EQ_EXPR:
6768 if (above || below)
6769 return omit_one_operand (type, integer_zero_node, arg0);
6770 break;
6772 case NE_EXPR:
6773 if (above || below)
6774 return omit_one_operand (type, integer_one_node, arg0);
6775 break;
6777 case LT_EXPR:
6778 case LE_EXPR:
6779 if (above)
6780 return omit_one_operand (type, integer_one_node, arg0);
6781 else if (below)
6782 return omit_one_operand (type, integer_zero_node, arg0);
6784 case GT_EXPR:
6785 case GE_EXPR:
6786 if (above)
6787 return omit_one_operand (type, integer_zero_node, arg0);
6788 else if (below)
6789 return omit_one_operand (type, integer_one_node, arg0);
6791 default:
6792 break;
6795 return NULL_TREE;
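/* Sketch of both outcomes above (hypothetical, assuming an 8-bit
   signed char):  */
#if 0
#include <assert.h>

int
main (void)
{
  signed char x = -128;
  for (;;)
    {
      /* 1000 does not fit in signed char, so (int) x < 1000 is
	 known to be true.  */
      assert ((int) x < 1000);
      /* 100 fits, so the comparison can be done in the narrow type.  */
      assert (((int) x < 100) == (x < 100));
      if (x == 127)
	break;
      x++;
    }
  return 0;
}
#endif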
6798 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6801 static tree
6802 fold_sign_changed_comparison (enum tree_code code, tree type,
6803 tree arg0, tree arg1)
6805 tree arg0_inner;
6806 tree inner_type, outer_type;
6808 if (!CONVERT_EXPR_P (arg0))
6809 return NULL_TREE;
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 return NULL_TREE;
6822 #endif
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6825 return NULL_TREE;
6827 /* If the conversion is from an integral subtype to its basetype
6828 leave it alone. */
6829 if (TREE_TYPE (inner_type) == outer_type)
6830 return NULL_TREE;
6832 if (TREE_CODE (arg1) != INTEGER_CST
6833 && !(CONVERT_EXPR_P (arg1)
6834 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6835 return NULL_TREE;
6837 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6838 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6839 && code != NE_EXPR
6840 && code != EQ_EXPR)
6841 return NULL_TREE;
6843 if (TREE_CODE (arg1) == INTEGER_CST)
6844 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6845 TREE_INT_CST_HIGH (arg1), 0,
6846 TREE_OVERFLOW (arg1));
6847 else
6848 arg1 = fold_convert (inner_type, arg1);
6850 return fold_build2 (code, type, arg0_inner, arg1);
6853 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6854 step of the array. Reconstructs s and delta in the case of s * delta
6855 being an integer constant (and thus already folded).
6856 ADDR is the address. OP1 is the multiplicative expression.
6857 If the function succeeds, the new address expression is returned. Otherwise
6858 NULL_TREE is returned. */
6860 static tree
6861 try_move_mult_to_index (tree addr, tree op1)
6863 tree s, delta, step;
6864 tree ref = TREE_OPERAND (addr, 0), pref;
6865 tree ret, pos;
6866 tree itype;
6867 bool mdim = false;
6869 /* Strip the nops that might be added when converting op1 to sizetype. */
6870 STRIP_NOPS (op1);
6872 /* Canonicalize op1 into a possibly non-constant delta
6873 and an INTEGER_CST s. */
6874 if (TREE_CODE (op1) == MULT_EXPR)
6876 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6878 STRIP_NOPS (arg0);
6879 STRIP_NOPS (arg1);
6881 if (TREE_CODE (arg0) == INTEGER_CST)
6883 s = arg0;
6884 delta = arg1;
6886 else if (TREE_CODE (arg1) == INTEGER_CST)
6888 s = arg1;
6889 delta = arg0;
6891 else
6892 return NULL_TREE;
6894 else if (TREE_CODE (op1) == INTEGER_CST)
6896 delta = op1;
6897 s = NULL_TREE;
6899 else
6901 /* Treat op1 as delta * 1. */
6902 delta = op1;
6903 s = integer_one_node;
6906 for (;; ref = TREE_OPERAND (ref, 0))
6908 if (TREE_CODE (ref) == ARRAY_REF)
6910 /* Remember if this was a multi-dimensional array. */
6911 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6912 mdim = true;
6914 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6915 if (! itype)
6916 continue;
6918 step = array_ref_element_size (ref);
6919 if (TREE_CODE (step) != INTEGER_CST)
6920 continue;
6922 if (s)
6924 if (! tree_int_cst_equal (step, s))
6925 continue;
6927 else
6929 /* See whether delta is a multiple of step. */
6930 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6931 if (! tmp)
6932 continue;
6933 delta = tmp;
6936 /* Only fold here if we can verify we do not overflow one
6937 dimension of a multi-dimensional array. */
6938 if (mdim)
6940 tree tmp;
6942 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6943 || !INTEGRAL_TYPE_P (itype)
6944 || !TYPE_MAX_VALUE (itype)
6945 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6946 continue;
6948 tmp = fold_binary (PLUS_EXPR, itype,
6949 fold_convert (itype,
6950 TREE_OPERAND (ref, 1)),
6951 fold_convert (itype, delta));
6952 if (!tmp
6953 || TREE_CODE (tmp) != INTEGER_CST
6954 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6955 continue;
6958 break;
6960 else
6961 mdim = false;
6963 if (!handled_component_p (ref))
6964 return NULL_TREE;
6967 /* We found a suitable array reference. So copy everything up to it,
6968 and replace the index. */
6970 pref = TREE_OPERAND (addr, 0);
6971 ret = copy_node (pref);
6972 pos = ret;
6974 while (pref != ref)
6976 pref = TREE_OPERAND (pref, 0);
6977 TREE_OPERAND (pos, 0) = copy_node (pref);
6978 pos = TREE_OPERAND (pos, 0);
6981 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6982 fold_convert (itype,
6983 TREE_OPERAND (pos, 1)),
6984 fold_convert (itype, delta));
6986 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
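/* The address identity behind the fold above (hypothetical standalone
   sketch):  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[16];
  int idx = 3, delta = 5;
  /* &a[idx] p+ delta * sizeof (int) becomes &a[idx + delta].  */
  assert ((char *) &a[idx] + delta * sizeof (int)
	  == (char *) &a[idx + delta]);
  return 0;
}
#endif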
6990 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6991 means A >= Y && A != MAX, but in this case we know that
6992 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6994 static tree
6995 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6997 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6999 if (TREE_CODE (bound) == LT_EXPR)
7000 a = TREE_OPERAND (bound, 0);
7001 else if (TREE_CODE (bound) == GT_EXPR)
7002 a = TREE_OPERAND (bound, 1);
7003 else
7004 return NULL_TREE;
7006 typea = TREE_TYPE (a);
7007 if (!INTEGRAL_TYPE_P (typea)
7008 && !POINTER_TYPE_P (typea))
7009 return NULL_TREE;
7011 if (TREE_CODE (ineq) == LT_EXPR)
7013 a1 = TREE_OPERAND (ineq, 1);
7014 y = TREE_OPERAND (ineq, 0);
7016 else if (TREE_CODE (ineq) == GT_EXPR)
7018 a1 = TREE_OPERAND (ineq, 0);
7019 y = TREE_OPERAND (ineq, 1);
7021 else
7022 return NULL_TREE;
7024 if (TREE_TYPE (a1) != typea)
7025 return NULL_TREE;
7027 if (POINTER_TYPE_P (typea))
7029 /* Convert the pointers into integers before taking the difference. */
7030 tree ta = fold_convert (ssizetype, a);
7031 tree ta1 = fold_convert (ssizetype, a1);
7032 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7034 else
7035 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7037 if (!diff || !integer_onep (diff))
7038 return NULL_TREE;
7040 return fold_build2 (GE_EXPR, type, a, y);
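/* Exhaustive spot-check of the rewrite above on a small range
   (hypothetical sketch; A < X guarantees A + 1 cannot wrap):  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, y, x = 10;
  for (a = -5; a < x; a++)
    for (y = -5; y <= 15; y++)
      /* A + 1 > Y  ->  A >= Y.  */
      assert ((a + 1 > y) == (a >= y));
  return 0;
}
#endif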
7043 /* Fold a sum or difference of at least one multiplication.
7044 Returns the folded tree or NULL if no simplification could be made. */
7046 static tree
7047 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7049 tree arg00, arg01, arg10, arg11;
7050 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7052 /* (A * C) +- (B * C) -> (A+-B) * C.
7053 (A * C) +- A -> A * (C+-1).
7054 We are most concerned about the case where C is a constant,
7055 but other combinations show up during loop reduction. Since
7056 it is not difficult, try all four possibilities. */
7058 if (TREE_CODE (arg0) == MULT_EXPR)
7060 arg00 = TREE_OPERAND (arg0, 0);
7061 arg01 = TREE_OPERAND (arg0, 1);
7063 else if (TREE_CODE (arg0) == INTEGER_CST)
7065 arg00 = build_one_cst (type);
7066 arg01 = arg0;
7068 else
7070 /* We cannot generate constant 1 for fract. */
7071 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7072 return NULL_TREE;
7073 arg00 = arg0;
7074 arg01 = build_one_cst (type);
7076 if (TREE_CODE (arg1) == MULT_EXPR)
7078 arg10 = TREE_OPERAND (arg1, 0);
7079 arg11 = TREE_OPERAND (arg1, 1);
7081 else if (TREE_CODE (arg1) == INTEGER_CST)
7083 arg10 = build_one_cst (type);
7084 arg11 = arg1;
7086 else
7088 /* We cannot generate constant 1 for fract. */
7089 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7090 return NULL_TREE;
7091 arg10 = arg1;
7092 arg11 = build_one_cst (type);
7094 same = NULL_TREE;
7096 if (operand_equal_p (arg01, arg11, 0))
7097 same = arg01, alt0 = arg00, alt1 = arg10;
7098 else if (operand_equal_p (arg00, arg10, 0))
7099 same = arg00, alt0 = arg01, alt1 = arg11;
7100 else if (operand_equal_p (arg00, arg11, 0))
7101 same = arg00, alt0 = arg01, alt1 = arg10;
7102 else if (operand_equal_p (arg01, arg10, 0))
7103 same = arg01, alt0 = arg00, alt1 = arg11;
7105 /* No identical multiplicands; see if we can find a common
7106 power-of-two factor in non-power-of-two multiplies. This
7107 can help in multi-dimensional array access. */
7108 else if (host_integerp (arg01, 0)
7109 && host_integerp (arg11, 0))
7111 HOST_WIDE_INT int01, int11, tmp;
7112 bool swap = false;
7113 tree maybe_same;
7114 int01 = TREE_INT_CST_LOW (arg01);
7115 int11 = TREE_INT_CST_LOW (arg11);
7117 /* Move min of absolute values to int11. */
7118 if ((int01 >= 0 ? int01 : -int01)
7119 < (int11 >= 0 ? int11 : -int11))
7121 tmp = int01, int01 = int11, int11 = tmp;
7122 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7123 maybe_same = arg01;
7124 swap = true;
7126 else
7127 maybe_same = arg11;
7129 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7131 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7132 build_int_cst (TREE_TYPE (arg00),
7133 int01 / int11));
7134 alt1 = arg10;
7135 same = maybe_same;
7136 if (swap)
7137 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7141 if (same)
7142 return fold_build2 (MULT_EXPR, type,
7143 fold_build2 (code, type,
7144 fold_convert (type, alt0),
7145 fold_convert (type, alt1)),
7146 fold_convert (type, same));
7148 return NULL_TREE;
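/* The distributed forms above, on sample values (hypothetical sketch,
   assuming no overflow):  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 6, b = 7, c = 5;
  /* (A * C) + (B * C)  ->  (A + B) * C.  */
  assert (a * c + b * c == (a + b) * c);
  /* (A * C) - A  ->  A * (C - 1).  */
  assert (a * c - a == a * (c - 1));
  return 0;
}
#endif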
7151 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7154 upon failure. */
7156 static int
7157 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7159 tree type = TREE_TYPE (expr);
7160 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7161 int byte, offset, word, words;
7162 unsigned char value;
7164 if (total_bytes > len)
7165 return 0;
7166 words = total_bytes / UNITS_PER_WORD;
7168 for (byte = 0; byte < total_bytes; byte++)
7170 int bitpos = byte * BITS_PER_UNIT;
7171 if (bitpos < HOST_BITS_PER_WIDE_INT)
7172 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7173 else
7174 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7175 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7177 if (total_bytes > UNITS_PER_WORD)
7179 word = byte / UNITS_PER_WORD;
7180 if (WORDS_BIG_ENDIAN)
7181 word = (words - 1) - word;
7182 offset = word * UNITS_PER_WORD;
7183 if (BYTES_BIG_ENDIAN)
7184 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7185 else
7186 offset += byte % UNITS_PER_WORD;
7188 else
7189 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7190 ptr[offset] = value;
7192 return total_bytes;
7196 /* Subroutine of native_encode_expr. Encode the REAL_CST
7197 specified by EXPR into the buffer PTR of length LEN bytes.
7198 Return the number of bytes placed in the buffer, or zero
7199 upon failure. */
7201 static int
7202 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7204 tree type = TREE_TYPE (expr);
7205 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7206 int byte, offset, word, words, bitpos;
7207 unsigned char value;
7209 /* There are always 32 bits in each long, no matter the size of
7210 the host's long. We handle floating point representations with
7211 up to 192 bits. */
7212 long tmp[6];
7214 if (total_bytes > len)
7215 return 0;
7216 words = 32 / UNITS_PER_WORD;
7218 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7220 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7221 bitpos += BITS_PER_UNIT)
7223 byte = (bitpos / BITS_PER_UNIT) & 3;
7224 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7226 if (UNITS_PER_WORD < 4)
7228 word = byte / UNITS_PER_WORD;
7229 if (WORDS_BIG_ENDIAN)
7230 word = (words - 1) - word;
7231 offset = word * UNITS_PER_WORD;
7232 if (BYTES_BIG_ENDIAN)
7233 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7234 else
7235 offset += byte % UNITS_PER_WORD;
7237 else
7238 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7239 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7241 return total_bytes;
7244 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
7246 Return the number of bytes placed in the buffer, or zero
7247 upon failure. */
7249 static int
7250 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7252 int rsize, isize;
7253 tree part;
7255 part = TREE_REALPART (expr);
7256 rsize = native_encode_expr (part, ptr, len);
7257 if (rsize == 0)
7258 return 0;
7259 part = TREE_IMAGPART (expr);
7260 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7261 if (isize != rsize)
7262 return 0;
7263 return rsize + isize;
7267 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
7269 Return the number of bytes placed in the buffer, or zero
7270 upon failure. */
7272 static int
7273 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7275 int i, size, offset, count;
7276 tree itype, elem, elements;
7278 offset = 0;
7279 elements = TREE_VECTOR_CST_ELTS (expr);
7280 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7281 itype = TREE_TYPE (TREE_TYPE (expr));
7282 size = GET_MODE_SIZE (TYPE_MODE (itype));
7283 for (i = 0; i < count; i++)
7285 if (elements)
7287 elem = TREE_VALUE (elements);
7288 elements = TREE_CHAIN (elements);
7290 else
7291 elem = NULL_TREE;
7293 if (elem)
7295 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7296 return 0;
7298 else
7300 if (offset + size > len)
7301 return 0;
7302 memset (ptr+offset, 0, size);
7304 offset += size;
7306 return offset;
7310 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7311 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7312 buffer PTR of length LEN bytes. Return the number of bytes
7313 placed in the buffer, or zero upon failure. */
7316 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7318 switch (TREE_CODE (expr))
7320 case INTEGER_CST:
7321 return native_encode_int (expr, ptr, len);
7323 case REAL_CST:
7324 return native_encode_real (expr, ptr, len);
7326 case COMPLEX_CST:
7327 return native_encode_complex (expr, ptr, len);
7329 case VECTOR_CST:
7330 return native_encode_vector (expr, ptr, len);
7332 default:
7333 return 0;
7338 /* Subroutine of native_interpret_expr. Interpret the contents of
7339 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7340 If the buffer cannot be interpreted, return NULL_TREE. */
7342 static tree
7343 native_interpret_int (tree type, const unsigned char *ptr, int len)
7345 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7346 int byte, offset, word, words;
7347 unsigned char value;
7348 unsigned HOST_WIDE_INT lo = 0;
7349 HOST_WIDE_INT hi = 0;
7351 if (total_bytes > len)
7352 return NULL_TREE;
7353 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7354 return NULL_TREE;
7355 words = total_bytes / UNITS_PER_WORD;
7357 for (byte = 0; byte < total_bytes; byte++)
7359 int bitpos = byte * BITS_PER_UNIT;
7360 if (total_bytes > UNITS_PER_WORD)
7362 word = byte / UNITS_PER_WORD;
7363 if (WORDS_BIG_ENDIAN)
7364 word = (words - 1) - word;
7365 offset = word * UNITS_PER_WORD;
7366 if (BYTES_BIG_ENDIAN)
7367 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7368 else
7369 offset += byte % UNITS_PER_WORD;
7371 else
7372 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7373 value = ptr[offset];
7375 if (bitpos < HOST_BITS_PER_WIDE_INT)
7376 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7377 else
7378 hi |= (unsigned HOST_WIDE_INT) value
7379 << (bitpos - HOST_BITS_PER_WIDE_INT);
7382 return build_int_cst_wide_type (type, lo, hi);
7386 /* Subroutine of native_interpret_expr. Interpret the contents of
7387 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7388 If the buffer cannot be interpreted, return NULL_TREE. */
7390 static tree
7391 native_interpret_real (tree type, const unsigned char *ptr, int len)
7393 enum machine_mode mode = TYPE_MODE (type);
7394 int total_bytes = GET_MODE_SIZE (mode);
7395 int byte, offset, word, words, bitpos;
7396 unsigned char value;
7397 /* There are always 32 bits in each long, no matter the size of
7398 the host's long. We handle floating point representations with
7399 up to 192 bits. */
7400 REAL_VALUE_TYPE r;
7401 long tmp[6];
7403 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7404 if (total_bytes > len || total_bytes > 24)
7405 return NULL_TREE;
7406 words = 32 / UNITS_PER_WORD;
7408 memset (tmp, 0, sizeof (tmp));
7409 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7410 bitpos += BITS_PER_UNIT)
7412 byte = (bitpos / BITS_PER_UNIT) & 3;
7413 if (UNITS_PER_WORD < 4)
7415 word = byte / UNITS_PER_WORD;
7416 if (WORDS_BIG_ENDIAN)
7417 word = (words - 1) - word;
7418 offset = word * UNITS_PER_WORD;
7419 if (BYTES_BIG_ENDIAN)
7420 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7421 else
7422 offset += byte % UNITS_PER_WORD;
7424 else
7425 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7426 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7428 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7431 real_from_target (&r, tmp, mode);
7432 return build_real (type, r);
7436 /* Subroutine of native_interpret_expr. Interpret the contents of
7437 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7438 If the buffer cannot be interpreted, return NULL_TREE. */
7440 static tree
7441 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7443 tree etype, rpart, ipart;
7444 int size;
7446 etype = TREE_TYPE (type);
7447 size = GET_MODE_SIZE (TYPE_MODE (etype));
7448 if (size * 2 > len)
7449 return NULL_TREE;
7450 rpart = native_interpret_expr (etype, ptr, size);
7451 if (!rpart)
7452 return NULL_TREE;
7453 ipart = native_interpret_expr (etype, ptr+size, size);
7454 if (!ipart)
7455 return NULL_TREE;
7456 return build_complex (type, rpart, ipart);
7460 /* Subroutine of native_interpret_expr. Interpret the contents of
7461 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7462 If the buffer cannot be interpreted, return NULL_TREE. */
7464 static tree
7465 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7467 tree etype, elem, elements;
7468 int i, size, count;
7470 etype = TREE_TYPE (type);
7471 size = GET_MODE_SIZE (TYPE_MODE (etype));
7472 count = TYPE_VECTOR_SUBPARTS (type);
7473 if (size * count > len)
7474 return NULL_TREE;
7476 elements = NULL_TREE;
7477 for (i = count - 1; i >= 0; i--)
7479 elem = native_interpret_expr (etype, ptr+(i*size), size);
7480 if (!elem)
7481 return NULL_TREE;
7482 elements = tree_cons (NULL_TREE, elem, elements);
7484 return build_vector (type, elements);
7488 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a constant of type TYPE. For
7490 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7491 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7492 return NULL_TREE. */
7494 tree
7495 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7497 switch (TREE_CODE (type))
7499 case INTEGER_TYPE:
7500 case ENUMERAL_TYPE:
7501 case BOOLEAN_TYPE:
7502 return native_interpret_int (type, ptr, len);
7504 case REAL_TYPE:
7505 return native_interpret_real (type, ptr, len);
7507 case COMPLEX_TYPE:
7508 return native_interpret_complex (type, ptr, len);
7510 case VECTOR_TYPE:
7511 return native_interpret_vector (type, ptr, len);
7513 default:
7514 return NULL_TREE;
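/* Usage sketch (hypothetical buffer BUF): given 8 bytes previously
   produced by native_encode_expr for a 64-bit integer constant,

     tree t = native_interpret_expr (long_long_integer_type_node,
                                     buf, 8);

   rebuilds the INTEGER_CST, while unsupported types such as
   RECORD_TYPE fall through to the default case and yield
   NULL_TREE.  */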
7519 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7520 TYPE at compile-time. If we're unable to perform the conversion
7521 return NULL_TREE. */
7523 static tree
7524 fold_view_convert_expr (tree type, tree expr)
7526 /* We support up to 512-bit values (for V8DFmode). */
7527 unsigned char buffer[64];
7528 int len;
7530 /* Check that the host and target are sane. */
7531 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7532 return NULL_TREE;
7534 len = native_encode_expr (expr, buffer, sizeof (buffer));
7535 if (len == 0)
7536 return NULL_TREE;
7538 return native_interpret_expr (type, buffer, len);
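/* Source-level sketch (illustrative): reinterpreting the 32-bit
   float constant 1.0f as a 32-bit int via a VIEW_CONVERT_EXPR folds
   here to the INTEGER_CST 0x3f800000: the float's target bytes are
   encoded into BUFFER by native_encode_expr and re-read as an int.
   Values wider than the 64-byte buffer, or hosts/targets without
   8-bit bytes, make the function bail out with NULL_TREE.  */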
7541 /* Build an expression for the address of T. Folds away INDIRECT_REF
7542 to avoid confusing the gimplify process. When IN_FOLD is true
7543 avoid modifications of T. */
7545 static tree
7546 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7548 /* The size of the object is not relevant when talking about its address. */
7549 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7550 t = TREE_OPERAND (t, 0);
7552 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7553 if (TREE_CODE (t) == INDIRECT_REF
7554 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7556 t = TREE_OPERAND (t, 0);
7558 if (TREE_TYPE (t) != ptrtype)
7559 t = build1 (NOP_EXPR, ptrtype, t);
7561 else if (!in_fold)
7563 tree base = t;
7565 while (handled_component_p (base))
7566 base = TREE_OPERAND (base, 0);
7568 if (DECL_P (base))
7569 TREE_ADDRESSABLE (base) = 1;
7571 t = build1 (ADDR_EXPR, ptrtype, t);
7573 else
7574 t = build1 (ADDR_EXPR, ptrtype, t);
7576 return t;
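/* Example of the INDIRECT_REF short-circuit above (illustrative):
   for a pointer P, building the address of *P does not create
   ADDR_EXPR <INDIRECT_REF <P>> but reuses P itself, wrapping it in
   a NOP_EXPR only if P's type differs from the requested PTRTYPE.  */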
7579 /* Build an expression for the address of T with type PTRTYPE. This
7580 function modifies the input parameter 'T' by sometimes setting the
7581 TREE_ADDRESSABLE flag. */
7583 tree
7584 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7586 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7589 /* Build an expression for the address of T. This function modifies
7590 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7591 flag. When called from fold functions, use fold_addr_expr instead. */
7593 tree
7594 build_fold_addr_expr (tree t)
7596 return build_fold_addr_expr_with_type_1 (t,
7597 build_pointer_type (TREE_TYPE (t)),
7598 false);
7601 /* Same as build_fold_addr_expr, builds an expression for the address
7602 of T, but avoids touching the input node 't'. Fold functions
7603 should use this version. */
7605 static tree
7606 fold_addr_expr (tree t)
7608 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7610 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7613 /* Fold a unary expression of code CODE and type TYPE with operand
7614 OP0. Return the folded expression if folding is successful.
7615 Otherwise, return NULL_TREE. */
7617 tree
7618 fold_unary (enum tree_code code, tree type, tree op0)
7620 tree tem;
7621 tree arg0;
7622 enum tree_code_class kind = TREE_CODE_CLASS (code);
7624 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7625 && TREE_CODE_LENGTH (code) == 1);
7627 arg0 = op0;
7628 if (arg0)
7630 if (code == NOP_EXPR || code == CONVERT_EXPR
7631 || code == FLOAT_EXPR || code == ABS_EXPR)
7633 /* Don't use STRIP_NOPS, because signedness of argument type
7634 matters. */
7635 STRIP_SIGN_NOPS (arg0);
7637 else
7639 /* Strip any conversions that don't change the mode. This
7640 is safe for every expression, except for a comparison
7641 expression because its signedness is derived from its
7642 operands.
7644 Note that this is done as an internal manipulation within
7645 the constant folder, in order to find the simplest
7646 representation of the arguments so that their form can be
7647 studied. In any case, the appropriate type conversions
7648 should be put back in the tree that will get out of the
7649 constant folder. */
7650 STRIP_NOPS (arg0);
7654 if (TREE_CODE_CLASS (code) == tcc_unary)
7656 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7657 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7658 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7659 else if (TREE_CODE (arg0) == COND_EXPR)
7661 tree arg01 = TREE_OPERAND (arg0, 1);
7662 tree arg02 = TREE_OPERAND (arg0, 2);
7663 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7664 arg01 = fold_build1 (code, type, arg01);
7665 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7666 arg02 = fold_build1 (code, type, arg02);
7667 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7668 arg01, arg02);
7670 /* If this was a conversion, and all we did was to move into
7671 inside the COND_EXPR, bring it back out. But leave it if
7672 it is a conversion from integer to integer and the
7673 result precision is no wider than a word since such a
7674 conversion is cheap and may be optimized away by combine,
7675 while it couldn't if it were outside the COND_EXPR. Then return
7676 so we don't get into an infinite recursion loop taking the
7677 conversion out and then back in. */
7679 if ((code == NOP_EXPR || code == CONVERT_EXPR
7680 || code == NON_LVALUE_EXPR)
7681 && TREE_CODE (tem) == COND_EXPR
7682 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7683 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7684 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7688 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7689 && (INTEGRAL_TYPE_P
7690 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7691 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7692 || flag_syntax_only))
7693 tem = build1 (code, type,
7694 build3 (COND_EXPR,
7695 TREE_TYPE (TREE_OPERAND
7696 (TREE_OPERAND (tem, 1), 0)),
7697 TREE_OPERAND (tem, 0),
7698 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7699 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7700 return tem;
7702 else if (COMPARISON_CLASS_P (arg0))
7704 if (TREE_CODE (type) == BOOLEAN_TYPE)
7706 arg0 = copy_node (arg0);
7707 TREE_TYPE (arg0) = type;
7708 return arg0;
7710 else if (TREE_CODE (type) != INTEGER_TYPE)
7711 return fold_build3 (COND_EXPR, type, arg0,
7712 fold_build1 (code, type,
7713 integer_one_node),
7714 fold_build1 (code, type,
7715 integer_zero_node));
7719 switch (code)
7721 case PAREN_EXPR:
7722 /* Re-association barriers around constants and other re-association
7723 barriers can be removed. */
7724 if (CONSTANT_CLASS_P (op0)
7725 || TREE_CODE (op0) == PAREN_EXPR)
7726 return fold_convert (type, op0);
7727 return NULL_TREE;
7729 CASE_CONVERT:
7730 case FLOAT_EXPR:
7731 case FIX_TRUNC_EXPR:
7732 if (TREE_TYPE (op0) == type)
7733 return op0;
7735 /* If we have (type) (a CMP b) and type is an integral type, return
7736 new expression involving the new type. */
7737 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7738 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7739 TREE_OPERAND (op0, 1));
7741 /* Handle cases of two conversions in a row. */
7742 if (CONVERT_EXPR_P (op0))
7744 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7745 tree inter_type = TREE_TYPE (op0);
7746 int inside_int = INTEGRAL_TYPE_P (inside_type);
7747 int inside_ptr = POINTER_TYPE_P (inside_type);
7748 int inside_float = FLOAT_TYPE_P (inside_type);
7749 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7750 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7751 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7752 int inter_int = INTEGRAL_TYPE_P (inter_type);
7753 int inter_ptr = POINTER_TYPE_P (inter_type);
7754 int inter_float = FLOAT_TYPE_P (inter_type);
7755 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7756 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7757 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7758 int final_int = INTEGRAL_TYPE_P (type);
7759 int final_ptr = POINTER_TYPE_P (type);
7760 int final_float = FLOAT_TYPE_P (type);
7761 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7762 unsigned int final_prec = TYPE_PRECISION (type);
7763 int final_unsignedp = TYPE_UNSIGNED (type);
7765 /* In addition to the cases of two conversions in a row
7766 handled below, if we are converting something to its own
7767 type via an object of identical or wider precision, neither
7768 conversion is needed. */
7769 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7770 && (((inter_int || inter_ptr) && final_int)
7771 || (inter_float && final_float))
7772 && inter_prec >= final_prec)
7773 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7775 /* Likewise, if the intermediate and final types are either both
7776 float or both integer, we don't need the middle conversion if
7777 it is wider than the final type and doesn't change the signedness
7778 (for integers). Avoid this if the final type is a pointer
7779 since then we sometimes need the inner conversion. Likewise if
7780 the outer has a precision not equal to the size of its mode. */
7781 if (((inter_int && inside_int)
7782 || (inter_float && inside_float)
7783 || (inter_vec && inside_vec))
7784 && inter_prec >= inside_prec
7785 && (inter_float || inter_vec
7786 || inter_unsignedp == inside_unsignedp)
7787 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7788 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7789 && ! final_ptr
7790 && (! final_vec || inter_prec == inside_prec))
7791 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7793 /* If we have a sign-extension of a zero-extended value, we can
7794 replace that by a single zero-extension. */
7795 if (inside_int && inter_int && final_int
7796 && inside_prec < inter_prec && inter_prec < final_prec
7797 && inside_unsignedp && !inter_unsignedp)
7798 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7800 /* Two conversions in a row are not needed unless:
7801 - some conversion is floating-point (overstrict for now), or
7802 - some conversion is a vector (overstrict for now), or
7803 - the intermediate type is narrower than both initial and
7804 final, or
7805 - the intermediate type and innermost type differ in signedness,
7806 and the outermost type is wider than the intermediate, or
7807 - the initial type is a pointer type and the precisions of the
7808 intermediate and final types differ, or
7809 - the final type is a pointer type and the precisions of the
7810 initial and intermediate types differ. */
7811 if (! inside_float && ! inter_float && ! final_float
7812 && ! inside_vec && ! inter_vec && ! final_vec
7813 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7814 && ! (inside_int && inter_int
7815 && inter_unsignedp != inside_unsignedp
7816 && inter_prec < final_prec)
7817 && ((inter_unsignedp && inter_prec > inside_prec)
7818 == (final_unsignedp && final_prec > inter_prec))
7819 && ! (inside_ptr && inter_prec != final_prec)
7820 && ! (final_ptr && inside_prec != inter_prec)
7821 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7822 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7823 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
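/* Concrete instances of the rules above (illustrative; assumes
   8-bit char, 16-bit short, 32-bit int).  For a short S,

     (short) (int) S  ->  S

   since S is converted back to its own type via a wider object; and
   for an unsigned char C,

     (int) (short) (unsigned char) C  ->  (int) (unsigned char) C

   because a sign-extension of a zero-extended value is itself a
   zero-extension.  */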
7826 /* Handle (T *)&A.B.C for A being of type T and B and C
7827 living at offset zero. This occurs frequently in
7828 C++ upcasting and then accessing the base. */
7829 if (TREE_CODE (op0) == ADDR_EXPR
7830 && POINTER_TYPE_P (type)
7831 && handled_component_p (TREE_OPERAND (op0, 0)))
7833 HOST_WIDE_INT bitsize, bitpos;
7834 tree offset;
7835 enum machine_mode mode;
7836 int unsignedp, volatilep;
7837 tree base = TREE_OPERAND (op0, 0);
7838 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7839 &mode, &unsignedp, &volatilep, false);
7840 /* If the reference was to a (constant) zero offset, we can use
7841 the address of the base if it has the same base type
7842 as the result type. */
7843 if (! offset && bitpos == 0
7844 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7845 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7846 return fold_convert (type, fold_addr_expr (base));
7849 if ((TREE_CODE (op0) == MODIFY_EXPR
7850 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7851 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7852 /* Detect assigning a bitfield. */
7853 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7854 && DECL_BIT_FIELD
7855 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7857 /* Don't leave an assignment inside a conversion
7858 unless assigning a bitfield. */
7859 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7860 /* First do the assignment, then return converted constant. */
7861 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7862 TREE_NO_WARNING (tem) = 1;
7863 TREE_USED (tem) = 1;
7864 return tem;
7867 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7868 constant (if x has signed type, the sign bit cannot be set
7869 in c). This folds extension into the BIT_AND_EXPR.
7870 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7871 very likely don't have maximal range for their precision and this
7872 transformation effectively doesn't preserve non-maximal ranges. */
7873 if (TREE_CODE (type) == INTEGER_TYPE
7874 && TREE_CODE (op0) == BIT_AND_EXPR
7875 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7877 tree and = op0;
7878 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7879 int change = 0;
7881 if (TYPE_UNSIGNED (TREE_TYPE (and))
7882 || (TYPE_PRECISION (type)
7883 <= TYPE_PRECISION (TREE_TYPE (and))))
7884 change = 1;
7885 else if (TYPE_PRECISION (TREE_TYPE (and1))
7886 <= HOST_BITS_PER_WIDE_INT
7887 && host_integerp (and1, 1))
7889 unsigned HOST_WIDE_INT cst;
7891 cst = tree_low_cst (and1, 1);
7892 cst &= (HOST_WIDE_INT) -1
7893 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7894 change = (cst == 0);
7895 #ifdef LOAD_EXTEND_OP
7896 if (change
7897 && !flag_syntax_only
7898 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7899 == ZERO_EXTEND))
7901 tree uns = unsigned_type_for (TREE_TYPE (and0));
7902 and0 = fold_convert (uns, and0);
7903 and1 = fold_convert (uns, and1);
7905 #endif
7907 if (change)
7909 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7910 TREE_INT_CST_HIGH (and1), 0,
7911 TREE_OVERFLOW (and1));
7912 return fold_build2 (BIT_AND_EXPR, type,
7913 fold_convert (type, and0), tem);
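/* Example of the BIT_AND_EXPR fold above (illustrative, 32-bit
   int): for an int X,

     (unsigned int) (X & 0xff)  ->  (unsigned int) X & 0xffU

   since the precision does not grow.  Widening conversions
   additionally require the mask to leave the sign bit of the
   narrower operand's type clear, which is what the CST test
   checks.  */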
7917 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7918 when one of the new casts will fold away. Conservatively we assume
7919 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7920 if (POINTER_TYPE_P (type)
7921 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7922 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7923 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7924 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7926 tree arg00 = TREE_OPERAND (arg0, 0);
7927 tree arg01 = TREE_OPERAND (arg0, 1);
7929 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7930 fold_convert (sizetype, arg01));
7933 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7934 of the same precision, and X is an integer type not narrower than
7935 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7936 if (INTEGRAL_TYPE_P (type)
7937 && TREE_CODE (op0) == BIT_NOT_EXPR
7938 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7939 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7940 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7942 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7943 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7944 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7945 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7948 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7949 type of X and Y (integer types only). */
7950 if (INTEGRAL_TYPE_P (type)
7951 && TREE_CODE (op0) == MULT_EXPR
7952 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7953 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7955 /* Be careful not to introduce new overflows. */
7956 tree mult_type;
7957 if (TYPE_OVERFLOW_WRAPS (type))
7958 mult_type = type;
7959 else
7960 mult_type = unsigned_type_for (type);
7962 tem = fold_build2 (MULT_EXPR, mult_type,
7963 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7964 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7965 return fold_convert (type, tem);
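/* Example of the narrowing-multiply fold above (illustrative): for
   32-bit ints X and Y,

     (short) (X * Y)  ->  (short) ((unsigned short) X
                                   * (unsigned short) Y)

   performing the multiply in the unsigned variant of the narrow
   type (unless the narrow type wraps anyway) so that no new signed
   overflow is introduced at the smaller precision.  */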
7968 tem = fold_convert_const (code, type, op0);
7969 return tem ? tem : NULL_TREE;
7971 case FIXED_CONVERT_EXPR:
7972 tem = fold_convert_const (code, type, arg0);
7973 return tem ? tem : NULL_TREE;
7975 case VIEW_CONVERT_EXPR:
7976 if (TREE_TYPE (op0) == type)
7977 return op0;
7978 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7979 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7981 /* For integral conversions with the same precision or pointer
7982 conversions use a NOP_EXPR instead. */
7983 if ((INTEGRAL_TYPE_P (type)
7984 || POINTER_TYPE_P (type))
7985 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7986 || POINTER_TYPE_P (TREE_TYPE (op0)))
7987 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7988 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7989 a sub-type to its base type as generated by the Ada FE. */
7990 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 && TREE_TYPE (TREE_TYPE (op0))))
7992 return fold_convert (type, op0);
7994 /* Strip inner integral conversions that do not change the precision. */
7995 if (CONVERT_EXPR_P (op0)
7996 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7997 || POINTER_TYPE_P (TREE_TYPE (op0)))
7998 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7999 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8000 && (TYPE_PRECISION (TREE_TYPE (op0))
8001 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8002 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8004 return fold_view_convert_expr (type, op0);
8006 case NEGATE_EXPR:
8007 tem = fold_negate_expr (arg0);
8008 if (tem)
8009 return fold_convert (type, tem);
8010 return NULL_TREE;
8012 case ABS_EXPR:
8013 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8014 return fold_abs_const (arg0, type);
8015 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8016 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8017 /* Convert fabs((double)float) into (double)fabsf(float). */
8018 else if (TREE_CODE (arg0) == NOP_EXPR
8019 && TREE_CODE (type) == REAL_TYPE)
8021 tree targ0 = strip_float_extensions (arg0);
8022 if (targ0 != arg0)
8023 return fold_convert (type, fold_build1 (ABS_EXPR,
8024 TREE_TYPE (targ0),
8025 targ0));
8027 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8028 else if (TREE_CODE (arg0) == ABS_EXPR)
8029 return arg0;
8030 else if (tree_expr_nonnegative_p (arg0))
8031 return arg0;
8033 /* Strip sign ops from argument. */
8034 if (TREE_CODE (type) == REAL_TYPE)
8036 tem = fold_strip_sign_ops (arg0);
8037 if (tem)
8038 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8040 return NULL_TREE;
8042 case CONJ_EXPR:
8043 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8044 return fold_convert (type, arg0);
8045 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8047 tree itype = TREE_TYPE (type);
8048 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8049 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8050 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8052 if (TREE_CODE (arg0) == COMPLEX_CST)
8054 tree itype = TREE_TYPE (type);
8055 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8056 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8057 return build_complex (type, rpart, negate_expr (ipart));
8059 if (TREE_CODE (arg0) == CONJ_EXPR)
8060 return fold_convert (type, TREE_OPERAND (arg0, 0));
8061 return NULL_TREE;
8063 case BIT_NOT_EXPR:
8064 if (TREE_CODE (arg0) == INTEGER_CST)
8065 return fold_not_const (arg0, type);
8066 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8067 return fold_convert (type, TREE_OPERAND (arg0, 0));
8068 /* Convert ~ (-A) to A - 1. */
8069 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8070 return fold_build2 (MINUS_EXPR, type,
8071 fold_convert (type, TREE_OPERAND (arg0, 0)),
8072 build_int_cst (type, 1));
8073 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8074 else if (INTEGRAL_TYPE_P (type)
8075 && ((TREE_CODE (arg0) == MINUS_EXPR
8076 && integer_onep (TREE_OPERAND (arg0, 1)))
8077 || (TREE_CODE (arg0) == PLUS_EXPR
8078 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8079 return fold_build1 (NEGATE_EXPR, type,
8080 fold_convert (type, TREE_OPERAND (arg0, 0)));
8081 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8082 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8083 && (tem = fold_unary (BIT_NOT_EXPR, type,
8084 fold_convert (type,
8085 TREE_OPERAND (arg0, 0)))))
8086 return fold_build2 (BIT_XOR_EXPR, type, tem,
8087 fold_convert (type, TREE_OPERAND (arg0, 1)));
8088 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8089 && (tem = fold_unary (BIT_NOT_EXPR, type,
8090 fold_convert (type,
8091 TREE_OPERAND (arg0, 1)))))
8092 return fold_build2 (BIT_XOR_EXPR, type,
8093 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8094 /* Perform BIT_NOT_EXPR on each element individually. */
8095 else if (TREE_CODE (arg0) == VECTOR_CST)
8097 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8098 int count = TYPE_VECTOR_SUBPARTS (type), i;
8100 for (i = 0; i < count; i++)
8102 if (elements)
8104 elem = TREE_VALUE (elements);
8105 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8106 if (elem == NULL_TREE)
8107 break;
8108 elements = TREE_CHAIN (elements);
8110 else
8111 elem = build_int_cst (TREE_TYPE (type), -1);
8112 list = tree_cons (NULL_TREE, elem, list);
8114 if (i == count)
8115 return build_vector (type, nreverse (list));
8118 return NULL_TREE;
8120 case TRUTH_NOT_EXPR:
8121 /* The argument to invert_truthvalue must have Boolean type. */
8122 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8123 arg0 = fold_convert (boolean_type_node, arg0);
8125 /* Note that the operand of this must be an int
8126 and its values must be 0 or 1.
8127 ("true" is a fixed value perhaps depending on the language,
8128 but we don't handle values other than 1 correctly yet.) */
8129 tem = fold_truth_not_expr (arg0);
8130 if (!tem)
8131 return NULL_TREE;
8132 return fold_convert (type, tem);
8134 case REALPART_EXPR:
8135 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8136 return fold_convert (type, arg0);
8137 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8138 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8139 TREE_OPERAND (arg0, 1));
8140 if (TREE_CODE (arg0) == COMPLEX_CST)
8141 return fold_convert (type, TREE_REALPART (arg0));
8142 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8144 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8145 tem = fold_build2 (TREE_CODE (arg0), itype,
8146 fold_build1 (REALPART_EXPR, itype,
8147 TREE_OPERAND (arg0, 0)),
8148 fold_build1 (REALPART_EXPR, itype,
8149 TREE_OPERAND (arg0, 1)));
8150 return fold_convert (type, tem);
8152 if (TREE_CODE (arg0) == CONJ_EXPR)
8154 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8155 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8156 return fold_convert (type, tem);
8158 if (TREE_CODE (arg0) == CALL_EXPR)
8160 tree fn = get_callee_fndecl (arg0);
8161 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8162 switch (DECL_FUNCTION_CODE (fn))
8164 CASE_FLT_FN (BUILT_IN_CEXPI):
8165 fn = mathfn_built_in (type, BUILT_IN_COS);
8166 if (fn)
8167 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8168 break;
8170 default:
8171 break;
8174 return NULL_TREE;
8176 case IMAGPART_EXPR:
8177 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8178 return fold_convert (type, integer_zero_node);
8179 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8180 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8181 TREE_OPERAND (arg0, 0));
8182 if (TREE_CODE (arg0) == COMPLEX_CST)
8183 return fold_convert (type, TREE_IMAGPART (arg0));
8184 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8186 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8187 tem = fold_build2 (TREE_CODE (arg0), itype,
8188 fold_build1 (IMAGPART_EXPR, itype,
8189 TREE_OPERAND (arg0, 0)),
8190 fold_build1 (IMAGPART_EXPR, itype,
8191 TREE_OPERAND (arg0, 1)));
8192 return fold_convert (type, tem);
8194 if (TREE_CODE (arg0) == CONJ_EXPR)
8196 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8197 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8198 return fold_convert (type, negate_expr (tem));
8200 if (TREE_CODE (arg0) == CALL_EXPR)
8202 tree fn = get_callee_fndecl (arg0);
8203 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8204 switch (DECL_FUNCTION_CODE (fn))
8206 CASE_FLT_FN (BUILT_IN_CEXPI):
8207 fn = mathfn_built_in (type, BUILT_IN_SIN);
8208 if (fn)
8209 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8210 break;
8212 default:
8213 break;
8216 return NULL_TREE;
8218 default:
8219 return NULL_TREE;
8220 } /* switch (code) */
8223 /* Fold a binary expression of code CODE and type TYPE with operands
8224 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8225 Return the folded expression if folding is successful. Otherwise,
8226 return NULL_TREE. */
8228 static tree
8229 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8231 enum tree_code compl_code;
8233 if (code == MIN_EXPR)
8234 compl_code = MAX_EXPR;
8235 else if (code == MAX_EXPR)
8236 compl_code = MIN_EXPR;
8237 else
8238 gcc_unreachable ();
8240 /* MIN (MAX (a, b), b) == b. */
8241 if (TREE_CODE (op0) == compl_code
8242 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8243 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8245 /* MIN (MAX (b, a), b) == b. */
8246 if (TREE_CODE (op0) == compl_code
8247 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8248 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8249 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8251 /* MIN (a, MAX (a, b)) == a. */
8252 if (TREE_CODE (op1) == compl_code
8253 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8254 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8255 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8257 /* MIN (a, MAX (b, a)) == a. */
8258 if (TREE_CODE (op1) == compl_code
8259 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8260 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8261 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8263 return NULL_TREE;
8266 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8267 by changing CODE to reduce the magnitude of constants involved in
8268 ARG0 of the comparison.
8269 Returns a canonicalized comparison tree if a simplification was
8270 possible, otherwise returns NULL_TREE.
8271 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8272 valid if signed overflow is undefined. */
8274 static tree
8275 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8276 tree arg0, tree arg1,
8277 bool *strict_overflow_p)
8279 enum tree_code code0 = TREE_CODE (arg0);
8280 tree t, cst0 = NULL_TREE;
8281 int sgn0;
8282 bool swap = false;
8284 /* Match A +- CST code arg1 and CST code arg1. */
8285 if (!(((code0 == MINUS_EXPR
8286 || code0 == PLUS_EXPR)
8287 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8288 || code0 == INTEGER_CST))
8289 return NULL_TREE;
8291 /* Identify the constant in arg0 and its sign. */
8292 if (code0 == INTEGER_CST)
8293 cst0 = arg0;
8294 else
8295 cst0 = TREE_OPERAND (arg0, 1);
8296 sgn0 = tree_int_cst_sgn (cst0);
8298 /* Overflowed constants and zero will cause problems. */
8299 if (integer_zerop (cst0)
8300 || TREE_OVERFLOW (cst0))
8301 return NULL_TREE;
8303 /* See if we can reduce the magnitude of the constant in
8304 arg0 by changing the comparison code. */
8305 if (code0 == INTEGER_CST)
8307 /* CST <= arg1 -> CST-1 < arg1. */
8308 if (code == LE_EXPR && sgn0 == 1)
8309 code = LT_EXPR;
8310 /* -CST < arg1 -> -CST-1 <= arg1. */
8311 else if (code == LT_EXPR && sgn0 == -1)
8312 code = LE_EXPR;
8313 /* CST > arg1 -> CST-1 >= arg1. */
8314 else if (code == GT_EXPR && sgn0 == 1)
8315 code = GE_EXPR;
8316 /* -CST >= arg1 -> -CST-1 > arg1. */
8317 else if (code == GE_EXPR && sgn0 == -1)
8318 code = GT_EXPR;
8319 else
8320 return NULL_TREE;
8321 /* arg1 code' CST' might be more canonical. */
8322 swap = true;
8324 else
8326 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8327 if (code == LT_EXPR
8328 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8329 code = LE_EXPR;
8330 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8331 else if (code == GT_EXPR
8332 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8333 code = GE_EXPR;
8334 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8335 else if (code == LE_EXPR
8336 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8337 code = LT_EXPR;
8338 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8339 else if (code == GE_EXPR
8340 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8341 code = GT_EXPR;
8342 else
8343 return NULL_TREE;
8344 *strict_overflow_p = true;
8347 /* Now build the constant reduced in magnitude. */
8348 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8349 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8350 if (code0 != INTEGER_CST)
8351 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8353 /* If swapping might yield a more canonical form, do so. */
8354 if (swap)
8355 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8356 else
8357 return fold_build2 (code, type, t, arg1);
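/* Worked example (illustrative, signed int X and Y):

     X - 10 < Y   becomes   X - 9 <= Y

   shrinking the constant's magnitude, which is only valid when
   signed overflow is undefined, hence *STRICT_OVERFLOW_P; a sole
   constant such as  3 <= Y  becomes  2 < Y  and is then swapped to
   the canonical  Y > 2.  */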
8360 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8361 overflow further. Try to decrease the magnitude of constants involved
8362 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8363 and put sole constants at the second argument position.
8364 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8366 static tree
8367 maybe_canonicalize_comparison (enum tree_code code, tree type,
8368 tree arg0, tree arg1)
8370 tree t;
8371 bool strict_overflow_p;
8372 const char * const warnmsg = G_("assuming signed overflow does not occur "
8373 "when reducing constant in comparison");
8375 /* In principle pointers also have undefined overflow behavior,
8376 but that causes problems elsewhere. */
8377 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8378 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8379 return NULL_TREE;
8381 /* Try canonicalization by simplifying arg0. */
8382 strict_overflow_p = false;
8383 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8384 &strict_overflow_p);
8385 if (t)
8387 if (strict_overflow_p)
8388 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8389 return t;
8392 /* Try canonicalization by simplifying arg1 using the swapped
8393 comparison. */
8394 code = swap_tree_comparison (code);
8395 strict_overflow_p = false;
8396 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8397 &strict_overflow_p);
8398 if (t && strict_overflow_p)
8399 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8400 return t;
8403 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8404 space. This is used to avoid issuing overflow warnings for
8405 expressions like &p->x which cannot wrap. */
8407 static bool
8408 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8410 unsigned HOST_WIDE_INT offset_low, total_low;
8411 HOST_WIDE_INT size, offset_high, total_high;
8413 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8414 return true;
8416 if (bitpos < 0)
8417 return true;
8419 if (offset == NULL_TREE)
8421 offset_low = 0;
8422 offset_high = 0;
8424 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8425 return true;
8426 else
8428 offset_low = TREE_INT_CST_LOW (offset);
8429 offset_high = TREE_INT_CST_HIGH (offset);
8432 if (add_double_with_sign (offset_low, offset_high,
8433 bitpos / BITS_PER_UNIT, 0,
8434 &total_low, &total_high,
8435 true))
8436 return true;
8438 if (total_high != 0)
8439 return true;
8441 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8442 if (size <= 0)
8443 return true;
8445 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8446 array. */
8447 if (TREE_CODE (base) == ADDR_EXPR)
8449 HOST_WIDE_INT base_size;
8451 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8452 if (base_size > 0 && size < base_size)
8453 size = base_size;
8456 return total_low > (unsigned HOST_WIDE_INT) size;
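/* Example (illustrative): given

     struct S { int a; int b; } *p;

   the address &p->b has BITPOS 32, no variable OFFSET, and a total
   byte offset of 4, no larger than sizeof (struct S), so this
   returns false: the address cannot wrap.  Offsets reaching past
   the object's size conservatively count as possibly wrapping.  */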
8459 /* Subroutine of fold_binary. This routine performs all of the
8460 transformations that are common to the equality/inequality
8461 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8462 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8463 fold_binary itself should call fold_binary instead. Fold a comparison with
8464 tree code CODE and type TYPE with operands OP0 and OP1. Return
8465 the folded comparison or NULL_TREE. */
8467 static tree
8468 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8470 tree arg0, arg1, tem;
8472 arg0 = op0;
8473 arg1 = op1;
8475 STRIP_SIGN_NOPS (arg0);
8476 STRIP_SIGN_NOPS (arg1);
8478 tem = fold_relational_const (code, type, arg0, arg1);
8479 if (tem != NULL_TREE)
8480 return tem;
8482 /* If one arg is a real or integer constant, put it last. */
8483 if (tree_swap_operands_p (arg0, arg1, true))
8484 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8486 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8487 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8488 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8489 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8490 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8491 && (TREE_CODE (arg1) == INTEGER_CST
8492 && !TREE_OVERFLOW (arg1)))
8494 tree const1 = TREE_OPERAND (arg0, 1);
8495 tree const2 = arg1;
8496 tree variable = TREE_OPERAND (arg0, 0);
8497 tree lhs;
8498 int lhs_add;
8499 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8501 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8502 TREE_TYPE (arg1), const2, const1);
8504 /* If the constant operation overflowed this can be
8505 simplified as a comparison against INT_MAX/INT_MIN. */
8506 if (TREE_CODE (lhs) == INTEGER_CST
8507 && TREE_OVERFLOW (lhs))
8509 int const1_sgn = tree_int_cst_sgn (const1);
8510 enum tree_code code2 = code;
8512 /* Get the sign of the constant on the lhs if the
8513 operation were VARIABLE + CONST1. */
8514 if (TREE_CODE (arg0) == MINUS_EXPR)
8515 const1_sgn = -const1_sgn;
8517 /* The sign of the constant determines if we overflowed
8518 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8519 Canonicalize to the INT_MIN overflow by swapping the comparison
8520 if necessary. */
8521 if (const1_sgn == -1)
8522 code2 = swap_tree_comparison (code);
8524 /* We now can look at the canonicalized case
8525 VARIABLE + 1 CODE2 INT_MIN
8526 and decide on the result. */
8527 if (code2 == LT_EXPR
8528 || code2 == LE_EXPR
8529 || code2 == EQ_EXPR)
8530 return omit_one_operand (type, boolean_false_node, variable);
8531 else if (code2 == NE_EXPR
8532 || code2 == GE_EXPR
8533 || code2 == GT_EXPR)
8534 return omit_one_operand (type, boolean_true_node, variable);
8537 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8538 && (TREE_CODE (lhs) != INTEGER_CST
8539 || !TREE_OVERFLOW (lhs)))
8541 fold_overflow_warning (("assuming signed overflow does not occur "
8542 "when changing X +- C1 cmp C2 to "
8543 "X cmp C1 +- C2"),
8544 WARN_STRICT_OVERFLOW_COMPARISON);
8545 return fold_build2 (code, type, variable, lhs);
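/* Worked example (illustrative, signed 32-bit int X):

     X + 20 < 30   becomes   X < 10

   by moving the constant to the other side.  When computing
   C2 -+ C1 overflows, the answer is already known: for instance
   X - 1 > INT_MAX  folds to 0 and  X - 1 <= INT_MAX  folds to 1
   via the INT_MIN/INT_MAX canonicalization above.  */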
8549 /* For comparisons of pointers we can decompose it to a compile time
8550 comparison of the base objects and the offsets into the object.
8551 This requires at least one operand being an ADDR_EXPR or a
8552 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8553 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8554 && (TREE_CODE (arg0) == ADDR_EXPR
8555 || TREE_CODE (arg1) == ADDR_EXPR
8556 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8557 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8559 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8560 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8561 enum machine_mode mode;
8562 int volatilep, unsignedp;
8563 bool indirect_base0 = false, indirect_base1 = false;
8565 /* Get base and offset for the access. Strip ADDR_EXPR for
8566 get_inner_reference, but put it back by stripping INDIRECT_REF
8567 off the base object if possible. indirect_baseN will be true
8568 if baseN is not an address but refers to the object itself. */
8569 base0 = arg0;
8570 if (TREE_CODE (arg0) == ADDR_EXPR)
8572 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8573 &bitsize, &bitpos0, &offset0, &mode,
8574 &unsignedp, &volatilep, false);
8575 if (TREE_CODE (base0) == INDIRECT_REF)
8576 base0 = TREE_OPERAND (base0, 0);
8577 else
8578 indirect_base0 = true;
8580 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8582 base0 = TREE_OPERAND (arg0, 0);
8583 offset0 = TREE_OPERAND (arg0, 1);
8586 base1 = arg1;
8587 if (TREE_CODE (arg1) == ADDR_EXPR)
8589 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8590 &bitsize, &bitpos1, &offset1, &mode,
8591 &unsignedp, &volatilep, false);
8592 if (TREE_CODE (base1) == INDIRECT_REF)
8593 base1 = TREE_OPERAND (base1, 0);
8594 else
8595 indirect_base1 = true;
8597 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8599 base1 = TREE_OPERAND (arg1, 0);
8600 offset1 = TREE_OPERAND (arg1, 1);
8603 /* If we have equivalent bases we might be able to simplify. */
8604 if (indirect_base0 == indirect_base1
8605 && operand_equal_p (base0, base1, 0))
8607 /* We can fold this expression to a constant if the non-constant
8608 offset parts are equal. */
8609 if ((offset0 == offset1
8610 || (offset0 && offset1
8611 && operand_equal_p (offset0, offset1, 0)))
8612 && (code == EQ_EXPR
8613 || code == NE_EXPR
8614 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8617 if (code != EQ_EXPR
8618 && code != NE_EXPR
8619 && bitpos0 != bitpos1
8620 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8621 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8622 fold_overflow_warning (("assuming pointer wraparound does not "
8623 "occur when comparing P +- C1 with "
8624 "P +- C2"),
8625 WARN_STRICT_OVERFLOW_CONDITIONAL);
8627 switch (code)
8629 case EQ_EXPR:
8630 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8631 case NE_EXPR:
8632 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8633 case LT_EXPR:
8634 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8635 case LE_EXPR:
8636 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8637 case GE_EXPR:
8638 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8639 case GT_EXPR:
8640 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8641 default:;
8644 /* We can simplify the comparison to a comparison of the variable
8645 offset parts if the constant offset parts are equal.
8646 Be careful to use signed size type here because otherwise we
8647 mess with array offsets in the wrong way. This is possible
8648 because pointer arithmetic is restricted to remain within an
8649 object and overflow on pointer differences is undefined as of
8650 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8651 else if (bitpos0 == bitpos1
8652 && ((code == EQ_EXPR || code == NE_EXPR)
8653 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8655 tree signed_size_type_node;
8656 signed_size_type_node = signed_type_for (size_type_node);
8658 /* By converting to signed size type we cover middle-end pointer
8659 arithmetic which operates on unsigned pointer types of size
8660 type size and ARRAY_REF offsets which are properly sign or
8661 zero extended from their type in case it is narrower than
8662 size type. */
8663 if (offset0 == NULL_TREE)
8664 offset0 = build_int_cst (signed_size_type_node, 0);
8665 else
8666 offset0 = fold_convert (signed_size_type_node, offset0);
8667 if (offset1 == NULL_TREE)
8668 offset1 = build_int_cst (signed_size_type_node, 0);
8669 else
8670 offset1 = fold_convert (signed_size_type_node, offset1);
8672 if (code != EQ_EXPR
8673 && code != NE_EXPR
8674 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8675 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8676 fold_overflow_warning (("assuming pointer wraparound does not "
8677 "occur when comparing P +- C1 with "
8678 "P +- C2"),
8679 WARN_STRICT_OVERFLOW_COMPARISON);
8681 return fold_build2 (code, type, offset0, offset1);
8684 /* For non-equal bases we can simplify if they are addresses
8685 of local binding decls or constants. */
8686 else if (indirect_base0 && indirect_base1
8687 /* We know that !operand_equal_p (base0, base1, 0)
8688 because the if condition was false. But make
8689 sure two decls are not the same. */
8690 && base0 != base1
8691 && TREE_CODE (arg0) == ADDR_EXPR
8692 && TREE_CODE (arg1) == ADDR_EXPR
8693 && (((TREE_CODE (base0) == VAR_DECL
8694 || TREE_CODE (base0) == PARM_DECL)
8695 && (targetm.binds_local_p (base0)
8696 || CONSTANT_CLASS_P (base1)))
8697 || CONSTANT_CLASS_P (base0))
8698 && (((TREE_CODE (base1) == VAR_DECL
8699 || TREE_CODE (base1) == PARM_DECL)
8700 && (targetm.binds_local_p (base1)
8701 || CONSTANT_CLASS_P (base0)))
8702 || CONSTANT_CLASS_P (base1)))
8704 if (code == EQ_EXPR)
8705 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8706 else if (code == NE_EXPR)
8707 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8709 /* For equal offsets we can simplify to a comparison of the
8710 base addresses. */
8711 else if (bitpos0 == bitpos1
8712 && (indirect_base0
8713 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8714 && (indirect_base1
8715 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8716 && ((offset0 == offset1)
8717 || (offset0 && offset1
8718 && operand_equal_p (offset0, offset1, 0))))
8720 if (indirect_base0)
8721 base0 = fold_addr_expr (base0);
8722 if (indirect_base1)
8723 base1 = fold_addr_expr (base1);
8724 return fold_build2 (code, type, base0, base1);
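/* Examples of the decomposition above (illustrative):

     &a.f == &b.f   for distinct local structures A and B folds to 0
                    (different bases);
     &p->a < &p->b  compares the constant bit positions of A and B;
     &a[i] == &a[j] reduces to a comparison of the variable offsets
                    of I and J over the common base.  */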
8728 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8729 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8730 the resulting offset is smaller in absolute value than the
8731 original one. */
8732 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8733 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8734 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8735 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8736 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8737 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8738 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8740 tree const1 = TREE_OPERAND (arg0, 1);
8741 tree const2 = TREE_OPERAND (arg1, 1);
8742 tree variable1 = TREE_OPERAND (arg0, 0);
8743 tree variable2 = TREE_OPERAND (arg1, 0);
8744 tree cst;
8745 const char * const warnmsg = G_("assuming signed overflow does not "
8746 "occur when combining constants around "
8747 "a comparison");
8749 /* Put the constant on the side where it doesn't overflow and is
8750 of lower absolute value than before. */
8751 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8752 ? MINUS_EXPR : PLUS_EXPR,
8753 const2, const1, 0);
8754 if (!TREE_OVERFLOW (cst)
8755 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8757 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8758 return fold_build2 (code, type,
8759 variable1,
8760 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8761 variable2, cst));
8764 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8765 ? MINUS_EXPR : PLUS_EXPR,
8766 const1, const2, 0);
8767 if (!TREE_OVERFLOW (cst)
8768 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8770 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8771 return fold_build2 (code, type,
8772 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8773 variable1, cst),
8774 variable2);
8778 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8779 signed arithmetic case. That form is created by the compiler
8780 often enough for folding it to be of value. One example is in
8781 computing loop trip counts after Operator Strength Reduction. */
8782 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8783 && TREE_CODE (arg0) == MULT_EXPR
8784 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8785 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8786 && integer_zerop (arg1))
8788 tree const1 = TREE_OPERAND (arg0, 1);
8789 tree const2 = arg1; /* zero */
8790 tree variable1 = TREE_OPERAND (arg0, 0);
8791 enum tree_code cmp_code = code;
8793 gcc_assert (!integer_zerop (const1));
8795 fold_overflow_warning (("assuming signed overflow does not occur when "
8796 "eliminating multiplication in comparison "
8797 "with zero"),
8798 WARN_STRICT_OVERFLOW_COMPARISON);
8800 /* If const1 is negative we swap the sense of the comparison. */
8801 if (tree_int_cst_sgn (const1) < 0)
8802 cmp_code = swap_tree_comparison (cmp_code);
8804 return fold_build2 (cmp_code, type, variable1, const2);
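/* Example (illustrative, signed int X with undefined overflow):

     X * 4 < 0    becomes   X < 0
     X * -2 > 0   becomes   X < 0

   a negative multiplier additionally swapping the direction of the
   comparison.  */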
8807 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8808 if (tem)
8809 return tem;
8811 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8813 tree targ0 = strip_float_extensions (arg0);
8814 tree targ1 = strip_float_extensions (arg1);
8815 tree newtype = TREE_TYPE (targ0);
8817 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8818 newtype = TREE_TYPE (targ1);
8820 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8821 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8822 return fold_build2 (code, type, fold_convert (newtype, targ0),
8823 fold_convert (newtype, targ1));
8825 /* (-a) CMP (-b) -> b CMP a */
8826 if (TREE_CODE (arg0) == NEGATE_EXPR
8827 && TREE_CODE (arg1) == NEGATE_EXPR)
8828 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8829 TREE_OPERAND (arg0, 0));
8831 if (TREE_CODE (arg1) == REAL_CST)
8833 REAL_VALUE_TYPE cst;
8834 cst = TREE_REAL_CST (arg1);
8836 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8837 if (TREE_CODE (arg0) == NEGATE_EXPR)
8838 return fold_build2 (swap_tree_comparison (code), type,
8839 TREE_OPERAND (arg0, 0),
8840 build_real (TREE_TYPE (arg1),
8841 REAL_VALUE_NEGATE (cst)));
8843 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8844 /* a CMP (-0) -> a CMP 0 */
8845 if (REAL_VALUE_MINUS_ZERO (cst))
8846 return fold_build2 (code, type, arg0,
8847 build_real (TREE_TYPE (arg1), dconst0));
8849 /* x != NaN is always true, other ops are always false. */
8850 if (REAL_VALUE_ISNAN (cst)
8851 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8853 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8854 return omit_one_operand (type, tem, arg0);
8857 /* Fold comparisons against infinity. */
8858 if (REAL_VALUE_ISINF (cst))
8860 tem = fold_inf_compare (code, type, arg0, arg1);
8861 if (tem != NULL_TREE)
8862 return tem;
8866 /* If this is a comparison of a real constant with a PLUS_EXPR
8867 or a MINUS_EXPR of a real constant, we can convert it into a
8868 comparison with a revised real constant as long as no overflow
8869 occurs when unsafe_math_optimizations are enabled. */
8870 if (flag_unsafe_math_optimizations
8871 && TREE_CODE (arg1) == REAL_CST
8872 && (TREE_CODE (arg0) == PLUS_EXPR
8873 || TREE_CODE (arg0) == MINUS_EXPR)
8874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8875 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8876 ? MINUS_EXPR : PLUS_EXPR,
8877 arg1, TREE_OPERAND (arg0, 1), 0))
8878 && !TREE_OVERFLOW (tem))
8879 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8881 /* Likewise, we can simplify a comparison of a real constant with
8882 a MINUS_EXPR whose first operand is also a real constant, i.e.
8883 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8884 floating-point types only if -fassociative-math is set. */
8885 if (flag_associative_math
8886 && TREE_CODE (arg1) == REAL_CST
8887 && TREE_CODE (arg0) == MINUS_EXPR
8888 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8889 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8890 arg1, 0))
8891 && !TREE_OVERFLOW (tem))
8892 return fold_build2 (swap_tree_comparison (code), type,
8893 TREE_OPERAND (arg0, 1), tem);
8895 /* Fold comparisons against built-in math functions. */
8896 if (TREE_CODE (arg1) == REAL_CST
8897 && flag_unsafe_math_optimizations
8898 && ! flag_errno_math)
8900 enum built_in_function fcode = builtin_mathfn_code (arg0);
8902 if (fcode != END_BUILTINS)
8904 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8905 if (tem != NULL_TREE)
8906 return tem;
8911 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8912 && CONVERT_EXPR_P (arg0))
8914 /* If we are widening one operand of an integer comparison,
8915 see if the other operand is similarly being widened. Perhaps we
8916 can do the comparison in the narrower type. */
8917 tem = fold_widened_comparison (code, type, arg0, arg1);
8918 if (tem)
8919 return tem;
8921 /* Or if we are changing signedness. */
8922 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8923 if (tem)
8924 return tem;
8927 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8928 constant, we can simplify it. */
8929 if (TREE_CODE (arg1) == INTEGER_CST
8930 && (TREE_CODE (arg0) == MIN_EXPR
8931 || TREE_CODE (arg0) == MAX_EXPR)
8932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8934 tem = optimize_minmax_comparison (code, type, op0, op1);
8935 if (tem)
8936 return tem;
8939 /* Simplify comparison of something with itself. (For IEEE
8940 floating-point, we can only do some of these simplifications.) */
8941 if (operand_equal_p (arg0, arg1, 0))
8943 switch (code)
8945 case EQ_EXPR:
8946 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8947 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8948 return constant_boolean_node (1, type);
8949 break;
8951 case GE_EXPR:
8952 case LE_EXPR:
8953 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8954 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8955 return constant_boolean_node (1, type);
8956 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8958 case NE_EXPR:
8959 /* For NE, we can only do this simplification if integer
8960 or we don't honor IEEE floating point NaNs. */
8961 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8962 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8963 break;
8964 /* ... fall through ... */
8965 case GT_EXPR:
8966 case LT_EXPR:
8967 return constant_boolean_node (0, type);
8968 default:
8969 gcc_unreachable ();
8973 /* If we are comparing an expression that just has comparisons
8974 of two integer values, arithmetic expressions of those comparisons,
8975 and constants, we can simplify it. There are only three cases
8976 to check: the two values can either be equal, the first can be
8977 greater, or the second can be greater. Fold the expression for
8978 those three values. Since each value must be 0 or 1, we have
8979 eight possibilities, each of which corresponds to the constant 0
8980 or 1 or one of the six possible comparisons.
8982 This handles common cases like (a > b) == 0 but also handles
8983 expressions like ((x > y) - (y > x)) > 0, which supposedly
8984 occur in macroized code. */
8986 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8988 tree cval1 = 0, cval2 = 0;
8989 int save_p = 0;
8991 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8992 /* Don't handle degenerate cases here; they should already
8993 have been handled anyway. */
8994 && cval1 != 0 && cval2 != 0
8995 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8996 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8997 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8998 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8999 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9000 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9001 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9003 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9004 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9006 /* We can't just pass T to eval_subst in case cval1 or cval2
9007 was the same as ARG1. */
9009 tree high_result
9010 = fold_build2 (code, type,
9011 eval_subst (arg0, cval1, maxval,
9012 cval2, minval),
9013 arg1);
9014 tree equal_result
9015 = fold_build2 (code, type,
9016 eval_subst (arg0, cval1, maxval,
9017 cval2, maxval),
9018 arg1);
9019 tree low_result
9020 = fold_build2 (code, type,
9021 eval_subst (arg0, cval1, minval,
9022 cval2, maxval),
9023 arg1);
9025 /* All three of these results should be 0 or 1. Confirm they are.
9026 Then use those values to select the proper code to use. */
9028 if (TREE_CODE (high_result) == INTEGER_CST
9029 && TREE_CODE (equal_result) == INTEGER_CST
9030 && TREE_CODE (low_result) == INTEGER_CST)
9032 /* Make a 3-bit mask with the high-order bit being the
9033 value for `>', the next for `=', and the low for `<'. */
9034 switch ((integer_onep (high_result) * 4)
9035 + (integer_onep (equal_result) * 2)
9036 + integer_onep (low_result))
9038 case 0:
9039 /* Always false. */
9040 return omit_one_operand (type, integer_zero_node, arg0);
9041 case 1:
9042 code = LT_EXPR;
9043 break;
9044 case 2:
9045 code = EQ_EXPR;
9046 break;
9047 case 3:
9048 code = LE_EXPR;
9049 break;
9050 case 4:
9051 code = GT_EXPR;
9052 break;
9053 case 5:
9054 code = NE_EXPR;
9055 break;
9056 case 6:
9057 code = GE_EXPR;
9058 break;
9059 case 7:
9060 /* Always true. */
9061 return omit_one_operand (type, integer_one_node, arg0);
9064 if (save_p)
9065 return save_expr (build2 (code, type, cval1, cval2));
9066 return fold_build2 (code, type, cval1, cval2);
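/* Example of the three-way evaluation above (illustrative): for

     (A > B) == 0

   substituting (max,min), (max,max) and (min,max) for (A,B) gives
   high_result 0, equal_result 1 and low_result 1, i.e. mask 3, so
   the expression folds to  A <= B.  */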
9071 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9072 into a single range test. */
9073 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9074 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9075 && TREE_CODE (arg1) == INTEGER_CST
9076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9077 && !integer_zerop (TREE_OPERAND (arg0, 1))
9078 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9079 && !TREE_OVERFLOW (arg1))
9081 tem = fold_div_compare (code, type, arg0, arg1);
9082 if (tem != NULL_TREE)
9083 return tem;
9086 /* Fold ~X op ~Y as Y op X. */
9087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9088 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9090 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9091 return fold_build2 (code, type,
9092 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9093 TREE_OPERAND (arg0, 0));
9096 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9098 && TREE_CODE (arg1) == INTEGER_CST)
9100 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9101 return fold_build2 (swap_tree_comparison (code), type,
9102 TREE_OPERAND (arg0, 0),
9103 fold_build1 (BIT_NOT_EXPR, cmp_type,
9104 fold_convert (cmp_type, arg1)));
9107 return NULL_TREE;
9111 /* Subroutine of fold_binary. Optimize complex multiplications of the
9112 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9113 argument EXPR represents the expression "z" of type TYPE. */
9115 static tree
9116 fold_mult_zconjz (tree type, tree expr)
9118 tree itype = TREE_TYPE (type);
9119 tree rpart, ipart, tem;
9121 if (TREE_CODE (expr) == COMPLEX_EXPR)
9123 rpart = TREE_OPERAND (expr, 0);
9124 ipart = TREE_OPERAND (expr, 1);
9126 else if (TREE_CODE (expr) == COMPLEX_CST)
9128 rpart = TREE_REALPART (expr);
9129 ipart = TREE_IMAGPART (expr);
9131 else
9133 expr = save_expr (expr);
9134 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9135 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9138 rpart = save_expr (rpart);
9139 ipart = save_expr (ipart);
9140 tem = fold_build2 (PLUS_EXPR, itype,
9141 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9142 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9143 return fold_build2 (COMPLEX_EXPR, type, tem,
9144 fold_convert (itype, integer_zero_node));
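/* Numeric sanity check (illustrative): for z = 3 + 4i,

     z * conj(z) = (3 + 4i)(3 - 4i) = 9 + 16 = 25 + 0i

   which is exactly rpart*rpart + ipart*ipart with a zero imaginary
   part, as constructed above.  */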
9148 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9149 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9150 guarantees that P and N have the same least significant log2(M) bits.
9151 N is not otherwise constrained. In particular, N is not normalized to
9152 0 <= N < M as is common. In general, the precise value of P is unknown.
9153 M is chosen as large as possible such that constant N can be determined.
9155 Returns M and sets *RESIDUE to N. */
9157 static unsigned HOST_WIDE_INT
9158 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9160 enum tree_code code;
9162 *residue = 0;
9164 code = TREE_CODE (expr);
9165 if (code == ADDR_EXPR)
9167 expr = TREE_OPERAND (expr, 0);
9168 if (handled_component_p (expr))
9170 HOST_WIDE_INT bitsize, bitpos;
9171 tree offset;
9172 enum machine_mode mode;
9173 int unsignedp, volatilep;
9175 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9176 &mode, &unsignedp, &volatilep, false);
9177 *residue = bitpos / BITS_PER_UNIT;
9178 if (offset)
9180 if (TREE_CODE (offset) == INTEGER_CST)
9181 *residue += TREE_INT_CST_LOW (offset);
9182 else
9183 /* We don't handle more complicated offset expressions. */
9184 return 1;
9188 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9189 return DECL_ALIGN_UNIT (expr);
9191 else if (code == POINTER_PLUS_EXPR)
9193 tree op0, op1;
9194 unsigned HOST_WIDE_INT modulus;
9195 enum tree_code inner_code;
9197 op0 = TREE_OPERAND (expr, 0);
9198 STRIP_NOPS (op0);
9199 modulus = get_pointer_modulus_and_residue (op0, residue);
9201 op1 = TREE_OPERAND (expr, 1);
9202 STRIP_NOPS (op1);
9203 inner_code = TREE_CODE (op1);
9204 if (inner_code == INTEGER_CST)
9206 *residue += TREE_INT_CST_LOW (op1);
9207 return modulus;
9209 else if (inner_code == MULT_EXPR)
9211 op1 = TREE_OPERAND (op1, 1);
9212 if (TREE_CODE (op1) == INTEGER_CST)
9214 unsigned HOST_WIDE_INT align;
9216 /* Compute the greatest power-of-2 divisor of op1. */
9217 align = TREE_INT_CST_LOW (op1);
9218 align &= -align;
9220 /* If align is non-zero and less than modulus, replace
9221 modulus with align.  If align is 0, then either op1 is 0
9222 or the greatest power-of-2 divisor of op1 doesn't fit in an
9223 unsigned HOST_WIDE_INT. In either case, no additional
9224 constraint is imposed. */
9225 if (align)
9226 modulus = MIN (modulus, align);
9228 return modulus;
9233 /* If we get here, we were unable to determine anything useful about the
9234 expression. */
9235 return 1;
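/* For example, the address of a decl with 8-byte alignment yields
   modulus 8 and residue 0; that address plus the constant 3 yields
   modulus 8 and residue 3, i.e. the low three bits of the pointer
   are known to be 011.  */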
9239 /* Fold a binary expression of code CODE and type TYPE with operands
9240 OP0 and OP1. Return the folded expression if folding is
9241 successful. Otherwise, return NULL_TREE. */
9243 tree
9244 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9246 enum tree_code_class kind = TREE_CODE_CLASS (code);
9247 tree arg0, arg1, tem;
9248 tree t1 = NULL_TREE;
9249 bool strict_overflow_p;
9251 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9252 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9253 && TREE_CODE_LENGTH (code) == 2
9254 && op0 != NULL_TREE
9255 && op1 != NULL_TREE);
9257 arg0 = op0;
9258 arg1 = op1;
9260 /* Strip any conversions that don't change the mode. This is
9261 safe for every expression, except for a comparison expression
9262 because its signedness is derived from its operands. So, in
9263 the latter case, only strip conversions that don't change the
9264 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9265 preserved.
9267 Note that this is done as an internal manipulation within the
9268 constant folder, in order to find the simplest representation
9269 of the arguments so that their form can be studied. In any
9270 cases, the appropriate type conversions should be put back in
9271 the tree that will get out of the constant folder. */
9273 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9275 STRIP_SIGN_NOPS (arg0);
9276 STRIP_SIGN_NOPS (arg1);
9278 else
9280 STRIP_NOPS (arg0);
9281 STRIP_NOPS (arg1);
9284 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9285 constant but we can't do arithmetic on them. */
9286 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9287 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9288 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9289 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9290 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9291 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9293 if (kind == tcc_binary)
9295 /* Make sure type and arg0 have the same saturating flag. */
9296 gcc_assert (TYPE_SATURATING (type)
9297 == TYPE_SATURATING (TREE_TYPE (arg0)));
9298 tem = const_binop (code, arg0, arg1, 0);
9300 else if (kind == tcc_comparison)
9301 tem = fold_relational_const (code, type, arg0, arg1);
9302 else
9303 tem = NULL_TREE;
9305 if (tem != NULL_TREE)
9307 if (TREE_TYPE (tem) != type)
9308 tem = fold_convert (type, tem);
9309 return tem;
9313 /* If this is a commutative operation, and ARG0 is a constant, move it
9314 to ARG1 to reduce the number of tests below. */
9315 if (commutative_tree_code (code)
9316 && tree_swap_operands_p (arg0, arg1, true))
9317 return fold_build2 (code, type, op1, op0);
9319 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9321 First check for cases where an arithmetic operation is applied to a
9322 compound, conditional, or comparison operation. Push the arithmetic
9323 operation inside the compound or conditional to see if any folding
9324 can then be done. Convert comparison to conditional for this purpose.
9325 This also optimizes non-constant cases that used to be done in
9326 expand_expr.
9328 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9329 one of the operands is a truth value and the other is a truth
9330 value or a BIT_AND_EXPR with the constant 1.  In that case, the
9331 code below would make the expression more complex. Change it to a
9332 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9333 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9335 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9336 || code == EQ_EXPR || code == NE_EXPR)
9337 && ((truth_value_p (TREE_CODE (arg0))
9338 && (truth_value_p (TREE_CODE (arg1))
9339 || (TREE_CODE (arg1) == BIT_AND_EXPR
9340 && integer_onep (TREE_OPERAND (arg1, 1)))))
9341 || (truth_value_p (TREE_CODE (arg1))
9342 && (truth_value_p (TREE_CODE (arg0))
9343 || (TREE_CODE (arg0) == BIT_AND_EXPR
9344 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9346 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9347 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9348 : TRUTH_XOR_EXPR,
9349 boolean_type_node,
9350 fold_convert (boolean_type_node, arg0),
9351 fold_convert (boolean_type_node, arg1));
9353 if (code == EQ_EXPR)
9354 tem = invert_truthvalue (tem);
9356 return fold_convert (type, tem);
9359 if (TREE_CODE_CLASS (code) == tcc_binary
9360 || TREE_CODE_CLASS (code) == tcc_comparison)
9362 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9363 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9364 fold_build2 (code, type,
9365 fold_convert (TREE_TYPE (op0),
9366 TREE_OPERAND (arg0, 1)),
9367 op1));
9368 if (TREE_CODE (arg1) == COMPOUND_EXPR
9369 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9370 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9371 fold_build2 (code, type, op0,
9372 fold_convert (TREE_TYPE (op1),
9373 TREE_OPERAND (arg1, 1))));
9375 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9377 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9378 arg0, arg1,
9379 /*cond_first_p=*/1);
9380 if (tem != NULL_TREE)
9381 return tem;
9384 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9386 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9387 arg1, arg0,
9388 /*cond_first_p=*/0);
9389 if (tem != NULL_TREE)
9390 return tem;
9394 switch (code)
9396 case POINTER_PLUS_EXPR:
9397 /* 0 +p index -> (type)index */
9398 if (integer_zerop (arg0))
9399 return non_lvalue (fold_convert (type, arg1));
9401 /* PTR +p 0 -> PTR */
9402 if (integer_zerop (arg1))
9403 return non_lvalue (fold_convert (type, arg0));
9405 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9406 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9407 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9408 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9409 fold_convert (sizetype, arg1),
9410 fold_convert (sizetype, arg0)));
9412 /* index +p PTR -> PTR +p index */
9413 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9414 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9415 return fold_build2 (POINTER_PLUS_EXPR, type,
9416 fold_convert (type, arg1),
9417 fold_convert (sizetype, arg0));
9419 /* (PTR +p B) +p A -> PTR +p (B + A) */
9420 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9422 tree inner;
9423 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9424 tree arg00 = TREE_OPERAND (arg0, 0);
9425 inner = fold_build2 (PLUS_EXPR, sizetype,
9426 arg01, fold_convert (sizetype, arg1));
9427 return fold_convert (type,
9428 fold_build2 (POINTER_PLUS_EXPR,
9429 TREE_TYPE (arg00), arg00, inner));
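/* For example, (PTR p+ 4) p+ 8 becomes PTR p+ 12 once both
   offsets have been folded together in sizetype.  */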
9432 /* PTR_CST +p CST -> CST1 */
9433 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9434 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9436 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9437 of the array.  The loop optimizer sometimes produces this type of
9438 expression.  */
9439 if (TREE_CODE (arg0) == ADDR_EXPR)
9441 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9442 if (tem)
9443 return fold_convert (type, tem);
9446 return NULL_TREE;
9448 case PLUS_EXPR:
9449 /* PTR + INT -> (INT)(PTR p+ INT) */
9450 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9451 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9452 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9453 TREE_TYPE (arg0),
9454 arg0,
9455 fold_convert (sizetype, arg1)));
9456 /* INT + PTR -> (INT)(PTR p+ INT) */
9457 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9458 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9459 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9460 TREE_TYPE (arg1),
9461 arg1,
9462 fold_convert (sizetype, arg0)));
9463 /* A + (-B) -> A - B */
9464 if (TREE_CODE (arg1) == NEGATE_EXPR)
9465 return fold_build2 (MINUS_EXPR, type,
9466 fold_convert (type, arg0),
9467 fold_convert (type, TREE_OPERAND (arg1, 0)));
9468 /* (-A) + B -> B - A */
9469 if (TREE_CODE (arg0) == NEGATE_EXPR
9470 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9471 return fold_build2 (MINUS_EXPR, type,
9472 fold_convert (type, arg1),
9473 fold_convert (type, TREE_OPERAND (arg0, 0)));
9475 if (INTEGRAL_TYPE_P (type))
9477 /* Convert ~A + 1 to -A. */
9478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9479 && integer_onep (arg1))
9480 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9482 /* ~X + X is -1. */
9483 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9484 && !TYPE_OVERFLOW_TRAPS (type))
9486 tree tem = TREE_OPERAND (arg0, 0);
9488 STRIP_NOPS (tem);
9489 if (operand_equal_p (tem, arg1, 0))
9491 t1 = build_int_cst_type (type, -1);
9492 return omit_one_operand (type, t1, arg1);
9496 /* X + ~X is -1. */
9497 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9498 && !TYPE_OVERFLOW_TRAPS (type))
9500 tree tem = TREE_OPERAND (arg1, 0);
9502 STRIP_NOPS (tem);
9503 if (operand_equal_p (arg0, tem, 0))
9505 t1 = build_int_cst_type (type, -1);
9506 return omit_one_operand (type, t1, arg0);
9510 /* X + (X / CST) * -CST is X % CST. */
9511 if (TREE_CODE (arg1) == MULT_EXPR
9512 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9513 && operand_equal_p (arg0,
9514 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9516 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9517 tree cst1 = TREE_OPERAND (arg1, 1);
9518 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9519 if (sum && integer_zerop (sum))
9520 return fold_convert (type,
9521 fold_build2 (TRUNC_MOD_EXPR,
9522 TREE_TYPE (arg0), arg0, cst0));
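/* For example, X + (X / 4) * -4 becomes X % 4: the two constants
   sum to zero, and truncating division guarantees
   X == (X / 4) * 4 + X % 4.  */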
9526 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9527 same or one. Make sure type is not saturating.
9528 fold_plusminus_mult_expr will re-associate. */
9529 if ((TREE_CODE (arg0) == MULT_EXPR
9530 || TREE_CODE (arg1) == MULT_EXPR)
9531 && !TYPE_SATURATING (type)
9532 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9534 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9535 if (tem)
9536 return tem;
9539 if (! FLOAT_TYPE_P (type))
9541 if (integer_zerop (arg1))
9542 return non_lvalue (fold_convert (type, arg0));
9544 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9545 with a constant, and the two constants have no bits in common,
9546 we should treat this as a BIT_IOR_EXPR since this may produce more
9547 simplifications. */
9548 if (TREE_CODE (arg0) == BIT_AND_EXPR
9549 && TREE_CODE (arg1) == BIT_AND_EXPR
9550 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9551 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9552 && integer_zerop (const_binop (BIT_AND_EXPR,
9553 TREE_OPERAND (arg0, 1),
9554 TREE_OPERAND (arg1, 1), 0)))
9556 code = BIT_IOR_EXPR;
9557 goto bit_ior;
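/* For example, (X & 0xf0) + (Y & 0x0f): the two masks share no
   bits, so the addition cannot carry and the sum equals
   (X & 0xf0) | (Y & 0x0f).  */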
9560 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9561 (plus (plus (mult) (mult)) (foo)) so that we can
9562 take advantage of the factoring cases below. */
9563 if (((TREE_CODE (arg0) == PLUS_EXPR
9564 || TREE_CODE (arg0) == MINUS_EXPR)
9565 && TREE_CODE (arg1) == MULT_EXPR)
9566 || ((TREE_CODE (arg1) == PLUS_EXPR
9567 || TREE_CODE (arg1) == MINUS_EXPR)
9568 && TREE_CODE (arg0) == MULT_EXPR))
9570 tree parg0, parg1, parg, marg;
9571 enum tree_code pcode;
9573 if (TREE_CODE (arg1) == MULT_EXPR)
9574 parg = arg0, marg = arg1;
9575 else
9576 parg = arg1, marg = arg0;
9577 pcode = TREE_CODE (parg);
9578 parg0 = TREE_OPERAND (parg, 0);
9579 parg1 = TREE_OPERAND (parg, 1);
9580 STRIP_NOPS (parg0);
9581 STRIP_NOPS (parg1);
9583 if (TREE_CODE (parg0) == MULT_EXPR
9584 && TREE_CODE (parg1) != MULT_EXPR)
9585 return fold_build2 (pcode, type,
9586 fold_build2 (PLUS_EXPR, type,
9587 fold_convert (type, parg0),
9588 fold_convert (type, marg)),
9589 fold_convert (type, parg1));
9590 if (TREE_CODE (parg0) != MULT_EXPR
9591 && TREE_CODE (parg1) == MULT_EXPR)
9592 return fold_build2 (PLUS_EXPR, type,
9593 fold_convert (type, parg0),
9594 fold_build2 (pcode, type,
9595 fold_convert (type, marg),
9596 fold_convert (type,
9597 parg1)));
9600 else
9602 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9603 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9604 return non_lvalue (fold_convert (type, arg0));
9606 /* Likewise if the operands are reversed. */
9607 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9608 return non_lvalue (fold_convert (type, arg1));
9610 /* Convert X + -C into X - C. */
9611 if (TREE_CODE (arg1) == REAL_CST
9612 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9614 tem = fold_negate_const (arg1, type);
9615 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9616 return fold_build2 (MINUS_EXPR, type,
9617 fold_convert (type, arg0),
9618 fold_convert (type, tem));
9621 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9622 to __complex__ ( x, y ). This is not the same for SNaNs or
9623 if signed zeros are involved. */
9624 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9625 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9626 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9628 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9629 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9630 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9631 bool arg0rz = false, arg0iz = false;
9632 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9633 || (arg0i && (arg0iz = real_zerop (arg0i))))
9635 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9636 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9637 if (arg0rz && arg1i && real_zerop (arg1i))
9639 tree rp = arg1r ? arg1r
9640 : build1 (REALPART_EXPR, rtype, arg1);
9641 tree ip = arg0i ? arg0i
9642 : build1 (IMAGPART_EXPR, rtype, arg0);
9643 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9645 else if (arg0iz && arg1r && real_zerop (arg1r))
9647 tree rp = arg0r ? arg0r
9648 : build1 (REALPART_EXPR, rtype, arg0);
9649 tree ip = arg1i ? arg1i
9650 : build1 (IMAGPART_EXPR, rtype, arg1);
9651 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9656 if (flag_unsafe_math_optimizations
9657 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9658 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9659 && (tem = distribute_real_division (code, type, arg0, arg1)))
9660 return tem;
9662 /* Convert x+x into x*2.0. */
9663 if (operand_equal_p (arg0, arg1, 0)
9664 && SCALAR_FLOAT_TYPE_P (type))
9665 return fold_build2 (MULT_EXPR, type, arg0,
9666 build_real (type, dconst2));
9668 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9669 We associate floats only if the user has specified
9670 -fassociative-math. */
9671 if (flag_associative_math
9672 && TREE_CODE (arg1) == PLUS_EXPR
9673 && TREE_CODE (arg0) != MULT_EXPR)
9675 tree tree10 = TREE_OPERAND (arg1, 0);
9676 tree tree11 = TREE_OPERAND (arg1, 1);
9677 if (TREE_CODE (tree11) == MULT_EXPR
9678 && TREE_CODE (tree10) == MULT_EXPR)
9680 tree tree0;
9681 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9682 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9685 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9686 We associate floats only if the user has specified
9687 -fassociative-math. */
9688 if (flag_associative_math
9689 && TREE_CODE (arg0) == PLUS_EXPR
9690 && TREE_CODE (arg1) != MULT_EXPR)
9692 tree tree00 = TREE_OPERAND (arg0, 0);
9693 tree tree01 = TREE_OPERAND (arg0, 1);
9694 if (TREE_CODE (tree01) == MULT_EXPR
9695 && TREE_CODE (tree00) == MULT_EXPR)
9697 tree tree0;
9698 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9699 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9704 bit_rotate:
9705 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9706 is a rotate of A by C1 bits. */
9707 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9708 is a rotate of A by B bits. */
9710 enum tree_code code0, code1;
9711 tree rtype;
9712 code0 = TREE_CODE (arg0);
9713 code1 = TREE_CODE (arg1);
9714 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9715 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9716 && operand_equal_p (TREE_OPERAND (arg0, 0),
9717 TREE_OPERAND (arg1, 0), 0)
9718 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9719 TYPE_UNSIGNED (rtype))
9720 /* Only create rotates in complete modes. Other cases are not
9721 expanded properly. */
9722 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9724 tree tree01, tree11;
9725 enum tree_code code01, code11;
9727 tree01 = TREE_OPERAND (arg0, 1);
9728 tree11 = TREE_OPERAND (arg1, 1);
9729 STRIP_NOPS (tree01);
9730 STRIP_NOPS (tree11);
9731 code01 = TREE_CODE (tree01);
9732 code11 = TREE_CODE (tree11);
9733 if (code01 == INTEGER_CST
9734 && code11 == INTEGER_CST
9735 && TREE_INT_CST_HIGH (tree01) == 0
9736 && TREE_INT_CST_HIGH (tree11) == 0
9737 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9738 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9739 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9740 code0 == LSHIFT_EXPR ? tree01 : tree11);
9741 else if (code11 == MINUS_EXPR)
9743 tree tree110, tree111;
9744 tree110 = TREE_OPERAND (tree11, 0);
9745 tree111 = TREE_OPERAND (tree11, 1);
9746 STRIP_NOPS (tree110);
9747 STRIP_NOPS (tree111);
9748 if (TREE_CODE (tree110) == INTEGER_CST
9749 && 0 == compare_tree_int (tree110,
9750 TYPE_PRECISION
9751 (TREE_TYPE (TREE_OPERAND
9752 (arg0, 0))))
9753 && operand_equal_p (tree01, tree111, 0))
9754 return build2 ((code0 == LSHIFT_EXPR
9755 ? LROTATE_EXPR
9756 : RROTATE_EXPR),
9757 type, TREE_OPERAND (arg0, 0), tree01);
9759 else if (code01 == MINUS_EXPR)
9761 tree tree010, tree011;
9762 tree010 = TREE_OPERAND (tree01, 0);
9763 tree011 = TREE_OPERAND (tree01, 1);
9764 STRIP_NOPS (tree010);
9765 STRIP_NOPS (tree011);
9766 if (TREE_CODE (tree010) == INTEGER_CST
9767 && 0 == compare_tree_int (tree010,
9768 TYPE_PRECISION
9769 (TREE_TYPE (TREE_OPERAND
9770 (arg0, 0))))
9771 && operand_equal_p (tree11, tree011, 0))
9772 return build2 ((code0 != LSHIFT_EXPR
9773 ? LROTATE_EXPR
9774 : RROTATE_EXPR),
9775 type, TREE_OPERAND (arg0, 0), tree11);
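/* For example, with a 32-bit unsigned A, both
   (A << 3) + (A >> 29) and (A << B) + (A >> (32 - B)) are
   recognized above as left rotates of A.  */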
9780 associate:
9781 /* In most languages, we can't associate operations on floats through
9782 parentheses. Rather than remember where the parentheses were, we
9783 don't associate floats at all, unless the user has specified
9784 -fassociative-math.
9785 And, we need to make sure type is not saturating. */
9787 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9788 && !TYPE_SATURATING (type))
9790 tree var0, con0, lit0, minus_lit0;
9791 tree var1, con1, lit1, minus_lit1;
9792 bool ok = true;
9794 /* Split both trees into variables, constants, and literals. Then
9795 associate each group together, the constants with literals,
9796 then the result with variables. This increases the chances of
9797 literals being recombined later and of generating relocatable
9798 expressions for the sum of a constant and literal. */
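/* As a rough illustration, for unsigned X and Y the sum
   (X + 3) + (Y + 4) splits into variables X, Y and literals 3, 4,
   and is recombined below as (X + Y) + 7.  */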
9799 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9800 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9801 code == MINUS_EXPR);
9803 /* With undefined overflow we can only associate constants
9804 with one variable. */
9805 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9806 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9807 && var0 && var1)
9809 tree tmp0 = var0;
9810 tree tmp1 = var1;
9812 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9813 tmp0 = TREE_OPERAND (tmp0, 0);
9814 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9815 tmp1 = TREE_OPERAND (tmp1, 0);
9816 /* The only case we can still associate with two variables
9817 is if they are the same, modulo negation. */
9818 if (!operand_equal_p (tmp0, tmp1, 0))
9819 ok = false;
9822 /* Only do something if we found more than two objects. Otherwise,
9823 nothing has changed and we risk infinite recursion. */
9824 if (ok
9825 && (2 < ((var0 != 0) + (var1 != 0)
9826 + (con0 != 0) + (con1 != 0)
9827 + (lit0 != 0) + (lit1 != 0)
9828 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9830 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9831 if (code == MINUS_EXPR)
9832 code = PLUS_EXPR;
9834 var0 = associate_trees (var0, var1, code, type);
9835 con0 = associate_trees (con0, con1, code, type);
9836 lit0 = associate_trees (lit0, lit1, code, type);
9837 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9839 /* Preserve the MINUS_EXPR if the negative part of the literal is
9840 greater than the positive part. Otherwise, the multiplicative
9841 folding code (i.e. extract_muldiv) may be fooled when
9842 unsigned constants are subtracted, as in the following
9843 example: ((X*2 + 4) - 8U)/2. */
9844 if (minus_lit0 && lit0)
9846 if (TREE_CODE (lit0) == INTEGER_CST
9847 && TREE_CODE (minus_lit0) == INTEGER_CST
9848 && tree_int_cst_lt (lit0, minus_lit0))
9850 minus_lit0 = associate_trees (minus_lit0, lit0,
9851 MINUS_EXPR, type);
9852 lit0 = 0;
9854 else
9856 lit0 = associate_trees (lit0, minus_lit0,
9857 MINUS_EXPR, type);
9858 minus_lit0 = 0;
9861 if (minus_lit0)
9863 if (con0 == 0)
9864 return fold_convert (type,
9865 associate_trees (var0, minus_lit0,
9866 MINUS_EXPR, type));
9867 else
9869 con0 = associate_trees (con0, minus_lit0,
9870 MINUS_EXPR, type);
9871 return fold_convert (type,
9872 associate_trees (var0, con0,
9873 PLUS_EXPR, type));
9877 con0 = associate_trees (con0, lit0, code, type);
9878 return fold_convert (type, associate_trees (var0, con0,
9879 code, type));
9883 return NULL_TREE;
9885 case MINUS_EXPR:
9886 /* Pointer simplifications for subtraction, simple reassociations. */
9887 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9889 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9890 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9891 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9893 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9894 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9895 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9896 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9897 return fold_build2 (PLUS_EXPR, type,
9898 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9899 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9901 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9902 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9904 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9905 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9906 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9907 if (tmp)
9908 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9911 /* A - (-B) -> A + B */
9912 if (TREE_CODE (arg1) == NEGATE_EXPR)
9913 return fold_build2 (PLUS_EXPR, type, op0,
9914 fold_convert (type, TREE_OPERAND (arg1, 0)));
9915 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9916 if (TREE_CODE (arg0) == NEGATE_EXPR
9917 && (FLOAT_TYPE_P (type)
9918 || INTEGRAL_TYPE_P (type))
9919 && negate_expr_p (arg1)
9920 && reorder_operands_p (arg0, arg1))
9921 return fold_build2 (MINUS_EXPR, type,
9922 fold_convert (type, negate_expr (arg1)),
9923 fold_convert (type, TREE_OPERAND (arg0, 0)));
9924 /* Convert -A - 1 to ~A. */
9925 if (INTEGRAL_TYPE_P (type)
9926 && TREE_CODE (arg0) == NEGATE_EXPR
9927 && integer_onep (arg1)
9928 && !TYPE_OVERFLOW_TRAPS (type))
9929 return fold_build1 (BIT_NOT_EXPR, type,
9930 fold_convert (type, TREE_OPERAND (arg0, 0)));
9932 /* Convert -1 - A to ~A. */
9933 if (INTEGRAL_TYPE_P (type)
9934 && integer_all_onesp (arg0))
9935 return fold_build1 (BIT_NOT_EXPR, type, op1);
9938 /* X - (X / CST) * CST is X % CST. */
9939 if (INTEGRAL_TYPE_P (type)
9940 && TREE_CODE (arg1) == MULT_EXPR
9941 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9942 && operand_equal_p (arg0,
9943 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9944 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9945 TREE_OPERAND (arg1, 1), 0))
9946 return fold_convert (type,
9947 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9948 arg0, TREE_OPERAND (arg1, 1)));
9950 if (! FLOAT_TYPE_P (type))
9952 if (integer_zerop (arg0))
9953 return negate_expr (fold_convert (type, arg1));
9954 if (integer_zerop (arg1))
9955 return non_lvalue (fold_convert (type, arg0));
9957 /* Fold A - (A & B) into ~B & A. */
9958 if (!TREE_SIDE_EFFECTS (arg0)
9959 && TREE_CODE (arg1) == BIT_AND_EXPR)
9961 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9963 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9964 return fold_build2 (BIT_AND_EXPR, type,
9965 fold_build1 (BIT_NOT_EXPR, type, arg10),
9966 fold_convert (type, arg0));
9968 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9970 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9971 return fold_build2 (BIT_AND_EXPR, type,
9972 fold_build1 (BIT_NOT_EXPR, type, arg11),
9973 fold_convert (type, arg0));
9977 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9978 any power of 2 minus 1. */
9979 if (TREE_CODE (arg0) == BIT_AND_EXPR
9980 && TREE_CODE (arg1) == BIT_AND_EXPR
9981 && operand_equal_p (TREE_OPERAND (arg0, 0),
9982 TREE_OPERAND (arg1, 0), 0))
9984 tree mask0 = TREE_OPERAND (arg0, 1);
9985 tree mask1 = TREE_OPERAND (arg1, 1);
9986 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9988 if (operand_equal_p (tem, mask1, 0))
9990 tem = fold_build2 (BIT_XOR_EXPR, type,
9991 TREE_OPERAND (arg0, 0), mask1);
9992 return fold_build2 (MINUS_EXPR, type, tem, mask1);
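/* For example, with A == 13 and B == 7:
   (13 & ~7) - (13 & 7) == 8 - 5 == 3, and likewise
   (13 ^ 7) - 7 == 10 - 7 == 3.  */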
9997 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9998 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9999 return non_lvalue (fold_convert (type, arg0));
10001 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10002 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10003 (-ARG1 + ARG0) reduces to -ARG1. */
10004 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10005 return negate_expr (fold_convert (type, arg1));
10007 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10008 __complex__ ( x, -y ). This is not the same for SNaNs or if
10009 signed zeros are involved. */
10010 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10011 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10012 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10014 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10015 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10016 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10017 bool arg0rz = false, arg0iz = false;
10018 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10019 || (arg0i && (arg0iz = real_zerop (arg0i))))
10021 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10022 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10023 if (arg0rz && arg1i && real_zerop (arg1i))
10025 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10026 arg1r ? arg1r
10027 : build1 (REALPART_EXPR, rtype, arg1));
10028 tree ip = arg0i ? arg0i
10029 : build1 (IMAGPART_EXPR, rtype, arg0);
10030 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10032 else if (arg0iz && arg1r && real_zerop (arg1r))
10034 tree rp = arg0r ? arg0r
10035 : build1 (REALPART_EXPR, rtype, arg0);
10036 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10037 arg1i ? arg1i
10038 : build1 (IMAGPART_EXPR, rtype, arg1));
10039 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10044 /* Fold &x - &x. This can happen from &x.foo - &x.
10045 This is unsafe for certain floats even in non-IEEE formats.
10046 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10047 Also note that operand_equal_p is always false if an operand
10048 is volatile. */
10050 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10051 && operand_equal_p (arg0, arg1, 0))
10052 return fold_convert (type, integer_zero_node);
10054 /* A - B -> A + (-B) if B is easily negatable. */
10055 if (negate_expr_p (arg1)
10056 && ((FLOAT_TYPE_P (type)
10057 /* Avoid this transformation if B is a positive REAL_CST. */
10058 && (TREE_CODE (arg1) != REAL_CST
10059 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10060 || INTEGRAL_TYPE_P (type)))
10061 return fold_build2 (PLUS_EXPR, type,
10062 fold_convert (type, arg0),
10063 fold_convert (type, negate_expr (arg1)));
10065 /* Try folding difference of addresses. */
10067 HOST_WIDE_INT diff;
10069 if ((TREE_CODE (arg0) == ADDR_EXPR
10070 || TREE_CODE (arg1) == ADDR_EXPR)
10071 && ptr_difference_const (arg0, arg1, &diff))
10072 return build_int_cst_type (type, diff);
10075 /* Fold &a[i] - &a[j] to i-j. */
10076 if (TREE_CODE (arg0) == ADDR_EXPR
10077 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10078 && TREE_CODE (arg1) == ADDR_EXPR
10079 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10081 tree aref0 = TREE_OPERAND (arg0, 0);
10082 tree aref1 = TREE_OPERAND (arg1, 0);
10083 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10084 TREE_OPERAND (aref1, 0), 0))
10086 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10087 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10088 tree esz = array_ref_element_size (aref0);
10089 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10090 return fold_build2 (MULT_EXPR, type, diff,
10091 fold_convert (type, esz));
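/* Here &a[i] - &a[j] folds to (i - j) * sizeof (a[0]), the byte
   distance between the two elements.  */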
10096 if (flag_unsafe_math_optimizations
10097 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10098 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10099 && (tem = distribute_real_division (code, type, arg0, arg1)))
10100 return tem;
10102 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10103 same or one. Make sure type is not saturating.
10104 fold_plusminus_mult_expr will re-associate. */
10105 if ((TREE_CODE (arg0) == MULT_EXPR
10106 || TREE_CODE (arg1) == MULT_EXPR)
10107 && !TYPE_SATURATING (type)
10108 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10110 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10111 if (tem)
10112 return tem;
10115 goto associate;
10117 case MULT_EXPR:
10118 /* (-A) * (-B) -> A * B */
10119 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10120 return fold_build2 (MULT_EXPR, type,
10121 fold_convert (type, TREE_OPERAND (arg0, 0)),
10122 fold_convert (type, negate_expr (arg1)));
10123 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10124 return fold_build2 (MULT_EXPR, type,
10125 fold_convert (type, negate_expr (arg0)),
10126 fold_convert (type, TREE_OPERAND (arg1, 0)));
10128 if (! FLOAT_TYPE_P (type))
10130 if (integer_zerop (arg1))
10131 return omit_one_operand (type, arg1, arg0);
10132 if (integer_onep (arg1))
10133 return non_lvalue (fold_convert (type, arg0));
10134 /* Transform x * -1 into -x. Make sure to do the negation
10135 on the original operand with conversions not stripped
10136 because we can only strip non-sign-changing conversions. */
10137 if (integer_all_onesp (arg1))
10138 return fold_convert (type, negate_expr (op0));
10139 /* Transform x * -C into -x * C if x is easily negatable. */
10140 if (TREE_CODE (arg1) == INTEGER_CST
10141 && tree_int_cst_sgn (arg1) == -1
10142 && negate_expr_p (arg0)
10143 && (tem = negate_expr (arg1)) != arg1
10144 && !TREE_OVERFLOW (tem))
10145 return fold_build2 (MULT_EXPR, type,
10146 fold_convert (type, negate_expr (arg0)), tem);
10148 /* (a * (1 << b)) is (a << b) */
10149 if (TREE_CODE (arg1) == LSHIFT_EXPR
10150 && integer_onep (TREE_OPERAND (arg1, 0)))
10151 return fold_build2 (LSHIFT_EXPR, type, op0,
10152 TREE_OPERAND (arg1, 1));
10153 if (TREE_CODE (arg0) == LSHIFT_EXPR
10154 && integer_onep (TREE_OPERAND (arg0, 0)))
10155 return fold_build2 (LSHIFT_EXPR, type, op1,
10156 TREE_OPERAND (arg0, 1));
10158 /* (A + A) * C -> A * 2 * C */
10159 if (TREE_CODE (arg0) == PLUS_EXPR
10160 && TREE_CODE (arg1) == INTEGER_CST
10161 && operand_equal_p (TREE_OPERAND (arg0, 0),
10162 TREE_OPERAND (arg0, 1), 0))
10163 return fold_build2 (MULT_EXPR, type,
10164 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10165 TREE_OPERAND (arg0, 1)),
10166 fold_build2 (MULT_EXPR, type,
10167 build_int_cst (type, 2) , arg1));
10169 strict_overflow_p = false;
10170 if (TREE_CODE (arg1) == INTEGER_CST
10171 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10172 &strict_overflow_p)))
10174 if (strict_overflow_p)
10175 fold_overflow_warning (("assuming signed overflow does not "
10176 "occur when simplifying "
10177 "multiplication"),
10178 WARN_STRICT_OVERFLOW_MISC);
10179 return fold_convert (type, tem);
10182 /* Optimize z * conj(z) for integer complex numbers. */
10183 if (TREE_CODE (arg0) == CONJ_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10185 return fold_mult_zconjz (type, arg1);
10186 if (TREE_CODE (arg1) == CONJ_EXPR
10187 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10188 return fold_mult_zconjz (type, arg0);
10190 else
10192 /* Maybe fold x * 0 to 0. The expressions aren't the same
10193 when x is NaN, since x * 0 is also NaN. Nor are they the
10194 same in modes with signed zeros, since multiplying a
10195 negative value by 0 gives -0, not +0. */
10196 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10197 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10198 && real_zerop (arg1))
10199 return omit_one_operand (type, arg1, arg0);
10200 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10201 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10202 && real_onep (arg1))
10203 return non_lvalue (fold_convert (type, arg0));
10205 /* Transform x * -1.0 into -x. */
10206 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10207 && real_minus_onep (arg1))
10208 return fold_convert (type, negate_expr (arg0));
10210 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10211 the result for floating point types due to rounding, so it is applied
10212 only if -fassociative-math was specified.  */
10213 if (flag_associative_math
10214 && TREE_CODE (arg0) == RDIV_EXPR
10215 && TREE_CODE (arg1) == REAL_CST
10216 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10218 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10219 arg1, 0);
10220 if (tem)
10221 return fold_build2 (RDIV_EXPR, type, tem,
10222 TREE_OPERAND (arg0, 1));
10225 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10226 if (operand_equal_p (arg0, arg1, 0))
10228 tree tem = fold_strip_sign_ops (arg0);
10229 if (tem != NULL_TREE)
10231 tem = fold_convert (type, tem);
10232 return fold_build2 (MULT_EXPR, type, tem, tem);
10236 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10237 This is not the same for NaNs or if signed zeros are
10238 involved. */
10239 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10240 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10241 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10242 && TREE_CODE (arg1) == COMPLEX_CST
10243 && real_zerop (TREE_REALPART (arg1)))
10245 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10246 if (real_onep (TREE_IMAGPART (arg1)))
10247 return fold_build2 (COMPLEX_EXPR, type,
10248 negate_expr (fold_build1 (IMAGPART_EXPR,
10249 rtype, arg0)),
10250 fold_build1 (REALPART_EXPR, rtype, arg0));
10251 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10252 return fold_build2 (COMPLEX_EXPR, type,
10253 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10254 negate_expr (fold_build1 (REALPART_EXPR,
10255 rtype, arg0)));
10258 /* Optimize z * conj(z) for floating point complex numbers.
10259 Guarded by flag_unsafe_math_optimizations as non-finite
10260 imaginary components don't produce scalar results. */
10261 if (flag_unsafe_math_optimizations
10262 && TREE_CODE (arg0) == CONJ_EXPR
10263 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10264 return fold_mult_zconjz (type, arg1);
10265 if (flag_unsafe_math_optimizations
10266 && TREE_CODE (arg1) == CONJ_EXPR
10267 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10268 return fold_mult_zconjz (type, arg0);
10270 if (flag_unsafe_math_optimizations)
10272 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10273 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10275 /* Optimizations of root(...)*root(...). */
10276 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10278 tree rootfn, arg;
10279 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10280 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10282 /* Optimize sqrt(x)*sqrt(x) as x. */
10283 if (BUILTIN_SQRT_P (fcode0)
10284 && operand_equal_p (arg00, arg10, 0)
10285 && ! HONOR_SNANS (TYPE_MODE (type)))
10286 return arg00;
10288 /* Optimize root(x)*root(y) as root(x*y). */
10289 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10290 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10291 return build_call_expr (rootfn, 1, arg);
10294 /* Optimize expN(x)*expN(y) as expN(x+y). */
10295 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10297 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10298 tree arg = fold_build2 (PLUS_EXPR, type,
10299 CALL_EXPR_ARG (arg0, 0),
10300 CALL_EXPR_ARG (arg1, 0));
10301 return build_call_expr (expfn, 1, arg);
10304 /* Optimizations of pow(...)*pow(...). */
10305 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10306 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10307 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10309 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10310 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10311 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10312 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10314 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10315 if (operand_equal_p (arg01, arg11, 0))
10317 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10318 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10319 return build_call_expr (powfn, 2, arg, arg01);
10322 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10323 if (operand_equal_p (arg00, arg10, 0))
10325 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10326 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10327 return build_call_expr (powfn, 2, arg00, arg);
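/* For example, pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).
   Both pow folds above are only reached under
   -funsafe-math-optimizations, since rounding may differ.  */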
10331 /* Optimize tan(x)*cos(x) as sin(x). */
10332 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10333 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10334 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10335 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10336 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10337 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10338 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10339 CALL_EXPR_ARG (arg1, 0), 0))
10341 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10343 if (sinfn != NULL_TREE)
10344 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10347 /* Optimize x*pow(x,c) as pow(x,c+1). */
10348 if (fcode1 == BUILT_IN_POW
10349 || fcode1 == BUILT_IN_POWF
10350 || fcode1 == BUILT_IN_POWL)
10352 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10353 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10354 if (TREE_CODE (arg11) == REAL_CST
10355 && !TREE_OVERFLOW (arg11)
10356 && operand_equal_p (arg0, arg10, 0))
10358 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10359 REAL_VALUE_TYPE c;
10360 tree arg;
10362 c = TREE_REAL_CST (arg11);
10363 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10364 arg = build_real (type, c);
10365 return build_call_expr (powfn, 2, arg0, arg);
10369 /* Optimize pow(x,c)*x as pow(x,c+1). */
10370 if (fcode0 == BUILT_IN_POW
10371 || fcode0 == BUILT_IN_POWF
10372 || fcode0 == BUILT_IN_POWL)
10374 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10375 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10376 if (TREE_CODE (arg01) == REAL_CST
10377 && !TREE_OVERFLOW (arg01)
10378 && operand_equal_p (arg1, arg00, 0))
10380 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10381 REAL_VALUE_TYPE c;
10382 tree arg;
10384 c = TREE_REAL_CST (arg01);
10385 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10386 arg = build_real (type, c);
10387 return build_call_expr (powfn, 2, arg1, arg);
10391 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10392 if (! optimize_size
10393 && operand_equal_p (arg0, arg1, 0))
10395 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10397 if (powfn)
10399 tree arg = build_real (type, dconst2);
10400 return build_call_expr (powfn, 2, arg0, arg);
10405 goto associate;
10407 case BIT_IOR_EXPR:
10408 bit_ior:
10409 if (integer_all_onesp (arg1))
10410 return omit_one_operand (type, arg1, arg0);
10411 if (integer_zerop (arg1))
10412 return non_lvalue (fold_convert (type, arg0));
10413 if (operand_equal_p (arg0, arg1, 0))
10414 return non_lvalue (fold_convert (type, arg0));
10416 /* ~X | X is -1. */
10417 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10420 t1 = fold_convert (type, integer_zero_node);
10421 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10422 return omit_one_operand (type, t1, arg1);
10425 /* X | ~X is -1. */
10426 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10427 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10429 t1 = fold_convert (type, integer_zero_node);
10430 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10431 return omit_one_operand (type, t1, arg0);
10434 /* Canonicalize (X & C1) | C2. */
10435 if (TREE_CODE (arg0) == BIT_AND_EXPR
10436 && TREE_CODE (arg1) == INTEGER_CST
10437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10439 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10440 int width = TYPE_PRECISION (type), w;
10441 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10442 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10443 hi2 = TREE_INT_CST_HIGH (arg1);
10444 lo2 = TREE_INT_CST_LOW (arg1);
10446 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10447 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10448 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10450 if (width > HOST_BITS_PER_WIDE_INT)
10452 mhi = (unsigned HOST_WIDE_INT) -1
10453 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10454 mlo = -1;
10456 else
10458 mhi = 0;
10459 mlo = (unsigned HOST_WIDE_INT) -1
10460 >> (HOST_BITS_PER_WIDE_INT - width);
10463 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10464 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10465 return fold_build2 (BIT_IOR_EXPR, type,
10466 TREE_OPERAND (arg0, 0), arg1);
10468 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10469 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10470 mode which allows further optimizations. */
10471 hi1 &= mhi;
10472 lo1 &= mlo;
10473 hi2 &= mhi;
10474 lo2 &= mlo;
10475 hi3 = hi1 & ~hi2;
10476 lo3 = lo1 & ~lo2;
10477 for (w = BITS_PER_UNIT;
10478 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10479 w <<= 1)
10481 unsigned HOST_WIDE_INT mask
10482 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10483 if (((lo1 | lo2) & mask) == mask
10484 && (lo1 & ~mask) == 0 && hi1 == 0)
10486 hi3 = 0;
10487 lo3 = mask;
10488 break;
10491 if (hi3 != hi1 || lo3 != lo1)
10492 return fold_build2 (BIT_IOR_EXPR, type,
10493 fold_build2 (BIT_AND_EXPR, type,
10494 TREE_OPERAND (arg0, 0),
10495 build_int_cst_wide (type,
10496 lo3, hi3)),
10497 arg1);
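/* As a rough illustration, (X & 0x3f) | 0x0f becomes
   (X & 0x30) | 0x0f: the low four bits of C1 are already forced
   to 1 by C2, and the loop above finds no wider mode mask worth
   keeping.  */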
10500 /* (X & Y) | Y is (X, Y). */
10501 if (TREE_CODE (arg0) == BIT_AND_EXPR
10502 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10503 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10504 /* (X & Y) | X is (Y, X). */
10505 if (TREE_CODE (arg0) == BIT_AND_EXPR
10506 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10507 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10508 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10509 /* X | (X & Y) is (Y, X). */
10510 if (TREE_CODE (arg1) == BIT_AND_EXPR
10511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10512 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10513 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10514 /* X | (Y & X) is (Y, X). */
10515 if (TREE_CODE (arg1) == BIT_AND_EXPR
10516 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10517 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10518 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10520 t1 = distribute_bit_expr (code, type, arg0, arg1);
10521 if (t1 != NULL_TREE)
10522 return t1;
10524 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10526 This results in more efficient code for machines without a NAND
10527 instruction. Combine will canonicalize to the first form
10528 which will allow use of NAND instructions provided by the
10529 backend if they exist. */
10530 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10531 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10533 return fold_build1 (BIT_NOT_EXPR, type,
10534 build2 (BIT_AND_EXPR, type,
10535 fold_convert (type,
10536 TREE_OPERAND (arg0, 0)),
10537 fold_convert (type,
10538 TREE_OPERAND (arg1, 0))));
10541 /* See if this can be simplified into a rotate first. If that
10542 is unsuccessful continue in the association code. */
10543 goto bit_rotate;
10545 case BIT_XOR_EXPR:
10546 if (integer_zerop (arg1))
10547 return non_lvalue (fold_convert (type, arg0));
10548 if (integer_all_onesp (arg1))
10549 return fold_build1 (BIT_NOT_EXPR, type, op0);
10550 if (operand_equal_p (arg0, arg1, 0))
10551 return omit_one_operand (type, integer_zero_node, arg0);
10553 /* ~X ^ X is -1. */
10554 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10557 t1 = fold_convert (type, integer_zero_node);
10558 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10559 return omit_one_operand (type, t1, arg1);
10562 /* X ^ ~X is -1. */
10563 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10564 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10566 t1 = fold_convert (type, integer_zero_node);
10567 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10568 return omit_one_operand (type, t1, arg0);
10571 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10572 with a constant, and the two constants have no bits in common,
10573 we should treat this as a BIT_IOR_EXPR since this may produce more
10574 simplifications. */
10575 if (TREE_CODE (arg0) == BIT_AND_EXPR
10576 && TREE_CODE (arg1) == BIT_AND_EXPR
10577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10578 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10579 && integer_zerop (const_binop (BIT_AND_EXPR,
10580 TREE_OPERAND (arg0, 1),
10581 TREE_OPERAND (arg1, 1), 0)))
10583 code = BIT_IOR_EXPR;
10584 goto bit_ior;
10587 /* (X | Y) ^ X -> Y & ~X */
10588 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10589 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10591 tree t2 = TREE_OPERAND (arg0, 1);
10592 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10593 arg1);
10594 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10595 fold_convert (type, t1));
10596 return t1;
10599 /* (Y | X) ^ X -> Y & ~X */
10600 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10601 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10603 tree t2 = TREE_OPERAND (arg0, 0);
10604 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10605 arg1);
10606 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10607 fold_convert (type, t1));
10608 return t1;
10611 /* X ^ (X | Y) -> Y & ~X */
10612 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10613 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10615 tree t2 = TREE_OPERAND (arg1, 1);
10616 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10617 arg0);
10618 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10619 fold_convert (type, t1));
10620 return t1;
10623 /* X ^ (Y | X) -> Y & ~X */
10624 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10625 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10627 tree t2 = TREE_OPERAND (arg1, 0);
10628 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10629 arg0);
10630 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10631 fold_convert (type, t1));
10632 return t1;
10635 /* Convert ~X ^ ~Y to X ^ Y. */
10636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10637 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10638 return fold_build2 (code, type,
10639 fold_convert (type, TREE_OPERAND (arg0, 0)),
10640 fold_convert (type, TREE_OPERAND (arg1, 0)));
10642 /* Convert ~X ^ C to X ^ ~C. */
10643 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10644 && TREE_CODE (arg1) == INTEGER_CST)
10645 return fold_build2 (code, type,
10646 fold_convert (type, TREE_OPERAND (arg0, 0)),
10647 fold_build1 (BIT_NOT_EXPR, type, arg1));
10649 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10650 if (TREE_CODE (arg0) == BIT_AND_EXPR
10651 && integer_onep (TREE_OPERAND (arg0, 1))
10652 && integer_onep (arg1))
10653 return fold_build2 (EQ_EXPR, type, arg0,
10654 build_int_cst (TREE_TYPE (arg0), 0));
10656 /* Fold (X & Y) ^ Y as ~X & Y. */
10657 if (TREE_CODE (arg0) == BIT_AND_EXPR
10658 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10660 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10661 return fold_build2 (BIT_AND_EXPR, type,
10662 fold_build1 (BIT_NOT_EXPR, type, tem),
10663 fold_convert (type, arg1));
10665 /* Fold (X & Y) ^ X as ~Y & X. */
10666 if (TREE_CODE (arg0) == BIT_AND_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10668 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10670 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10671 return fold_build2 (BIT_AND_EXPR, type,
10672 fold_build1 (BIT_NOT_EXPR, type, tem),
10673 fold_convert (type, arg1));
10675 /* Fold X ^ (X & Y) as X & ~Y. */
10676 if (TREE_CODE (arg1) == BIT_AND_EXPR
10677 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10679 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10680 return fold_build2 (BIT_AND_EXPR, type,
10681 fold_convert (type, arg0),
10682 fold_build1 (BIT_NOT_EXPR, type, tem));
10684 /* Fold X ^ (Y & X) as ~Y & X. */
10685 if (TREE_CODE (arg1) == BIT_AND_EXPR
10686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10689 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10690 return fold_build2 (BIT_AND_EXPR, type,
10691 fold_build1 (BIT_NOT_EXPR, type, tem),
10692 fold_convert (type, arg0));
10695 /* See if this can be simplified into a rotate first. If that
10696 is unsuccessful continue in the association code. */
10697 goto bit_rotate;
10699 case BIT_AND_EXPR:
10700 if (integer_all_onesp (arg1))
10701 return non_lvalue (fold_convert (type, arg0));
10702 if (integer_zerop (arg1))
10703 return omit_one_operand (type, arg1, arg0);
10704 if (operand_equal_p (arg0, arg1, 0))
10705 return non_lvalue (fold_convert (type, arg0));
10707 /* ~X & X is always zero. */
10708 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10709 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10710 return omit_one_operand (type, integer_zero_node, arg1);
10712 /* X & ~X is always zero. */
10713 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10715 return omit_one_operand (type, integer_zero_node, arg0);
10717 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10718 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10719 && TREE_CODE (arg1) == INTEGER_CST
10720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10722 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10723 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10724 TREE_OPERAND (arg0, 0), tmp1);
10725 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10726 TREE_OPERAND (arg0, 1), tmp1);
10727 return fold_convert (type,
10728 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10729 tmp2, tmp3));
10732 /* (X | Y) & Y is (X, Y). */
10733 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10734 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10735 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10736 /* (X | Y) & X is (Y, X). */
10737 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10739 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10740 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10741 /* X & (X | Y) is (Y, X). */
10742 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10744 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10745 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10746 /* X & (Y | X) is (Y, X). */
10747 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10749 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10750 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10752 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10753 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10754 && integer_onep (TREE_OPERAND (arg0, 1))
10755 && integer_onep (arg1))
10757 tem = TREE_OPERAND (arg0, 0);
10758 return fold_build2 (EQ_EXPR, type,
10759 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10760 build_int_cst (TREE_TYPE (tem), 1)),
10761 build_int_cst (TREE_TYPE (tem), 0));
10763 /* Fold ~X & 1 as (X & 1) == 0. */
10764 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10765 && integer_onep (arg1))
10767 tem = TREE_OPERAND (arg0, 0);
10768 return fold_build2 (EQ_EXPR, type,
10769 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10770 build_int_cst (TREE_TYPE (tem), 1)),
10771 build_int_cst (TREE_TYPE (tem), 0));
10774 /* Fold (X ^ Y) & Y as ~X & Y. */
10775 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10776 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10778 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10779 return fold_build2 (BIT_AND_EXPR, type,
10780 fold_build1 (BIT_NOT_EXPR, type, tem),
10781 fold_convert (type, arg1));
10783 /* Fold (X ^ Y) & X as ~Y & X. */
10784 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10785 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10786 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10788 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10789 return fold_build2 (BIT_AND_EXPR, type,
10790 fold_build1 (BIT_NOT_EXPR, type, tem),
10791 fold_convert (type, arg1));
10793 /* Fold X & (X ^ Y) as X & ~Y. */
10794 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10795 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10797 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10798 return fold_build2 (BIT_AND_EXPR, type,
10799 fold_convert (type, arg0),
10800 fold_build1 (BIT_NOT_EXPR, type, tem));
10802 /* Fold X & (Y ^ X) as ~Y & X. */
10803 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10804 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10805 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10807 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10808 return fold_build2 (BIT_AND_EXPR, type,
10809 fold_build1 (BIT_NOT_EXPR, type, tem),
10810 fold_convert (type, arg0));
10813 t1 = distribute_bit_expr (code, type, arg0, arg1);
10814 if (t1 != NULL_TREE)
10815 return t1;
10816 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10817 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10818 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10820 unsigned int prec
10821 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10823 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10824 && (~TREE_INT_CST_LOW (arg1)
10825 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10826 return fold_convert (type, TREE_OPERAND (arg0, 0));
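/* Illustrative example: given "unsigned char c;", the expression
   (int) c & 0377 masks only bits the widening conversion already
   forces to zero, so it folds to plain (int) c.  */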
10829 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10831 This results in more efficient code for machines without a NOR
10832 instruction.  Combine will canonicalize to the first form,
10833 which will allow use of NOR instructions provided by the
10834 backend if they exist. */
10835 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10836 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10838 return fold_build1 (BIT_NOT_EXPR, type,
10839 build2 (BIT_IOR_EXPR, type,
10840 fold_convert (type,
10841 TREE_OPERAND (arg0, 0)),
10842 fold_convert (type,
10843 TREE_OPERAND (arg1, 0))));
10846 /* If arg0 is derived from the address of an object or function, we may
10847 be able to fold this expression using the object or function's
10848 alignment. */
10849 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10851 unsigned HOST_WIDE_INT modulus, residue;
10852 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10854 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10856 /* This works because modulus is a power of 2. If this weren't the
10857 case, we'd have to replace it by its greatest power-of-2
10858 divisor: modulus & -modulus. */
10859 if (low < modulus)
10860 return build_int_cst (type, residue & low);
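/* Illustrative example (the declaration is hypothetical): with

     static int v __attribute__ ((aligned (8)));

   a tree of the form &v & 7 has modulus 8 and residue 0, so it folds
   to the constant 0.  Such pointer-typed BIT_AND_EXPRs arise
   internally; they are not written directly in C source.  */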
10863 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)), and
10864 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)),
10865 if the new mask might be further optimized. */
10866 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10867 || TREE_CODE (arg0) == RSHIFT_EXPR)
10868 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10869 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10870 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10871 < TYPE_PRECISION (TREE_TYPE (arg0))
10872 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10873 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10875 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10876 unsigned HOST_WIDE_INT mask
10877 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10878 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10879 tree shift_type = TREE_TYPE (arg0);
10881 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10882 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10883 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10884 && TYPE_PRECISION (TREE_TYPE (arg0))
10885 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10887 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10888 tree arg00 = TREE_OPERAND (arg0, 0);
10889 /* See if more bits can be proven as zero because of
10890 zero extension. */
10891 if (TREE_CODE (arg00) == NOP_EXPR
10892 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10894 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10895 if (TYPE_PRECISION (inner_type)
10896 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10897 && TYPE_PRECISION (inner_type) < prec)
10899 prec = TYPE_PRECISION (inner_type);
10900 /* See if we can shorten the right shift. */
10901 if (shiftc < prec)
10902 shift_type = inner_type;
10905 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10906 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10907 zerobits <<= prec - shiftc;
10908 /* For an arithmetic shift, if the sign bit could be set, zerobits
10909 can actually contain sign bits, so no transformation is
10910 possible unless MASK masks them all away.  In that
10911 case the shift needs to be converted into a logical shift. */
10912 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10913 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10915 if ((mask & zerobits) == 0)
10916 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10917 else
10918 zerobits = 0;
10922 /* ((X << 16) & 0xff00) is (X, 0). */
10923 if ((mask & zerobits) == mask)
10924 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10926 newmask = mask | zerobits;
10927 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10929 unsigned int prec;
10931 /* Only do the transformation if NEWMASK is some integer
10932 mode's mask. */
10933 for (prec = BITS_PER_UNIT;
10934 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10935 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10936 break;
10937 if (prec < HOST_BITS_PER_WIDE_INT
10938 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10940 if (shift_type != TREE_TYPE (arg0))
10942 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10943 fold_convert (shift_type,
10944 TREE_OPERAND (arg0, 0)),
10945 TREE_OPERAND (arg0, 1));
10946 tem = fold_convert (type, tem);
10948 else
10949 tem = op0;
10950 return fold_build2 (BIT_AND_EXPR, type, tem,
10951 build_int_cst_type (TREE_TYPE (op1),
10952 newmask));
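/* Worked example (illustrative): for a 32-bit unsigned X in
   (X << 4) & 0xf3, the shift forces the low four bits to zero, so
   zerobits == 0xf and newmask == 0xf3 | 0xf == 0xff.  0xff is an
   integer mode's mask, so the expression is rewritten as
   (X << 4) & 0xff, which later passes can narrow further.  In
   (X << 16) & 0xff00 the mask covers only known-zero bits, so the
   whole expression folds to (X, 0).  */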
10957 goto associate;
10959 case RDIV_EXPR:
10960 /* Don't touch a floating-point divide by zero unless the mode
10961 of the constant can represent infinity. */
10962 if (TREE_CODE (arg1) == REAL_CST
10963 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10964 && real_zerop (arg1))
10965 return NULL_TREE;
10967 /* Optimize A / A to 1.0 if we don't care about
10968 NaNs or Infinities. Skip the transformation
10969 for non-real operands. */
10970 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10971 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10972 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10973 && operand_equal_p (arg0, arg1, 0))
10975 tree r = build_real (TREE_TYPE (arg0), dconst1);
10977 return omit_two_operands (type, r, arg0, arg1);
10980 /* The complex version of the above A / A optimization. */
10981 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10982 && operand_equal_p (arg0, arg1, 0))
10984 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10985 if (! HONOR_NANS (TYPE_MODE (elem_type))
10986 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10988 tree r = build_real (elem_type, dconst1);
10989 /* omit_two_operands will call fold_convert for us. */
10990 return omit_two_operands (type, r, arg0, arg1);
10994 /* (-A) / (-B) -> A / B */
10995 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10996 return fold_build2 (RDIV_EXPR, type,
10997 TREE_OPERAND (arg0, 0),
10998 negate_expr (arg1));
10999 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11000 return fold_build2 (RDIV_EXPR, type,
11001 negate_expr (arg0),
11002 TREE_OPERAND (arg1, 0));
11004 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11005 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11006 && real_onep (arg1))
11007 return non_lvalue (fold_convert (type, arg0));
11009 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11010 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11011 && real_minus_onep (arg1))
11012 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11014 /* If ARG1 is a constant, we can convert this to a multiply by the
11015 reciprocal. This does not have the same rounding properties,
11016 so only do this if -freciprocal-math. We can actually
11017 always safely do it if ARG1 is a power of two, but it's hard to
11018 tell if it is or not in a portable manner. */
11019 if (TREE_CODE (arg1) == REAL_CST)
11021 if (flag_reciprocal_math
11022 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11023 arg1, 0)))
11024 return fold_build2 (MULT_EXPR, type, arg0, tem);
11025 /* Find the reciprocal if optimizing and the result is exact. */
11026 if (optimize)
11028 REAL_VALUE_TYPE r;
11029 r = TREE_REAL_CST (arg1);
11030 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11032 tem = build_real (type, r);
11033 return fold_build2 (MULT_EXPR, type,
11034 fold_convert (type, arg0), tem);
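/* Illustrative example: when optimizing, X / 2.0 folds to X * 0.5
   because 0.5 is the exact inverse of 2.0; X / 3.0 is left alone
   under the default flags since 1.0/3.0 is not exactly
   representable.  */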
11038 /* Convert A/B/C to A/(B*C). */
11039 if (flag_reciprocal_math
11040 && TREE_CODE (arg0) == RDIV_EXPR)
11041 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11042 fold_build2 (MULT_EXPR, type,
11043 TREE_OPERAND (arg0, 1), arg1));
11045 /* Convert A/(B/C) to (A/B)*C. */
11046 if (flag_reciprocal_math
11047 && TREE_CODE (arg1) == RDIV_EXPR)
11048 return fold_build2 (MULT_EXPR, type,
11049 fold_build2 (RDIV_EXPR, type, arg0,
11050 TREE_OPERAND (arg1, 0)),
11051 TREE_OPERAND (arg1, 1));
11053 /* Convert C1/(X*C2) into (C1/C2)/X. */
11054 if (flag_reciprocal_math
11055 && TREE_CODE (arg1) == MULT_EXPR
11056 && TREE_CODE (arg0) == REAL_CST
11057 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11059 tree tem = const_binop (RDIV_EXPR, arg0,
11060 TREE_OPERAND (arg1, 1), 0);
11061 if (tem)
11062 return fold_build2 (RDIV_EXPR, type, tem,
11063 TREE_OPERAND (arg1, 0));
11066 if (flag_unsafe_math_optimizations)
11068 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11069 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11071 /* Optimize sin(x)/cos(x) as tan(x). */
11072 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11073 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11074 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11075 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11076 CALL_EXPR_ARG (arg1, 0), 0))
11078 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11080 if (tanfn != NULL_TREE)
11081 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11084 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11085 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11086 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11087 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11088 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11089 CALL_EXPR_ARG (arg1, 0), 0))
11091 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11093 if (tanfn != NULL_TREE)
11095 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11096 return fold_build2 (RDIV_EXPR, type,
11097 build_real (type, dconst1), tmp);
11101 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11102 NaNs or Infinities. */
11103 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11104 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11105 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11107 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11108 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11110 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11111 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11112 && operand_equal_p (arg00, arg01, 0))
11114 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11116 if (cosfn != NULL_TREE)
11117 return build_call_expr (cosfn, 1, arg00);
11121 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11122 NaNs or Infinities. */
11123 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11124 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11125 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11127 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11128 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11130 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11131 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11132 && operand_equal_p (arg00, arg01, 0))
11134 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11136 if (cosfn != NULL_TREE)
11138 tree tmp = build_call_expr (cosfn, 1, arg00);
11139 return fold_build2 (RDIV_EXPR, type,
11140 build_real (type, dconst1),
11141 tmp);
11146 /* Optimize pow(x,c)/x as pow(x,c-1). */
11147 if (fcode0 == BUILT_IN_POW
11148 || fcode0 == BUILT_IN_POWF
11149 || fcode0 == BUILT_IN_POWL)
11151 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11152 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11153 if (TREE_CODE (arg01) == REAL_CST
11154 && !TREE_OVERFLOW (arg01)
11155 && operand_equal_p (arg1, arg00, 0))
11157 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11158 REAL_VALUE_TYPE c;
11159 tree arg;
11161 c = TREE_REAL_CST (arg01);
11162 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11163 arg = build_real (type, c);
11164 return build_call_expr (powfn, 2, arg1, arg);
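/* Illustrative examples (these require -funsafe-math-optimizations):
   sin (x) / cos (x) folds to tan (x), and pow (x, 4.0) / x folds to
   pow (x, 3.0).  */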
11168 /* Optimize a/root(b/c) into a*root(c/b). */
11169 if (BUILTIN_ROOT_P (fcode1))
11171 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11173 if (TREE_CODE (rootarg) == RDIV_EXPR)
11175 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11176 tree b = TREE_OPERAND (rootarg, 0);
11177 tree c = TREE_OPERAND (rootarg, 1);
11179 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11181 tmp = build_call_expr (rootfn, 1, tmp);
11182 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11186 /* Optimize x/expN(y) into x*expN(-y). */
11187 if (BUILTIN_EXPONENT_P (fcode1))
11189 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11190 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11191 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11192 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11195 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11196 if (fcode1 == BUILT_IN_POW
11197 || fcode1 == BUILT_IN_POWF
11198 || fcode1 == BUILT_IN_POWL)
11200 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11201 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11202 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11203 tree neg11 = fold_convert (type, negate_expr (arg11));
11204 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11205 return fold_build2 (MULT_EXPR, type, arg0, arg1);
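/* Illustrative example: likewise x / exp (y) folds to x * exp (-y)
   and x / pow (y, z) to x * pow (y, -z), trading the division for a
   multiplication.  */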
11208 return NULL_TREE;
11210 case TRUNC_DIV_EXPR:
11211 case FLOOR_DIV_EXPR:
11212 /* Simplify A / (B << N) where A and B are positive and B is
11213 a power of 2, to A >> (N + log2(B)). */
11214 strict_overflow_p = false;
11215 if (TREE_CODE (arg1) == LSHIFT_EXPR
11216 && (TYPE_UNSIGNED (type)
11217 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11219 tree sval = TREE_OPERAND (arg1, 0);
11220 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11222 tree sh_cnt = TREE_OPERAND (arg1, 1);
11223 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11225 if (strict_overflow_p)
11226 fold_overflow_warning (("assuming signed overflow does not "
11227 "occur when simplifying A / (B << N)"),
11228 WARN_STRICT_OVERFLOW_MISC);
11230 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11231 sh_cnt, build_int_cst (NULL_TREE, pow2));
11232 return fold_build2 (RSHIFT_EXPR, type,
11233 fold_convert (type, arg0), sh_cnt);
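/* Illustrative example: for unsigned A, the division A / (4 << n)
   has B == 4 and log2(B) == 2, so it folds to A >> (n + 2).  */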
11237 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11238 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11239 if (INTEGRAL_TYPE_P (type)
11240 && TYPE_UNSIGNED (type)
11241 && code == FLOOR_DIV_EXPR)
11242 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11244 /* ... fall through ... */
11246 case ROUND_DIV_EXPR:
11247 case CEIL_DIV_EXPR:
11248 case EXACT_DIV_EXPR:
11249 if (integer_onep (arg1))
11250 return non_lvalue (fold_convert (type, arg0));
11251 if (integer_zerop (arg1))
11252 return NULL_TREE;
11253 /* X / -1 is -X. */
11254 if (!TYPE_UNSIGNED (type)
11255 && TREE_CODE (arg1) == INTEGER_CST
11256 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11257 && TREE_INT_CST_HIGH (arg1) == -1)
11258 return fold_convert (type, negate_expr (arg0));
11260 /* Convert -A / -B to A / B when the type is signed and overflow is
11261 undefined. */
11262 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11263 && TREE_CODE (arg0) == NEGATE_EXPR
11264 && negate_expr_p (arg1))
11266 if (INTEGRAL_TYPE_P (type))
11267 fold_overflow_warning (("assuming signed overflow does not occur "
11268 "when distributing negation across "
11269 "division"),
11270 WARN_STRICT_OVERFLOW_MISC);
11271 return fold_build2 (code, type,
11272 fold_convert (type, TREE_OPERAND (arg0, 0)),
11273 negate_expr (arg1));
11275 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11276 && TREE_CODE (arg1) == NEGATE_EXPR
11277 && negate_expr_p (arg0))
11279 if (INTEGRAL_TYPE_P (type))
11280 fold_overflow_warning (("assuming signed overflow does not occur "
11281 "when distributing negation across "
11282 "division"),
11283 WARN_STRICT_OVERFLOW_MISC);
11284 return fold_build2 (code, type, negate_expr (arg0),
11285 TREE_OPERAND (arg1, 0));
11288 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11289 operation, EXACT_DIV_EXPR.
11291 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11292 At one time others generated faster code; it's not clear whether they do
11293 after the last round of changes to the DIV code in expmed.c. */
11294 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11295 && multiple_of_p (type, arg0, arg1))
11296 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11298 strict_overflow_p = false;
11299 if (TREE_CODE (arg1) == INTEGER_CST
11300 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11301 &strict_overflow_p)))
11303 if (strict_overflow_p)
11304 fold_overflow_warning (("assuming signed overflow does not occur "
11305 "when simplifying division"),
11306 WARN_STRICT_OVERFLOW_MISC);
11307 return fold_convert (type, tem);
11310 return NULL_TREE;
11312 case CEIL_MOD_EXPR:
11313 case FLOOR_MOD_EXPR:
11314 case ROUND_MOD_EXPR:
11315 case TRUNC_MOD_EXPR:
11316 /* X % 1 is always zero, but be sure to preserve any side
11317 effects in X. */
11318 if (integer_onep (arg1))
11319 return omit_one_operand (type, integer_zero_node, arg0);
11321 /* For X % 0, return X % 0 unchanged so that we can get the
11322 proper warnings and errors. */
11323 if (integer_zerop (arg1))
11324 return NULL_TREE;
11326 /* 0 % X is always zero, but be sure to preserve any side
11327 effects in X. Place this after checking for X == 0. */
11328 if (integer_zerop (arg0))
11329 return omit_one_operand (type, integer_zero_node, arg1);
11331 /* X % -1 is zero. */
11332 if (!TYPE_UNSIGNED (type)
11333 && TREE_CODE (arg1) == INTEGER_CST
11334 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11335 && TREE_INT_CST_HIGH (arg1) == -1)
11336 return omit_one_operand (type, integer_zero_node, arg0);
11338 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11339 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11340 strict_overflow_p = false;
11341 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11342 && (TYPE_UNSIGNED (type)
11343 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11345 tree c = arg1;
11346 /* Also optimize A % (C << N) where C is a power of 2,
11347 to A & ((C << N) - 1). */
11348 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11349 c = TREE_OPERAND (arg1, 0);
11351 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11353 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11354 build_int_cst (TREE_TYPE (arg1), 1));
11355 if (strict_overflow_p)
11356 fold_overflow_warning (("assuming signed overflow does not "
11357 "occur when simplifying "
11358 "X % (power of two)"),
11359 WARN_STRICT_OVERFLOW_MISC);
11360 return fold_build2 (BIT_AND_EXPR, type,
11361 fold_convert (type, arg0),
11362 fold_convert (type, mask));
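/* Illustrative example: for unsigned X, X % 8 folds to X & 7, and
   X % (2 << n) folds to X & ((2 << n) - 1).  */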
11366 /* X % -C is the same as X % C. */
11367 if (code == TRUNC_MOD_EXPR
11368 && !TYPE_UNSIGNED (type)
11369 && TREE_CODE (arg1) == INTEGER_CST
11370 && !TREE_OVERFLOW (arg1)
11371 && TREE_INT_CST_HIGH (arg1) < 0
11372 && !TYPE_OVERFLOW_TRAPS (type)
11373 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11374 && !sign_bit_p (arg1, arg1))
11375 return fold_build2 (code, type, fold_convert (type, arg0),
11376 fold_convert (type, negate_expr (arg1)));
11378 /* X % -Y is the same as X % Y. */
11379 if (code == TRUNC_MOD_EXPR
11380 && !TYPE_UNSIGNED (type)
11381 && TREE_CODE (arg1) == NEGATE_EXPR
11382 && !TYPE_OVERFLOW_TRAPS (type))
11383 return fold_build2 (code, type, fold_convert (type, arg0),
11384 fold_convert (type, TREE_OPERAND (arg1, 0)));
11386 if (TREE_CODE (arg1) == INTEGER_CST
11387 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11388 &strict_overflow_p)))
11390 if (strict_overflow_p)
11391 fold_overflow_warning (("assuming signed overflow does not occur "
11392 "when simplifying modulus"),
11393 WARN_STRICT_OVERFLOW_MISC);
11394 return fold_convert (type, tem);
11397 return NULL_TREE;
11399 case LROTATE_EXPR:
11400 case RROTATE_EXPR:
11401 if (integer_all_onesp (arg0))
11402 return omit_one_operand (type, arg0, arg1);
11403 goto shift;
11405 case RSHIFT_EXPR:
11406 /* Optimize -1 >> x for arithmetic right shifts. */
11407 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11408 return omit_one_operand (type, arg0, arg1);
11409 /* ... fall through ... */
11411 case LSHIFT_EXPR:
11412 shift:
11413 if (integer_zerop (arg1))
11414 return non_lvalue (fold_convert (type, arg0));
11415 if (integer_zerop (arg0))
11416 return omit_one_operand (type, arg0, arg1);
11418 /* Since negative shift count is not well-defined,
11419 don't try to compute it in the compiler. */
11420 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11421 return NULL_TREE;
11423 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11424 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11425 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11426 && host_integerp (TREE_OPERAND (arg0, 1), false)
11427 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11429 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11430 + TREE_INT_CST_LOW (arg1));
11432 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11433 being well defined. */
11434 if (low >= TYPE_PRECISION (type))
11436 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11437 low = low % TYPE_PRECISION (type);
11438 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11439 return build_int_cst (type, 0);
11440 else
11441 low = TYPE_PRECISION (type) - 1;
11444 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11445 build_int_cst (type, low));
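/* Illustrative example: (x >> 3) >> 2 folds to x >> 5, while for a
   32-bit unsigned x the combined count in (x >> 20) >> 20 exceeds
   the precision and the result folds directly to 0.  */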
11448 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11449 into x & ((unsigned)-1 >> c) for unsigned types. */
11450 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11451 || (TYPE_UNSIGNED (type)
11452 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11453 && host_integerp (arg1, false)
11454 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11455 && host_integerp (TREE_OPERAND (arg0, 1), false)
11456 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11458 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11459 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11460 tree lshift;
11461 tree arg00;
11463 if (low0 == low1)
11465 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11467 lshift = build_int_cst (type, -1);
11468 lshift = int_const_binop (code, lshift, arg1, 0);
11470 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
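/* Illustrative example: (x >> 4) << 4 folds to x & -16, clearing the
   low four bits without a round trip through two shifts.  */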
11474 /* Rewrite an LROTATE_EXPR by a constant into an
11475 RROTATE_EXPR by a new constant. */
11476 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11478 tree tem = build_int_cst (TREE_TYPE (arg1),
11479 TYPE_PRECISION (type));
11480 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11481 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11484 /* If we have a rotate of a bit operation with the rotate count and
11485 the second operand of the bit operation both constant,
11486 permute the two operations. */
11487 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11488 && (TREE_CODE (arg0) == BIT_AND_EXPR
11489 || TREE_CODE (arg0) == BIT_IOR_EXPR
11490 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11491 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11492 return fold_build2 (TREE_CODE (arg0), type,
11493 fold_build2 (code, type,
11494 TREE_OPERAND (arg0, 0), arg1),
11495 fold_build2 (code, type,
11496 TREE_OPERAND (arg0, 1), arg1));
11498 /* Two consecutive rotates adding up to the precision of the
11499 type can be ignored. */
11500 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11501 && TREE_CODE (arg0) == RROTATE_EXPR
11502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11503 && TREE_INT_CST_HIGH (arg1) == 0
11504 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11505 && ((TREE_INT_CST_LOW (arg1)
11506 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11507 == (unsigned int) TYPE_PRECISION (type)))
11508 return TREE_OPERAND (arg0, 0);
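/* Illustrative example: for a 32-bit type, a left rotate by 8 is
   canonicalized to a right rotate by 24, and two right rotates whose
   counts sum to 32 cancel to the original operand.  */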
11510 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11511 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11512 if the latter can be further optimized. */
11513 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11514 && TREE_CODE (arg0) == BIT_AND_EXPR
11515 && TREE_CODE (arg1) == INTEGER_CST
11516 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11518 tree mask = fold_build2 (code, type,
11519 fold_convert (type, TREE_OPERAND (arg0, 1)),
11520 arg1);
11521 tree shift = fold_build2 (code, type,
11522 fold_convert (type, TREE_OPERAND (arg0, 0)),
11523 arg1);
11524 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11525 if (tem)
11526 return tem;
11529 return NULL_TREE;
11531 case MIN_EXPR:
11532 if (operand_equal_p (arg0, arg1, 0))
11533 return omit_one_operand (type, arg0, arg1);
11534 if (INTEGRAL_TYPE_P (type)
11535 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11536 return omit_one_operand (type, arg1, arg0);
11537 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11538 if (tem)
11539 return tem;
11540 goto associate;
11542 case MAX_EXPR:
11543 if (operand_equal_p (arg0, arg1, 0))
11544 return omit_one_operand (type, arg0, arg1);
11545 if (INTEGRAL_TYPE_P (type)
11546 && TYPE_MAX_VALUE (type)
11547 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11548 return omit_one_operand (type, arg1, arg0);
11549 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11550 if (tem)
11551 return tem;
11552 goto associate;
11554 case TRUTH_ANDIF_EXPR:
11555 /* Note that the operands of this must be ints
11556 and their values must be 0 or 1.
11557 ("true" is a fixed value perhaps depending on the language.) */
11558 /* If first arg is constant zero, return it. */
11559 if (integer_zerop (arg0))
11560 return fold_convert (type, arg0);
11561 case TRUTH_AND_EXPR:
11562 /* If either arg is constant true, drop it. */
11563 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11564 return non_lvalue (fold_convert (type, arg1));
11565 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11566 /* Preserve sequence points. */
11567 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11568 return non_lvalue (fold_convert (type, arg0));
11569 /* If second arg is constant zero, result is zero, but first arg
11570 must be evaluated. */
11571 if (integer_zerop (arg1))
11572 return omit_one_operand (type, arg1, arg0);
11573 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11574 case will be handled here. */
11575 if (integer_zerop (arg0))
11576 return omit_one_operand (type, arg0, arg1);
11578 /* !X && X is always false. */
11579 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11581 return omit_one_operand (type, integer_zero_node, arg1);
11582 /* X && !X is always false. */
11583 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11584 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11585 return omit_one_operand (type, integer_zero_node, arg0);
11587 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11588 means A >= Y && A != MAX, but in this case we know that
11589 A < X <= MAX. */
11591 if (!TREE_SIDE_EFFECTS (arg0)
11592 && !TREE_SIDE_EFFECTS (arg1))
11594 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11595 if (tem && !operand_equal_p (tem, arg0, 0))
11596 return fold_build2 (code, type, tem, arg1);
11598 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11599 if (tem && !operand_equal_p (tem, arg1, 0))
11600 return fold_build2 (code, type, arg0, tem);
11603 truth_andor:
11604 /* We only do these simplifications if we are optimizing. */
11605 if (!optimize)
11606 return NULL_TREE;
11608 /* Check for things like (A || B) && (A || C). We can convert this
11609 to A || (B && C). Note that either operator can be any of the four
11610 truth and/or operations and the transformation will still be
11611 valid. Also note that we only care about order for the
11612 ANDIF and ORIF operators. If B contains side effects, this
11613 might change the truth-value of A. */
11614 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11615 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11616 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11617 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11618 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11619 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11621 tree a00 = TREE_OPERAND (arg0, 0);
11622 tree a01 = TREE_OPERAND (arg0, 1);
11623 tree a10 = TREE_OPERAND (arg1, 0);
11624 tree a11 = TREE_OPERAND (arg1, 1);
11625 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11626 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11627 && (code == TRUTH_AND_EXPR
11628 || code == TRUTH_OR_EXPR));
11630 if (operand_equal_p (a00, a10, 0))
11631 return fold_build2 (TREE_CODE (arg0), type, a00,
11632 fold_build2 (code, type, a01, a11));
11633 else if (commutative && operand_equal_p (a00, a11, 0))
11634 return fold_build2 (TREE_CODE (arg0), type, a00,
11635 fold_build2 (code, type, a01, a10));
11636 else if (commutative && operand_equal_p (a01, a10, 0))
11637 return fold_build2 (TREE_CODE (arg0), type, a01,
11638 fold_build2 (code, type, a00, a11));
11640 /* This case is tricky because we must either have commutative
11641 operators or else A10 must not have side-effects. */
11643 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11644 && operand_equal_p (a01, a11, 0))
11645 return fold_build2 (TREE_CODE (arg0), type,
11646 fold_build2 (code, type, a00, a10),
11647 a01);
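/* Illustrative example: (a || b) && (a || c) is rewritten as
   a || (b && c) when b has no side effects.  */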
11650 /* See if we can build a range comparison. */
11651 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11652 return tem;
11654 /* Check for the possibility of merging component references. If our
11655 lhs is another similar operation, try to merge its rhs with our
11656 rhs. Then try to merge our lhs and rhs. */
11657 if (TREE_CODE (arg0) == code
11658 && 0 != (tem = fold_truthop (code, type,
11659 TREE_OPERAND (arg0, 1), arg1)))
11660 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11662 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11663 return tem;
11665 return NULL_TREE;
11667 case TRUTH_ORIF_EXPR:
11668 /* Note that the operands of this must be ints
11669 and their values must be 0 or 1.
11670 ("true" is a fixed value perhaps depending on the language.) */
11671 /* If first arg is constant true, return it. */
11672 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11673 return fold_convert (type, arg0);
11674 case TRUTH_OR_EXPR:
11675 /* If either arg is constant zero, drop it. */
11676 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11677 return non_lvalue (fold_convert (type, arg1));
11678 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11679 /* Preserve sequence points. */
11680 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11681 return non_lvalue (fold_convert (type, arg0));
11682 /* If second arg is constant true, result is true, but we must
11683 evaluate first arg. */
11684 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11685 return omit_one_operand (type, arg1, arg0);
11686 /* Likewise for first arg, but note this only occurs here for
11687 TRUTH_OR_EXPR. */
11688 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11689 return omit_one_operand (type, arg0, arg1);
11691 /* !X || X is always true. */
11692 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11693 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11694 return omit_one_operand (type, integer_one_node, arg1);
11695 /* X || !X is always true. */
11696 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11698 return omit_one_operand (type, integer_one_node, arg0);
11700 goto truth_andor;
11702 case TRUTH_XOR_EXPR:
11703 /* If the second arg is constant zero, drop it. */
11704 if (integer_zerop (arg1))
11705 return non_lvalue (fold_convert (type, arg0));
11706 /* If the second arg is constant true, this is a logical inversion. */
11707 if (integer_onep (arg1))
11709 /* Only call invert_truthvalue if operand is a truth value. */
11710 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11711 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11712 else
11713 tem = invert_truthvalue (arg0);
11714 return non_lvalue (fold_convert (type, tem));
11716 /* Identical arguments cancel to zero. */
11717 if (operand_equal_p (arg0, arg1, 0))
11718 return omit_one_operand (type, integer_zero_node, arg0);
11720 /* !X ^ X is always true. */
11721 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11722 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11723 return omit_one_operand (type, integer_one_node, arg1);
11725 /* X ^ !X is always true. */
11726 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11727 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11728 return omit_one_operand (type, integer_one_node, arg0);
11730 return NULL_TREE;
11732 case EQ_EXPR:
11733 case NE_EXPR:
11734 tem = fold_comparison (code, type, op0, op1);
11735 if (tem != NULL_TREE)
11736 return tem;
11738 /* bool_var != 0 becomes bool_var. */
11739 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11740 && code == NE_EXPR)
11741 return non_lvalue (fold_convert (type, arg0));
11743 /* bool_var == 1 becomes bool_var. */
11744 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11745 && code == EQ_EXPR)
11746 return non_lvalue (fold_convert (type, arg0));
11748 /* bool_var != 1 becomes !bool_var. */
11749 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11750 && code == NE_EXPR)
11751 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11753 /* bool_var == 0 becomes !bool_var. */
11754 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11755 && code == EQ_EXPR)
11756 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11758 /* If this is an equality comparison of the address of two non-weak,
11759 unaliased symbols, neither of which is extern (since we do not
11760 have access to attributes for externs), then we know the result. */
11761 if (TREE_CODE (arg0) == ADDR_EXPR
11762 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11763 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11764 && ! lookup_attribute ("alias",
11765 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11766 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11767 && TREE_CODE (arg1) == ADDR_EXPR
11768 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11769 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11770 && ! lookup_attribute ("alias",
11771 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11772 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11774 /* We know that we're looking at the address of two
11775 non-weak, unaliased, static _DECL nodes.
11777 It is both wasteful and incorrect to call operand_equal_p
11778 to compare the two ADDR_EXPR nodes. It is wasteful in that
11779 all we need to do is test pointer equality for the arguments
11780 to the two ADDR_EXPR nodes. It is incorrect to use
11781 operand_equal_p as that function is NOT equivalent to a
11782 C equality test. It can in fact return false for two
11783 objects which would test as equal using the C equality
11784 operator. */
11785 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11786 return constant_boolean_node (equal
11787 ? code == EQ_EXPR : code != EQ_EXPR,
11788 type);
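/* Illustrative example (declarations hypothetical): given
   "static int a, b;", the comparison &a == &b folds to 0 and
   &a == &a folds to 1, because distinct non-weak, unaliased statics
   can never share an address.  */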
11791 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11792 a MINUS_EXPR of a constant, we can convert it into a comparison with
11793 a revised constant as long as no overflow occurs. */
11794 if (TREE_CODE (arg1) == INTEGER_CST
11795 && (TREE_CODE (arg0) == PLUS_EXPR
11796 || TREE_CODE (arg0) == MINUS_EXPR)
11797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11798 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11799 ? MINUS_EXPR : PLUS_EXPR,
11800 fold_convert (TREE_TYPE (arg0), arg1),
11801 TREE_OPERAND (arg0, 1), 0))
11802 && !TREE_OVERFLOW (tem))
11803 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11805 /* Similarly for a NEGATE_EXPR. */
11806 if (TREE_CODE (arg0) == NEGATE_EXPR
11807 && TREE_CODE (arg1) == INTEGER_CST
11808 && 0 != (tem = negate_expr (arg1))
11809 && TREE_CODE (tem) == INTEGER_CST
11810 && !TREE_OVERFLOW (tem))
11811 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11813 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11814 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11815 && TREE_CODE (arg1) == INTEGER_CST
11816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11817 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11818 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11819 fold_convert (TREE_TYPE (arg0), arg1),
11820 TREE_OPERAND (arg0, 1)));
11822 /* Transform comparisons of the form X +- C CMP X. */
11823 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11825 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11826 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11827 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11829 tree cst = TREE_OPERAND (arg0, 1);
11831 if (code == EQ_EXPR
11832 && !integer_zerop (cst))
11833 return omit_two_operands (type, boolean_false_node,
11834 TREE_OPERAND (arg0, 0), arg1);
11835 else
11836 return omit_two_operands (type, boolean_true_node,
11837 TREE_OPERAND (arg0, 0), arg1);
11840 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11841 for !=. Don't do this for ordered comparisons due to overflow. */
11842 if (TREE_CODE (arg0) == MINUS_EXPR
11843 && integer_zerop (arg1))
11844 return fold_build2 (code, type,
11845 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11847 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11848 if (TREE_CODE (arg0) == ABS_EXPR
11849 && (integer_zerop (arg1) || real_zerop (arg1)))
11850 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11852 /* If this is an EQ or NE comparison with zero and ARG0 is
11853 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11854 two operations, but the latter can be done in one less insn
11855 on machines that have only two-operand insns or on which a
11856 constant cannot be the first operand. */
11857 if (TREE_CODE (arg0) == BIT_AND_EXPR
11858 && integer_zerop (arg1))
11860 tree arg00 = TREE_OPERAND (arg0, 0);
11861 tree arg01 = TREE_OPERAND (arg0, 1);
11862 if (TREE_CODE (arg00) == LSHIFT_EXPR
11863 && integer_onep (TREE_OPERAND (arg00, 0)))
11865 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11866 arg01, TREE_OPERAND (arg00, 1));
11867 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11868 build_int_cst (TREE_TYPE (arg0), 1));
11869 return fold_build2 (code, type,
11870 fold_convert (TREE_TYPE (arg1), tem), arg1);
11872 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11873 && integer_onep (TREE_OPERAND (arg01, 0)))
11875 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11876 arg00, TREE_OPERAND (arg01, 1));
11877 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11878 build_int_cst (TREE_TYPE (arg0), 1));
11879 return fold_build2 (code, type,
11880 fold_convert (TREE_TYPE (arg1), tem), arg1);
11884 /* If this is an NE or EQ comparison of zero against the result of a
11885 signed MOD operation whose second operand is a power of 2, make
11886 the MOD operation unsigned since it is simpler and equivalent. */
11887 if (integer_zerop (arg1)
11888 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11889 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11890 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11891 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11892 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11893 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11895 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11896 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11897 fold_convert (newtype,
11898 TREE_OPERAND (arg0, 0)),
11899 fold_convert (newtype,
11900 TREE_OPERAND (arg0, 1)));
11902 return fold_build2 (code, type, newmod,
11903 fold_convert (newtype, arg1));
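/* Illustrative example: for signed int x, x % 4 == 0 tests only
   divisibility, so the MOD is rewritten over the unsigned type as
   (unsigned) x % 4 == 0, which later folds to the mask test
   ((unsigned) x & 3) == 0.  */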
11906 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11907 C1 is a valid shift constant, and C2 is a power of two, i.e.
11908 a single bit. */
11909 if (TREE_CODE (arg0) == BIT_AND_EXPR
11910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11911 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11912 == INTEGER_CST
11913 && integer_pow2p (TREE_OPERAND (arg0, 1))
11914 && integer_zerop (arg1))
11916 tree itype = TREE_TYPE (arg0);
11917 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11918 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11920 /* Check for a valid shift count. */
11921 if (TREE_INT_CST_HIGH (arg001) == 0
11922 && TREE_INT_CST_LOW (arg001) < prec)
11924 tree arg01 = TREE_OPERAND (arg0, 1);
11925 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11926 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11927 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11928 can be rewritten as (X & (C2 << C1)) != 0. */
11929 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11931 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11932 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11933 return fold_build2 (code, type, tem, arg1);
11935 /* Otherwise, for signed (arithmetic) shifts,
11936 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11937 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11938 else if (!TYPE_UNSIGNED (itype))
11939 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11940 arg000, build_int_cst (itype, 0));
11941 /* Otherwise, for unsigned (logical) shifts,
11942 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11943 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11944 else
11945 return omit_one_operand (type,
11946 code == EQ_EXPR ? integer_one_node
11947 : integer_zero_node,
11948 arg000);
11952 /* If this is an NE comparison of zero with an AND of one, remove the
11953 comparison since the AND will give the correct value. */
11954 if (code == NE_EXPR
11955 && integer_zerop (arg1)
11956 && TREE_CODE (arg0) == BIT_AND_EXPR
11957 && integer_onep (TREE_OPERAND (arg0, 1)))
11958 return fold_convert (type, arg0);
11960 /* If we have (A & C) == C where C is a power of 2, convert this into
11961 (A & C) != 0. Similarly for NE_EXPR. */
11962 if (TREE_CODE (arg0) == BIT_AND_EXPR
11963 && integer_pow2p (TREE_OPERAND (arg0, 1))
11964 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11965 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11966 arg0, fold_convert (TREE_TYPE (arg0),
11967 integer_zero_node));
11969 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11970 bit, then fold the expression into A < 0 or A >= 0. */
11971 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11972 if (tem)
11973 return tem;
11975 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11976 Similarly for NE_EXPR. */
11977 if (TREE_CODE (arg0) == BIT_AND_EXPR
11978 && TREE_CODE (arg1) == INTEGER_CST
11979 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11981 tree notc = fold_build1 (BIT_NOT_EXPR,
11982 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11983 TREE_OPERAND (arg0, 1));
11984 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11985 arg1, notc);
11986 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11987 if (integer_nonzerop (dandnotc))
11988 return omit_one_operand (type, rslt, arg0);
11991 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11992 Similarly for NE_EXPR. */
11993 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11994 && TREE_CODE (arg1) == INTEGER_CST
11995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11997 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11998 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11999 TREE_OPERAND (arg0, 1), notd);
12000 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12001 if (integer_nonzerop (candnotd))
12002 return omit_one_operand (type, rslt, arg0);
12005 /* Optimize comparisons of strlen vs zero to a compare of the
12006 first character of the string vs zero. To wit,
12007 strlen(ptr) == 0 => *ptr == 0
12008 strlen(ptr) != 0 => *ptr != 0
12009 Other cases should reduce to one of these two (or a constant)
12010 due to the return value of strlen being unsigned. */
12011 if (TREE_CODE (arg0) == CALL_EXPR
12012 && integer_zerop (arg1))
12014 tree fndecl = get_callee_fndecl (arg0);
12016 if (fndecl
12017 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12018 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12019 && call_expr_nargs (arg0) == 1
12020 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12022 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12023 return fold_build2 (code, type, iref,
12024 build_int_cst (TREE_TYPE (iref), 0));
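/* Illustrative example: strlen (s) == 0 folds to *s == 0, avoiding
   the library call entirely.  */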
12028 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12029 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12030 if (TREE_CODE (arg0) == RSHIFT_EXPR
12031 && integer_zerop (arg1)
12032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12034 tree arg00 = TREE_OPERAND (arg0, 0);
12035 tree arg01 = TREE_OPERAND (arg0, 1);
12036 tree itype = TREE_TYPE (arg00);
12037 if (TREE_INT_CST_HIGH (arg01) == 0
12038 && TREE_INT_CST_LOW (arg01)
12039 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12041 if (TYPE_UNSIGNED (itype))
12043 itype = signed_type_for (itype);
12044 arg00 = fold_convert (itype, arg00);
12046 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12047 type, arg00, build_int_cst (itype, 0));
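/* Illustrative example: for 32-bit int x, (x >> 31) != 0 folds to
   x < 0 and (x >> 31) == 0 to x >= 0; an unsigned operand is first
   converted to the corresponding signed type.  */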
12051 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12052 if (integer_zerop (arg1)
12053 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12054 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12055 TREE_OPERAND (arg0, 1));
12057 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12058 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12059 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12060 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12061 build_int_cst (TREE_TYPE (arg1), 0));
12062 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12063 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12065 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12066 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12067 build_int_cst (TREE_TYPE (arg1), 0));
12069 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12070 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12071 && TREE_CODE (arg1) == INTEGER_CST
12072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12073 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12074 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12075 TREE_OPERAND (arg0, 1), arg1));
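/* Illustrative example: (x ^ 5) == 3 folds to x == 6, since
   5 ^ 3 == 6.  */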
12077 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12078 (X & C) == 0 when C is a single bit. */
12079 if (TREE_CODE (arg0) == BIT_AND_EXPR
12080 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12081 && integer_zerop (arg1)
12082 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12084 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12085 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12086 TREE_OPERAND (arg0, 1));
12087 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12088 type, tem, arg1);
12091 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12092 constant C is a power of two, i.e. a single bit. */
12093 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12094 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12095 && integer_zerop (arg1)
12096 && integer_pow2p (TREE_OPERAND (arg0, 1))
12097 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12098 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12100 tree arg00 = TREE_OPERAND (arg0, 0);
12101 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12102 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12105 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12106 when C is a power of two, i.e. a single bit. */
12107 if (TREE_CODE (arg0) == BIT_AND_EXPR
12108 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12109 && integer_zerop (arg1)
12110 && integer_pow2p (TREE_OPERAND (arg0, 1))
12111 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12112 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12114 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12115 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12116 arg000, TREE_OPERAND (arg0, 1));
12117 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12118 tem, build_int_cst (TREE_TYPE (tem), 0));
12121 if (integer_zerop (arg1)
12122 && tree_expr_nonzero_p (arg0))
12124 tree res = constant_boolean_node (code == NE_EXPR, type);
12125 return omit_one_operand (type, res, arg0);
12128 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12129 if (TREE_CODE (arg0) == NEGATE_EXPR
12130 && TREE_CODE (arg1) == NEGATE_EXPR)
12131 return fold_build2 (code, type,
12132 TREE_OPERAND (arg0, 0),
12133 TREE_OPERAND (arg1, 0));
12135 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12136 if (TREE_CODE (arg0) == BIT_AND_EXPR
12137 && TREE_CODE (arg1) == BIT_AND_EXPR)
12139 tree arg00 = TREE_OPERAND (arg0, 0);
12140 tree arg01 = TREE_OPERAND (arg0, 1);
12141 tree arg10 = TREE_OPERAND (arg1, 0);
12142 tree arg11 = TREE_OPERAND (arg1, 1);
12143 tree itype = TREE_TYPE (arg0);
12145 if (operand_equal_p (arg01, arg11, 0))
12146 return fold_build2 (code, type,
12147 fold_build2 (BIT_AND_EXPR, itype,
12148 fold_build2 (BIT_XOR_EXPR, itype,
12149 arg00, arg10),
12150 arg01),
12151 build_int_cst (itype, 0));
12153 if (operand_equal_p (arg01, arg10, 0))
12154 return fold_build2 (code, type,
12155 fold_build2 (BIT_AND_EXPR, itype,
12156 fold_build2 (BIT_XOR_EXPR, itype,
12157 arg00, arg11),
12158 arg01),
12159 build_int_cst (itype, 0));
12161 if (operand_equal_p (arg00, arg11, 0))
12162 return fold_build2 (code, type,
12163 fold_build2 (BIT_AND_EXPR, itype,
12164 fold_build2 (BIT_XOR_EXPR, itype,
12165 arg01, arg10),
12166 arg00),
12167 build_int_cst (itype, 0));
12169 if (operand_equal_p (arg00, arg10, 0))
12170 return fold_build2 (code, type,
12171 fold_build2 (BIT_AND_EXPR, itype,
12172 fold_build2 (BIT_XOR_EXPR, itype,
12173 arg01, arg11),
12174 arg00),
12175 build_int_cst (itype, 0));
12178 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12179 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12181 tree arg00 = TREE_OPERAND (arg0, 0);
12182 tree arg01 = TREE_OPERAND (arg0, 1);
12183 tree arg10 = TREE_OPERAND (arg1, 0);
12184 tree arg11 = TREE_OPERAND (arg1, 1);
12185 tree itype = TREE_TYPE (arg0);
12187 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12188 operand_equal_p guarantees no side-effects so we don't need
12189 to use omit_one_operand on Z. */
12190 if (operand_equal_p (arg01, arg11, 0))
12191 return fold_build2 (code, type, arg00, arg10);
12192 if (operand_equal_p (arg01, arg10, 0))
12193 return fold_build2 (code, type, arg00, arg11);
12194 if (operand_equal_p (arg00, arg11, 0))
12195 return fold_build2 (code, type, arg01, arg10);
12196 if (operand_equal_p (arg00, arg10, 0))
12197 return fold_build2 (code, type, arg01, arg11);
12199 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12200 if (TREE_CODE (arg01) == INTEGER_CST
12201 && TREE_CODE (arg11) == INTEGER_CST)
12202 return fold_build2 (code, type,
12203 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12204 fold_build2 (BIT_XOR_EXPR, itype,
12205 arg01, arg11)),
12206 arg10);
12209 /* Attempt to simplify equality/inequality comparisons of complex
12210 values. Only lower the comparison if the result is known or
12211 can be simplified to a single scalar comparison. */
12212 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12213 || TREE_CODE (arg0) == COMPLEX_CST)
12214 && (TREE_CODE (arg1) == COMPLEX_EXPR
12215 || TREE_CODE (arg1) == COMPLEX_CST))
12217 tree real0, imag0, real1, imag1;
12218 tree rcond, icond;
12220 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12222 real0 = TREE_OPERAND (arg0, 0);
12223 imag0 = TREE_OPERAND (arg0, 1);
12225 else
12227 real0 = TREE_REALPART (arg0);
12228 imag0 = TREE_IMAGPART (arg0);
12231 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12233 real1 = TREE_OPERAND (arg1, 0);
12234 imag1 = TREE_OPERAND (arg1, 1);
12236 else
12238 real1 = TREE_REALPART (arg1);
12239 imag1 = TREE_IMAGPART (arg1);
12242 rcond = fold_binary (code, type, real0, real1);
12243 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12245 if (integer_zerop (rcond))
12247 if (code == EQ_EXPR)
12248 return omit_two_operands (type, boolean_false_node,
12249 imag0, imag1);
12250 return fold_build2 (NE_EXPR, type, imag0, imag1);
12252 else
12254 if (code == NE_EXPR)
12255 return omit_two_operands (type, boolean_true_node,
12256 imag0, imag1);
12257 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12261 icond = fold_binary (code, type, imag0, imag1);
12262 if (icond && TREE_CODE (icond) == INTEGER_CST)
12264 if (integer_zerop (icond))
12266 if (code == EQ_EXPR)
12267 return omit_two_operands (type, boolean_false_node,
12268 real0, real1);
12269 return fold_build2 (NE_EXPR, type, real0, real1);
12271 else
12273 if (code == NE_EXPR)
12274 return omit_two_operands (type, boolean_true_node,
12275 real0, real1);
12276 return fold_build2 (EQ_EXPR, type, real0, real1);
12281 return NULL_TREE;
12283 case LT_EXPR:
12284 case GT_EXPR:
12285 case LE_EXPR:
12286 case GE_EXPR:
12287 tem = fold_comparison (code, type, op0, op1);
12288 if (tem != NULL_TREE)
12289 return tem;
12291 /* Transform comparisons of the form X +- C CMP X. */
12292 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12293 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12294 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12295 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12296 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12297 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12299 tree arg01 = TREE_OPERAND (arg0, 1);
12300 enum tree_code code0 = TREE_CODE (arg0);
12301 int is_positive;
12303 if (TREE_CODE (arg01) == REAL_CST)
12304 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12305 else
12306 is_positive = tree_int_cst_sgn (arg01);
12308 /* (X - c) > X becomes false. */
12309 if (code == GT_EXPR
12310 && ((code0 == MINUS_EXPR && is_positive >= 0)
12311 || (code0 == PLUS_EXPR && is_positive <= 0)))
12313 if (TREE_CODE (arg01) == INTEGER_CST
12314 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12315 fold_overflow_warning (("assuming signed overflow does not "
12316 "occur when assuming that (X - c) > X "
12317 "is always false"),
12318 WARN_STRICT_OVERFLOW_ALL);
12319 return constant_boolean_node (0, type);
12322 /* Likewise (X + c) < X becomes false. */
12323 if (code == LT_EXPR
12324 && ((code0 == PLUS_EXPR && is_positive >= 0)
12325 || (code0 == MINUS_EXPR && is_positive <= 0)))
12327 if (TREE_CODE (arg01) == INTEGER_CST
12328 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12329 fold_overflow_warning (("assuming signed overflow does not "
12330 "occur when assuming that "
12331 "(X + c) < X is always false"),
12332 WARN_STRICT_OVERFLOW_ALL);
12333 return constant_boolean_node (0, type);
12336 /* Convert (X - c) <= X to true. */
12337 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12338 && code == LE_EXPR
12339 && ((code0 == MINUS_EXPR && is_positive >= 0)
12340 || (code0 == PLUS_EXPR && is_positive <= 0)))
12342 if (TREE_CODE (arg01) == INTEGER_CST
12343 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12344 fold_overflow_warning (("assuming signed overflow does not "
12345 "occur when assuming that "
12346 "(X - c) <= X is always true"),
12347 WARN_STRICT_OVERFLOW_ALL);
12348 return constant_boolean_node (1, type);
12351 /* Convert (X + c) >= X to true. */
12352 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12353 && code == GE_EXPR
12354 && ((code0 == PLUS_EXPR && is_positive >= 0)
12355 || (code0 == MINUS_EXPR && is_positive <= 0)))
12357 if (TREE_CODE (arg01) == INTEGER_CST
12358 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12359 fold_overflow_warning (("assuming signed overflow does not "
12360 "occur when assuming that "
12361 "(X + c) >= X is always true"),
12362 WARN_STRICT_OVERFLOW_ALL);
12363 return constant_boolean_node (1, type);
12366 if (TREE_CODE (arg01) == INTEGER_CST)
12368 /* Convert X + c > X and X - c < X to true for integers. */
12369 if (code == GT_EXPR
12370 && ((code0 == PLUS_EXPR && is_positive > 0)
12371 || (code0 == MINUS_EXPR && is_positive < 0)))
12373 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12374 fold_overflow_warning (("assuming signed overflow does "
12375 "not occur when assuming that "
12376 "(X + c) > X is always true"),
12377 WARN_STRICT_OVERFLOW_ALL);
12378 return constant_boolean_node (1, type);
12381 if (code == LT_EXPR
12382 && ((code0 == MINUS_EXPR && is_positive > 0)
12383 || (code0 == PLUS_EXPR && is_positive < 0)))
12385 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12386 fold_overflow_warning (("assuming signed overflow does "
12387 "not occur when assuming that "
12388 "(X - c) < X is always true"),
12389 WARN_STRICT_OVERFLOW_ALL);
12390 return constant_boolean_node (1, type);
12393 /* Convert X + c <= X and X - c >= X to false for integers. */
12394 if (code == LE_EXPR
12395 && ((code0 == PLUS_EXPR && is_positive > 0)
12396 || (code0 == MINUS_EXPR && is_positive < 0)))
12398 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12399 fold_overflow_warning (("assuming signed overflow does "
12400 "not occur when assuming that "
12401 "(X + c) <= X is always false"),
12402 WARN_STRICT_OVERFLOW_ALL);
12403 return constant_boolean_node (0, type);
12406 if (code == GE_EXPR
12407 && ((code0 == MINUS_EXPR && is_positive > 0)
12408 || (code0 == PLUS_EXPR && is_positive < 0)))
12410 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12411 fold_overflow_warning (("assuming signed overflow does "
12412 "not occur when assuming that "
12413 "(X - c) >= X is always false"),
12414 WARN_STRICT_OVERFLOW_ALL);
12415 return constant_boolean_node (0, type);
12420 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12421 This transformation affects the cases which are handled in later
12422 optimizations involving comparisons with non-negative constants. */
12423 if (TREE_CODE (arg1) == INTEGER_CST
12424 && TREE_CODE (arg0) != INTEGER_CST
12425 && tree_int_cst_sgn (arg1) > 0)
12427 if (code == GE_EXPR)
12429 arg1 = const_binop (MINUS_EXPR, arg1,
12430 build_int_cst (TREE_TYPE (arg1), 1), 0);
12431 return fold_build2 (GT_EXPR, type, arg0,
12432 fold_convert (TREE_TYPE (arg0), arg1));
12434 if (code == LT_EXPR)
12436 arg1 = const_binop (MINUS_EXPR, arg1,
12437 build_int_cst (TREE_TYPE (arg1), 1), 0);
12438 return fold_build2 (LE_EXPR, type, arg0,
12439 fold_convert (TREE_TYPE (arg0), arg1));
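/* Editorial illustration (not part of the original source): a minimal
   source-level sketch of the canonicalization above.  The function
   name is hypothetical.  */
#if 0
static int
example_canonical_cmp (int x)
{
  /* For the constant 3 > 0, fold rewrites "x >= 3" as "x > 2" and
     "x < 3" as "x <= 2", so the later constant-comparison cases only
     need to recognize the GT_EXPR/LE_EXPR forms.  */
  return (x >= 3) + (x < 3);
}
#endif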
12443 /* Comparisons with the highest or lowest possible integer of
12444 the specified precision will have known values. */
12446 tree arg1_type = TREE_TYPE (arg1);
12447 unsigned int width = TYPE_PRECISION (arg1_type);
12449 if (TREE_CODE (arg1) == INTEGER_CST
12450 && !TREE_OVERFLOW (arg1)
12451 && width <= 2 * HOST_BITS_PER_WIDE_INT
12452 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12454 HOST_WIDE_INT signed_max_hi;
12455 unsigned HOST_WIDE_INT signed_max_lo;
12456 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12458 if (width <= HOST_BITS_PER_WIDE_INT)
12460 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12461 - 1;
12462 signed_max_hi = 0;
12463 max_hi = 0;
12465 if (TYPE_UNSIGNED (arg1_type))
12467 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12468 min_lo = 0;
12469 min_hi = 0;
12471 else
12473 max_lo = signed_max_lo;
12474 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12475 min_hi = -1;
12478 else
12480 width -= HOST_BITS_PER_WIDE_INT;
12481 signed_max_lo = -1;
12482 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12483 - 1;
12484 max_lo = -1;
12485 min_lo = 0;
12487 if (TYPE_UNSIGNED (arg1_type))
12489 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12490 min_hi = 0;
12492 else
12494 max_hi = signed_max_hi;
12495 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12499 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12500 && TREE_INT_CST_LOW (arg1) == max_lo)
12501 switch (code)
12503 case GT_EXPR:
12504 return omit_one_operand (type, integer_zero_node, arg0);
12506 case GE_EXPR:
12507 return fold_build2 (EQ_EXPR, type, op0, op1);
12509 case LE_EXPR:
12510 return omit_one_operand (type, integer_one_node, arg0);
12512 case LT_EXPR:
12513 return fold_build2 (NE_EXPR, type, op0, op1);
12515 /* The GE_EXPR and LT_EXPR cases above are not normally
12516 reached because of previous transformations. */
12518 default:
12519 break;
12521 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12522 == max_hi
12523 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12524 switch (code)
12526 case GT_EXPR:
12527 arg1 = const_binop (PLUS_EXPR, arg1,
12528 build_int_cst (TREE_TYPE (arg1), 1), 0);
12529 return fold_build2 (EQ_EXPR, type,
12530 fold_convert (TREE_TYPE (arg1), arg0),
12531 arg1);
12532 case LE_EXPR:
12533 arg1 = const_binop (PLUS_EXPR, arg1,
12534 build_int_cst (TREE_TYPE (arg1), 1), 0);
12535 return fold_build2 (NE_EXPR, type,
12536 fold_convert (TREE_TYPE (arg1), arg0),
12537 arg1);
12538 default:
12539 break;
12541 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12542 == min_hi
12543 && TREE_INT_CST_LOW (arg1) == min_lo)
12544 switch (code)
12546 case LT_EXPR:
12547 return omit_one_operand (type, integer_zero_node, arg0);
12549 case LE_EXPR:
12550 return fold_build2 (EQ_EXPR, type, op0, op1);
12552 case GE_EXPR:
12553 return omit_one_operand (type, integer_one_node, arg0);
12555 case GT_EXPR:
12556 return fold_build2 (NE_EXPR, type, op0, op1);
12558 default:
12559 break;
12561 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12562 == min_hi
12563 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12564 switch (code)
12566 case GE_EXPR:
12567 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12568 return fold_build2 (NE_EXPR, type,
12569 fold_convert (TREE_TYPE (arg1), arg0),
12570 arg1);
12571 case LT_EXPR:
12572 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12573 return fold_build2 (EQ_EXPR, type,
12574 fold_convert (TREE_TYPE (arg1), arg0),
12575 arg1);
12576 default:
12577 break;
12580 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12581 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12582 && TYPE_UNSIGNED (arg1_type)
12583 /* We will flip the signedness of the comparison operator
12584 associated with the mode of arg1, so the sign bit is
12585 specified by this mode. Check that arg1 is the signed
12586 max associated with this sign bit. */
12587 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12588 /* signed_type does not work on pointer types. */
12589 && INTEGRAL_TYPE_P (arg1_type))
12591 /* The following case also applies to X < signed_max+1
12592 and X >= signed_max+1 because of previous transformations. */
12593 if (code == LE_EXPR || code == GT_EXPR)
12595 tree st;
12596 st = signed_type_for (TREE_TYPE (arg1));
12597 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12598 type, fold_convert (st, arg0),
12599 build_int_cst (st, 0));
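/* Editorial illustration (not part of the original source): the
   signedness flip above at the source level, assuming 32-bit int.
   The function name is hypothetical.  */
#if 0
static int
example_signed_max_cmp (unsigned int x)
{
  /* fold rewrites "x > 0x7fffffff" as "(int) x < 0": against the
     signed maximum, only the sign bit of X decides the result.  */
  return x > 0x7fffffffu;
}
#endif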
12605 /* If we are comparing an ABS_EXPR with a constant, we can
12606 convert all the cases into explicit comparisons, but they may
12607 well not be faster than doing the ABS and one comparison.
12608 But ABS (X) <= C is a range comparison, which becomes a subtraction
12609 and a comparison, and is probably faster. */
12610 if (code == LE_EXPR
12611 && TREE_CODE (arg1) == INTEGER_CST
12612 && TREE_CODE (arg0) == ABS_EXPR
12613 && ! TREE_SIDE_EFFECTS (arg0)
12614 && (0 != (tem = negate_expr (arg1)))
12615 && TREE_CODE (tem) == INTEGER_CST
12616 && !TREE_OVERFLOW (tem))
12617 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12618 build2 (GE_EXPR, type,
12619 TREE_OPERAND (arg0, 0), tem),
12620 build2 (LE_EXPR, type,
12621 TREE_OPERAND (arg0, 0), arg1));
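/* Editorial illustration (not part of the original source): the range
   form of the ABS_EXPR comparison above.  The function name is
   hypothetical.  */
#if 0
static int
example_abs_range (int x)
{
  /* fold rewrites "abs (x) <= 5" as "x >= -5 && x <= 5", a
     TRUTH_ANDIF_EXPR of two plain comparisons with no ABS left.  */
  return __builtin_abs (x) <= 5;
}
#endif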
12623 /* Convert ABS_EXPR<x> >= 0 to true. */
12624 strict_overflow_p = false;
12625 if (code == GE_EXPR
12626 && (integer_zerop (arg1)
12627 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12628 && real_zerop (arg1)))
12629 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12631 if (strict_overflow_p)
12632 fold_overflow_warning (("assuming signed overflow does not occur "
12633 "when simplifying comparison of "
12634 "absolute value and zero"),
12635 WARN_STRICT_OVERFLOW_CONDITIONAL);
12636 return omit_one_operand (type, integer_one_node, arg0);
12639 /* Convert ABS_EXPR<x> < 0 to false. */
12640 strict_overflow_p = false;
12641 if (code == LT_EXPR
12642 && (integer_zerop (arg1) || real_zerop (arg1))
12643 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12645 if (strict_overflow_p)
12646 fold_overflow_warning (("assuming signed overflow does not occur "
12647 "when simplifying comparison of "
12648 "absolute value and zero"),
12649 WARN_STRICT_OVERFLOW_CONDITIONAL);
12650 return omit_one_operand (type, integer_zero_node, arg0);
12653 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12654 and similarly for >= into !=. */
12655 if ((code == LT_EXPR || code == GE_EXPR)
12656 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12657 && TREE_CODE (arg1) == LSHIFT_EXPR
12658 && integer_onep (TREE_OPERAND (arg1, 0)))
12659 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12660 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12661 TREE_OPERAND (arg1, 1)),
12662 build_int_cst (TREE_TYPE (arg0), 0));
12664 if ((code == LT_EXPR || code == GE_EXPR)
12665 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12666 && CONVERT_EXPR_P (arg1)
12667 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12668 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12669 return
12670 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12671 fold_convert (TREE_TYPE (arg0),
12672 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12673 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12674 1))),
12675 build_int_cst (TREE_TYPE (arg0), 0));
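/* Editorial illustration (not part of the original source): the shift
   rewrite above for an unsigned operand.  The function name is
   hypothetical.  */
#if 0
static int
example_shift_cmp (unsigned int x, int y)
{
  /* fold rewrites "x < (1u << y)" as "(x >> y) == 0", and the
     GE_EXPR variant "x >= (1u << y)" as "(x >> y) != 0".  */
  return x < (1u << y);
}
#endif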
12677 return NULL_TREE;
12679 case UNORDERED_EXPR:
12680 case ORDERED_EXPR:
12681 case UNLT_EXPR:
12682 case UNLE_EXPR:
12683 case UNGT_EXPR:
12684 case UNGE_EXPR:
12685 case UNEQ_EXPR:
12686 case LTGT_EXPR:
12687 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12689 t1 = fold_relational_const (code, type, arg0, arg1);
12690 if (t1 != NULL_TREE)
12691 return t1;
12694 /* If the first operand is NaN, the result is constant. */
12695 if (TREE_CODE (arg0) == REAL_CST
12696 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12697 && (code != LTGT_EXPR || ! flag_trapping_math))
12699 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12700 ? integer_zero_node
12701 : integer_one_node;
12702 return omit_one_operand (type, t1, arg1);
12705 /* If the second operand is NaN, the result is constant. */
12706 if (TREE_CODE (arg1) == REAL_CST
12707 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12708 && (code != LTGT_EXPR || ! flag_trapping_math))
12710 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12711 ? integer_zero_node
12712 : integer_one_node;
12713 return omit_one_operand (type, t1, arg0);
12716 /* Simplify unordered comparison of something with itself. */
12717 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12718 && operand_equal_p (arg0, arg1, 0))
12719 return constant_boolean_node (1, type);
12721 if (code == LTGT_EXPR
12722 && !flag_trapping_math
12723 && operand_equal_p (arg0, arg1, 0))
12724 return constant_boolean_node (0, type);
12726 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12728 tree targ0 = strip_float_extensions (arg0);
12729 tree targ1 = strip_float_extensions (arg1);
12730 tree newtype = TREE_TYPE (targ0);
12732 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12733 newtype = TREE_TYPE (targ1);
12735 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12736 return fold_build2 (code, type, fold_convert (newtype, targ0),
12737 fold_convert (newtype, targ1));
12740 return NULL_TREE;
12742 case COMPOUND_EXPR:
12743 /* When pedantic, a compound expression can be neither an lvalue
12744 nor an integer constant expression. */
12745 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12746 return NULL_TREE;
12747 /* Don't let (0, 0) be a null pointer constant. */
12748 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12749 : fold_convert (type, arg1);
12750 return pedantic_non_lvalue (tem);
12752 case COMPLEX_EXPR:
12753 if ((TREE_CODE (arg0) == REAL_CST
12754 && TREE_CODE (arg1) == REAL_CST)
12755 || (TREE_CODE (arg0) == INTEGER_CST
12756 && TREE_CODE (arg1) == INTEGER_CST))
12757 return build_complex (type, arg0, arg1);
12758 return NULL_TREE;
12760 case ASSERT_EXPR:
12761 /* An ASSERT_EXPR should never be passed to fold_binary. */
12762 gcc_unreachable ();
12764 default:
12765 return NULL_TREE;
12766 } /* switch (code) */
12769 /* Callback for walk_tree, looking for LABEL_EXPR.
12770 Returns *TP if it is a LABEL_EXPR, otherwise returns NULL_TREE.
12771 Do not check the sub-tree of GOTO_EXPR. */
12773 static tree
12774 contains_label_1 (tree *tp,
12775 int *walk_subtrees,
12776 void *data ATTRIBUTE_UNUSED)
12778 switch (TREE_CODE (*tp))
12780 case LABEL_EXPR:
12781 return *tp;
12782 case GOTO_EXPR:
12783 *walk_subtrees = 0;
12784 /* no break */
12785 default:
12786 return NULL_TREE;
12790 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
12791 accessible from outside the sub-tree. Returns true if such a
12792 label is found, false otherwise. */
12794 static bool
12795 contains_label_p (tree st)
12797 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12800 /* Fold a ternary expression of code CODE and type TYPE with operands
12801 OP0, OP1, and OP2. Return the folded expression if folding is
12802 successful. Otherwise, return NULL_TREE. */
12804 tree
12805 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12807 tree tem;
12808 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12809 enum tree_code_class kind = TREE_CODE_CLASS (code);
12811 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12812 && TREE_CODE_LENGTH (code) == 3);
12814 /* Strip any conversions that don't change the mode. This is safe
12815 for every expression, except for a comparison expression because
12816 its signedness is derived from its operands. So, in the latter
12817 case, only strip conversions that don't change the signedness.
12819 Note that this is done as an internal manipulation within the
12820 constant folder, in order to find the simplest representation of
12821 the arguments so that their form can be studied. In any case,
12822 the appropriate type conversions should be put back in the tree
12823 that will get out of the constant folder. */
12824 if (op0)
12826 arg0 = op0;
12827 STRIP_NOPS (arg0);
12830 if (op1)
12832 arg1 = op1;
12833 STRIP_NOPS (arg1);
12836 switch (code)
12838 case COMPONENT_REF:
12839 if (TREE_CODE (arg0) == CONSTRUCTOR
12840 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12842 unsigned HOST_WIDE_INT idx;
12843 tree field, value;
12844 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12845 if (field == arg1)
12846 return value;
12848 return NULL_TREE;
12850 case COND_EXPR:
12851 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12852 so all simple results must be passed through pedantic_non_lvalue. */
12853 if (TREE_CODE (arg0) == INTEGER_CST)
12855 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12856 tem = integer_zerop (arg0) ? op2 : op1;
12857 /* Only optimize constant conditions when the selected branch
12858 has the same type as the COND_EXPR. This avoids optimizing
12859 away "c ? x : throw", where the throw has a void type.
12860 Avoid throwing away an operand that contains a label. */
12861 if ((!TREE_SIDE_EFFECTS (unused_op)
12862 || !contains_label_p (unused_op))
12863 && (! VOID_TYPE_P (TREE_TYPE (tem))
12864 || VOID_TYPE_P (type)))
12865 return pedantic_non_lvalue (tem);
12866 return NULL_TREE;
12868 if (operand_equal_p (arg1, op2, 0))
12869 return pedantic_omit_one_operand (type, arg1, arg0);
12871 /* If we have A op B ? A : C, we may be able to convert this to a
12872 simpler expression, depending on the operation and the values
12873 of B and C. Signed zeros prevent all of these transformations,
12874 for reasons given above each one.
12876 Also try swapping the arguments and inverting the conditional. */
12877 if (COMPARISON_CLASS_P (arg0)
12878 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12879 arg1, TREE_OPERAND (arg0, 1))
12880 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12882 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12883 if (tem)
12884 return tem;
12887 if (COMPARISON_CLASS_P (arg0)
12888 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12889 op2,
12890 TREE_OPERAND (arg0, 1))
12891 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12893 tem = fold_truth_not_expr (arg0);
12894 if (tem && COMPARISON_CLASS_P (tem))
12896 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12897 if (tem)
12898 return tem;
12902 /* If the second operand is simpler than the third, swap them
12903 since that produces better jump optimization results. */
12904 if (truth_value_p (TREE_CODE (arg0))
12905 && tree_swap_operands_p (op1, op2, false))
12907 /* See if this can be inverted. If it can't, possibly because
12908 it was a floating-point inequality comparison, don't do
12909 anything. */
12910 tem = fold_truth_not_expr (arg0);
12911 if (tem)
12912 return fold_build3 (code, type, tem, op2, op1);
12915 /* Convert A ? 1 : 0 to simply A. */
12916 if (integer_onep (op1)
12917 && integer_zerop (op2)
12918 /* If we try to convert OP0 to our type, the
12919 call to fold will try to move the conversion inside
12920 a COND, which will recurse. In that case, the COND_EXPR
12921 is probably the best choice, so leave it alone. */
12922 && type == TREE_TYPE (arg0))
12923 return pedantic_non_lvalue (arg0);
12925 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12926 over COND_EXPR in cases such as floating point comparisons. */
12927 if (integer_zerop (op1)
12928 && integer_onep (op2)
12929 && truth_value_p (TREE_CODE (arg0)))
12930 return pedantic_non_lvalue (fold_convert (type,
12931 invert_truthvalue (arg0)));
12933 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12934 if (TREE_CODE (arg0) == LT_EXPR
12935 && integer_zerop (TREE_OPERAND (arg0, 1))
12936 && integer_zerop (op2)
12937 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12939 /* sign_bit_p only checks ARG1 bits within A's precision.
12940 If <sign bit of A> has wider type than A, bits outside
12941 of A's precision in <sign bit of A> need to be checked.
12942 If they are all 0, this optimization needs to be done
12943 in unsigned A's type; if they are all 1, in signed A's type;
12944 otherwise this can't be done. */
12945 if (TYPE_PRECISION (TREE_TYPE (tem))
12946 < TYPE_PRECISION (TREE_TYPE (arg1))
12947 && TYPE_PRECISION (TREE_TYPE (tem))
12948 < TYPE_PRECISION (type))
12950 unsigned HOST_WIDE_INT mask_lo;
12951 HOST_WIDE_INT mask_hi;
12952 int inner_width, outer_width;
12953 tree tem_type;
12955 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12956 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12957 if (outer_width > TYPE_PRECISION (type))
12958 outer_width = TYPE_PRECISION (type);
12960 if (outer_width > HOST_BITS_PER_WIDE_INT)
12962 mask_hi = ((unsigned HOST_WIDE_INT) -1
12963 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12964 mask_lo = -1;
12966 else
12968 mask_hi = 0;
12969 mask_lo = ((unsigned HOST_WIDE_INT) -1
12970 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12972 if (inner_width > HOST_BITS_PER_WIDE_INT)
12974 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12975 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12976 mask_lo = 0;
12978 else
12979 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12980 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12982 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12983 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12985 tem_type = signed_type_for (TREE_TYPE (tem));
12986 tem = fold_convert (tem_type, tem);
12988 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12989 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12991 tem_type = unsigned_type_for (TREE_TYPE (tem));
12992 tem = fold_convert (tem_type, tem);
12994 else
12995 tem = NULL;
12998 if (tem)
12999 return fold_convert (type,
13000 fold_build2 (BIT_AND_EXPR,
13001 TREE_TYPE (tem), tem,
13002 fold_convert (TREE_TYPE (tem),
13003 arg1)));
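/* Editorial illustration (not part of the original source): the
   sign-bit selection fold above, assuming 32-bit int.  The function
   name is hypothetical; INT_MIN is spelled out to stay
   self-contained.  */
#if 0
static int
example_sign_bit_select (int a)
{
  /* fold rewrites "a < 0 ? INT_MIN : 0" as "a & INT_MIN": the value
     selected by the comparison is exactly the sign bit of A.  */
  return a < 0 ? (-2147483647 - 1) : 0;
}
#endif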
13006 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13007 already handled above. */
13008 if (TREE_CODE (arg0) == BIT_AND_EXPR
13009 && integer_onep (TREE_OPERAND (arg0, 1))
13010 && integer_zerop (op2)
13011 && integer_pow2p (arg1))
13013 tree tem = TREE_OPERAND (arg0, 0);
13014 STRIP_NOPS (tem);
13015 if (TREE_CODE (tem) == RSHIFT_EXPR
13016 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13017 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13018 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13019 return fold_build2 (BIT_AND_EXPR, type,
13020 TREE_OPERAND (tem, 0), arg1);
13023 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13024 is probably obsolete because the first operand should be a
13025 truth value (that's why we have the two cases above), but let's
13026 leave it in until we can confirm this for all front-ends. */
13027 if (integer_zerop (op2)
13028 && TREE_CODE (arg0) == NE_EXPR
13029 && integer_zerop (TREE_OPERAND (arg0, 1))
13030 && integer_pow2p (arg1)
13031 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13032 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13033 arg1, OEP_ONLY_CONST))
13034 return pedantic_non_lvalue (fold_convert (type,
13035 TREE_OPERAND (arg0, 0)));
13037 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13038 if (integer_zerop (op2)
13039 && truth_value_p (TREE_CODE (arg0))
13040 && truth_value_p (TREE_CODE (arg1)))
13041 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13042 fold_convert (type, arg0),
13043 arg1);
13045 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13046 if (integer_onep (op2)
13047 && truth_value_p (TREE_CODE (arg0))
13048 && truth_value_p (TREE_CODE (arg1)))
13050 /* Only perform transformation if ARG0 is easily inverted. */
13051 tem = fold_truth_not_expr (arg0);
13052 if (tem)
13053 return fold_build2 (TRUTH_ORIF_EXPR, type,
13054 fold_convert (type, tem),
13055 arg1);
13058 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13059 if (integer_zerop (arg1)
13060 && truth_value_p (TREE_CODE (arg0))
13061 && truth_value_p (TREE_CODE (op2)))
13063 /* Only perform transformation if ARG0 is easily inverted. */
13064 tem = fold_truth_not_expr (arg0);
13065 if (tem)
13066 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13067 fold_convert (type, tem),
13068 op2);
13071 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13072 if (integer_onep (arg1)
13073 && truth_value_p (TREE_CODE (arg0))
13074 && truth_value_p (TREE_CODE (op2)))
13075 return fold_build2 (TRUTH_ORIF_EXPR, type,
13076 fold_convert (type, arg0),
13077 op2);
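/* Editorial illustration (not part of the original source): the four
   truth-value COND_EXPR folds above, for A and B known to be truth
   values.  The function name is hypothetical.  */
#if 0
static int
example_cond_truth (int a, int b)
{
  /* With A and B truth values, fold rewrites:
       a ? b : 0  as  a && b
       a ? b : 1  as  !a || b
       a ? 0 : b  as  !a && b
       a ? 1 : b  as  a || b  */
  return (a != 0) ? (b != 0) : 0;
}
#endif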
13079 return NULL_TREE;
13081 case CALL_EXPR:
13082 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13083 of fold_ternary on them. */
13084 gcc_unreachable ();
13086 case BIT_FIELD_REF:
13087 if ((TREE_CODE (arg0) == VECTOR_CST
13088 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13089 && type == TREE_TYPE (TREE_TYPE (arg0)))
13091 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13092 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13094 if (width != 0
13095 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13096 && (idx % width) == 0
13097 && (idx = idx / width)
13098 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13100 tree elements = NULL_TREE;
13102 if (TREE_CODE (arg0) == VECTOR_CST)
13103 elements = TREE_VECTOR_CST_ELTS (arg0);
13104 else
13106 unsigned HOST_WIDE_INT idx;
13107 tree value;
13109 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13110 elements = tree_cons (NULL_TREE, value, elements);
13112 while (idx-- > 0 && elements)
13113 elements = TREE_CHAIN (elements);
13114 if (elements)
13115 return TREE_VALUE (elements);
13116 else
13117 return fold_convert (type, integer_zero_node);
13121 /* A bit-field-ref that references the full argument can be stripped. */
13122 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13123 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13124 && integer_zerop (op2))
13125 return fold_convert (type, arg0);
13127 return NULL_TREE;
13129 default:
13130 return NULL_TREE;
13131 } /* switch (code) */
13134 /* Perform constant folding and related simplification of EXPR.
13135 The related simplifications include x*1 => x, x*0 => 0, etc.,
13136 and application of the associative law.
13137 NOP_EXPR conversions may be removed freely (as long as we
13138 are careful not to change the type of the overall expression).
13139 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13140 but we can constant-fold them if they have constant operands. */
13142 #ifdef ENABLE_FOLD_CHECKING
13143 # define fold(x) fold_1 (x)
13144 static tree fold_1 (tree);
13145 static
13146 #endif
13147 tree
13148 fold (tree expr)
13150 const tree t = expr;
13151 enum tree_code code = TREE_CODE (t);
13152 enum tree_code_class kind = TREE_CODE_CLASS (code);
13153 tree tem;
13155 /* Return right away if a constant. */
13156 if (kind == tcc_constant)
13157 return t;
13159 /* CALL_EXPR-like objects with variable numbers of operands are
13160 treated specially. */
13161 if (kind == tcc_vl_exp)
13163 if (code == CALL_EXPR)
13165 tem = fold_call_expr (expr, false);
13166 return tem ? tem : expr;
13168 return expr;
13171 if (IS_EXPR_CODE_CLASS (kind)
13172 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13174 tree type = TREE_TYPE (t);
13175 tree op0, op1, op2;
13177 switch (TREE_CODE_LENGTH (code))
13179 case 1:
13180 op0 = TREE_OPERAND (t, 0);
13181 tem = fold_unary (code, type, op0);
13182 return tem ? tem : expr;
13183 case 2:
13184 op0 = TREE_OPERAND (t, 0);
13185 op1 = TREE_OPERAND (t, 1);
13186 tem = fold_binary (code, type, op0, op1);
13187 return tem ? tem : expr;
13188 case 3:
13189 op0 = TREE_OPERAND (t, 0);
13190 op1 = TREE_OPERAND (t, 1);
13191 op2 = TREE_OPERAND (t, 2);
13192 tem = fold_ternary (code, type, op0, op1, op2);
13193 return tem ? tem : expr;
13194 default:
13195 break;
13199 switch (code)
13201 case ARRAY_REF:
13203 tree op0 = TREE_OPERAND (t, 0);
13204 tree op1 = TREE_OPERAND (t, 1);
13206 if (TREE_CODE (op1) == INTEGER_CST
13207 && TREE_CODE (op0) == CONSTRUCTOR
13208 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13210 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13211 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13212 unsigned HOST_WIDE_INT begin = 0;
13214 /* Find a matching index by means of a binary search. */
13215 while (begin != end)
13217 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13218 tree index = VEC_index (constructor_elt, elts, middle)->index;
13220 if (TREE_CODE (index) == INTEGER_CST
13221 && tree_int_cst_lt (index, op1))
13222 begin = middle + 1;
13223 else if (TREE_CODE (index) == INTEGER_CST
13224 && tree_int_cst_lt (op1, index))
13225 end = middle;
13226 else if (TREE_CODE (index) == RANGE_EXPR
13227 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13228 begin = middle + 1;
13229 else if (TREE_CODE (index) == RANGE_EXPR
13230 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13231 end = middle;
13232 else
13233 return VEC_index (constructor_elt, elts, middle)->value;
13237 return t;
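/* Editorial illustration (not part of the original source): the same
   binary search in plain C over a sorted index array, ignoring the
   RANGE_EXPR entries handled above.  All names are hypothetical.  */
#if 0
static int
example_find_elt (const int *index, const int *value, int n, int key)
{
  int begin = 0, end = n;
  while (begin != end)
    {
      int middle = (begin + end) / 2;
      if (index[middle] < key)
        begin = middle + 1;
      else if (key < index[middle])
        end = middle;
      else
        return value[middle];
    }
  return -1;  /* No matching index.  */
}
#endif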
13240 case CONST_DECL:
13241 return fold (DECL_INITIAL (t));
13243 default:
13244 return t;
13245 } /* switch (code) */
13248 #ifdef ENABLE_FOLD_CHECKING
13249 #undef fold
13251 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13252 static void fold_check_failed (const_tree, const_tree);
13253 void print_fold_checksum (const_tree);
13255 /* When configured with --enable-checking=fold, compute a digest of
13256 EXPR before and after the actual fold call, to verify that fold
13257 did not accidentally change the original expr. */
13259 tree
13260 fold (tree expr)
13262 tree ret;
13263 struct md5_ctx ctx;
13264 unsigned char checksum_before[16], checksum_after[16];
13265 htab_t ht;
13267 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13268 md5_init_ctx (&ctx);
13269 fold_checksum_tree (expr, &ctx, ht);
13270 md5_finish_ctx (&ctx, checksum_before);
13271 htab_empty (ht);
13273 ret = fold_1 (expr);
13275 md5_init_ctx (&ctx);
13276 fold_checksum_tree (expr, &ctx, ht);
13277 md5_finish_ctx (&ctx, checksum_after);
13278 htab_delete (ht);
13280 if (memcmp (checksum_before, checksum_after, 16))
13281 fold_check_failed (expr, ret);
13283 return ret;
13286 void
13287 print_fold_checksum (const_tree expr)
13289 struct md5_ctx ctx;
13290 unsigned char checksum[16], cnt;
13291 htab_t ht;
13293 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13294 md5_init_ctx (&ctx);
13295 fold_checksum_tree (expr, &ctx, ht);
13296 md5_finish_ctx (&ctx, checksum);
13297 htab_delete (ht);
13298 for (cnt = 0; cnt < 16; ++cnt)
13299 fprintf (stderr, "%02x", checksum[cnt]);
13300 putc ('\n', stderr);
13303 static void
13304 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13306 internal_error ("fold check: original tree changed by fold");
13309 static void
13310 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13312 const void **slot;
13313 enum tree_code code;
13314 struct tree_function_decl buf;
13315 int i, len;
13317 recursive_label:
13319 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13320 <= sizeof (struct tree_function_decl))
13321 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13322 if (expr == NULL)
13323 return;
13324 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13325 if (*slot != NULL)
13326 return;
13327 *slot = expr;
13328 code = TREE_CODE (expr);
13329 if (TREE_CODE_CLASS (code) == tcc_declaration
13330 && DECL_ASSEMBLER_NAME_SET_P (expr))
13332 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13333 memcpy ((char *) &buf, expr, tree_size (expr));
13334 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13335 expr = (tree) &buf;
13337 else if (TREE_CODE_CLASS (code) == tcc_type
13338 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13339 || TYPE_CACHED_VALUES_P (expr)
13340 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13342 /* Allow these fields to be modified. */
13343 tree tmp;
13344 memcpy ((char *) &buf, expr, tree_size (expr));
13345 expr = tmp = (tree) &buf;
13346 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13347 TYPE_POINTER_TO (tmp) = NULL;
13348 TYPE_REFERENCE_TO (tmp) = NULL;
13349 if (TYPE_CACHED_VALUES_P (tmp))
13351 TYPE_CACHED_VALUES_P (tmp) = 0;
13352 TYPE_CACHED_VALUES (tmp) = NULL;
13355 md5_process_bytes (expr, tree_size (expr), ctx);
13356 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13357 if (TREE_CODE_CLASS (code) != tcc_type
13358 && TREE_CODE_CLASS (code) != tcc_declaration
13359 && code != TREE_LIST
13360 && code != SSA_NAME)
13361 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13362 switch (TREE_CODE_CLASS (code))
13364 case tcc_constant:
13365 switch (code)
13367 case STRING_CST:
13368 md5_process_bytes (TREE_STRING_POINTER (expr),
13369 TREE_STRING_LENGTH (expr), ctx);
13370 break;
13371 case COMPLEX_CST:
13372 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13373 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13374 break;
13375 case VECTOR_CST:
13376 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13377 break;
13378 default:
13379 break;
13381 break;
13382 case tcc_exceptional:
13383 switch (code)
13385 case TREE_LIST:
13386 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13387 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13388 expr = TREE_CHAIN (expr);
13389 goto recursive_label;
13390 break;
13391 case TREE_VEC:
13392 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13393 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13394 break;
13395 default:
13396 break;
13398 break;
13399 case tcc_expression:
13400 case tcc_reference:
13401 case tcc_comparison:
13402 case tcc_unary:
13403 case tcc_binary:
13404 case tcc_statement:
13405 case tcc_vl_exp:
13406 len = TREE_OPERAND_LENGTH (expr);
13407 for (i = 0; i < len; ++i)
13408 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13409 break;
13410 case tcc_declaration:
13411 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13412 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13413 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13415 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13416 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13417 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13418 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13419 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13421 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13422 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13424 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13426 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13427 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13428 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13430 break;
13431 case tcc_type:
13432 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13433 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13434 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13435 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13436 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13437 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13438 if (INTEGRAL_TYPE_P (expr)
13439 || SCALAR_FLOAT_TYPE_P (expr))
13441 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13442 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13444 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13445 if (TREE_CODE (expr) == RECORD_TYPE
13446 || TREE_CODE (expr) == UNION_TYPE
13447 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13448 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13449 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13450 break;
13451 default:
13452 break;
13456 /* Helper function for outputting the checksum of a tree T. When
13457 debugging with gdb, you can "define mynext" to be "next" followed
13458 by "call debug_fold_checksum (op0)", then just trace down till the
13459 outputs differ. */
13461 void
13462 debug_fold_checksum (const_tree t)
13464 int i;
13465 unsigned char checksum[16];
13466 struct md5_ctx ctx;
13467 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13469 md5_init_ctx (&ctx);
13470 fold_checksum_tree (t, &ctx, ht);
13471 md5_finish_ctx (&ctx, checksum);
13472 htab_empty (ht);
13474 for (i = 0; i < 16; i++)
13475 fprintf (stderr, "%d ", checksum[i]);
13477 fprintf (stderr, "\n");
13480 #endif
13482 /* Fold a unary tree expression with code CODE of type TYPE with an
13483 operand OP0. Return a folded expression if successful. Otherwise,
13484 return a tree expression with code CODE of type TYPE with an
13485 operand OP0. */
13487 tree
13488 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13490 tree tem;
13491 #ifdef ENABLE_FOLD_CHECKING
13492 unsigned char checksum_before[16], checksum_after[16];
13493 struct md5_ctx ctx;
13494 htab_t ht;
13496 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13497 md5_init_ctx (&ctx);
13498 fold_checksum_tree (op0, &ctx, ht);
13499 md5_finish_ctx (&ctx, checksum_before);
13500 htab_empty (ht);
13501 #endif
13503 tem = fold_unary (code, type, op0);
13504 if (!tem)
13505 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13507 #ifdef ENABLE_FOLD_CHECKING
13508 md5_init_ctx (&ctx);
13509 fold_checksum_tree (op0, &ctx, ht);
13510 md5_finish_ctx (&ctx, checksum_after);
13511 htab_delete (ht);
13513 if (memcmp (checksum_before, checksum_after, 16))
13514 fold_check_failed (op0, tem);
13515 #endif
13516 return tem;
13519 /* Fold a binary tree expression with code CODE of type TYPE with
13520 operands OP0 and OP1. Return a folded expression if successful.
13521 Otherwise, return a tree expression with code CODE of type TYPE
13522 with operands OP0 and OP1. */
13524 tree
13525 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13526 MEM_STAT_DECL)
13528 tree tem;
13529 #ifdef ENABLE_FOLD_CHECKING
13530 unsigned char checksum_before_op0[16],
13531 checksum_before_op1[16],
13532 checksum_after_op0[16],
13533 checksum_after_op1[16];
13534 struct md5_ctx ctx;
13535 htab_t ht;
13537 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13538 md5_init_ctx (&ctx);
13539 fold_checksum_tree (op0, &ctx, ht);
13540 md5_finish_ctx (&ctx, checksum_before_op0);
13541 htab_empty (ht);
13543 md5_init_ctx (&ctx);
13544 fold_checksum_tree (op1, &ctx, ht);
13545 md5_finish_ctx (&ctx, checksum_before_op1);
13546 htab_empty (ht);
13547 #endif
13549 tem = fold_binary (code, type, op0, op1);
13550 if (!tem)
13551 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13553 #ifdef ENABLE_FOLD_CHECKING
13554 md5_init_ctx (&ctx);
13555 fold_checksum_tree (op0, &ctx, ht);
13556 md5_finish_ctx (&ctx, checksum_after_op0);
13557 htab_empty (ht);
13559 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13560 fold_check_failed (op0, tem);
13562 md5_init_ctx (&ctx);
13563 fold_checksum_tree (op1, &ctx, ht);
13564 md5_finish_ctx (&ctx, checksum_after_op1);
13565 htab_delete (ht);
13567 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13568 fold_check_failed (op1, tem);
13569 #endif
13570 return tem;
13573 /* Fold a ternary tree expression with code CODE of type TYPE with
13574 operands OP0, OP1, and OP2. Return a folded expression if
13575 successful. Otherwise, return a tree expression with code CODE of
13576 type TYPE with operands OP0, OP1, and OP2. */
13578 tree
13579 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13580 MEM_STAT_DECL)
13582 tree tem;
13583 #ifdef ENABLE_FOLD_CHECKING
13584 unsigned char checksum_before_op0[16],
13585 checksum_before_op1[16],
13586 checksum_before_op2[16],
13587 checksum_after_op0[16],
13588 checksum_after_op1[16],
13589 checksum_after_op2[16];
13590 struct md5_ctx ctx;
13591 htab_t ht;
13593 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13594 md5_init_ctx (&ctx);
13595 fold_checksum_tree (op0, &ctx, ht);
13596 md5_finish_ctx (&ctx, checksum_before_op0);
13597 htab_empty (ht);
13599 md5_init_ctx (&ctx);
13600 fold_checksum_tree (op1, &ctx, ht);
13601 md5_finish_ctx (&ctx, checksum_before_op1);
13602 htab_empty (ht);
13604 md5_init_ctx (&ctx);
13605 fold_checksum_tree (op2, &ctx, ht);
13606 md5_finish_ctx (&ctx, checksum_before_op2);
13607 htab_empty (ht);
13608 #endif
13610 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13611 tem = fold_ternary (code, type, op0, op1, op2);
13612 if (!tem)
13613 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13615 #ifdef ENABLE_FOLD_CHECKING
13616 md5_init_ctx (&ctx);
13617 fold_checksum_tree (op0, &ctx, ht);
13618 md5_finish_ctx (&ctx, checksum_after_op0);
13619 htab_empty (ht);
13621 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13622 fold_check_failed (op0, tem);
13624 md5_init_ctx (&ctx);
13625 fold_checksum_tree (op1, &ctx, ht);
13626 md5_finish_ctx (&ctx, checksum_after_op1);
13627 htab_empty (ht);
13629 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13630 fold_check_failed (op1, tem);
13632 md5_init_ctx (&ctx);
13633 fold_checksum_tree (op2, &ctx, ht);
13634 md5_finish_ctx (&ctx, checksum_after_op2);
13635 htab_delete (ht);
13637 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13638 fold_check_failed (op2, tem);
13639 #endif
13640 return tem;
13643 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
13644 arguments in ARGARRAY, and a null static chain.
13645 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13646 of type TYPE from the given operands as constructed by build_call_array. */
13648 tree
13649 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13651 tree tem;
13652 #ifdef ENABLE_FOLD_CHECKING
13653 unsigned char checksum_before_fn[16],
13654 checksum_before_arglist[16],
13655 checksum_after_fn[16],
13656 checksum_after_arglist[16];
13657 struct md5_ctx ctx;
13658 htab_t ht;
13659 int i;
13661 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13662 md5_init_ctx (&ctx);
13663 fold_checksum_tree (fn, &ctx, ht);
13664 md5_finish_ctx (&ctx, checksum_before_fn);
13665 htab_empty (ht);
13667 md5_init_ctx (&ctx);
13668 for (i = 0; i < nargs; i++)
13669 fold_checksum_tree (argarray[i], &ctx, ht);
13670 md5_finish_ctx (&ctx, checksum_before_arglist);
13671 htab_empty (ht);
13672 #endif
13674 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13676 #ifdef ENABLE_FOLD_CHECKING
13677 md5_init_ctx (&ctx);
13678 fold_checksum_tree (fn, &ctx, ht);
13679 md5_finish_ctx (&ctx, checksum_after_fn);
13680 htab_empty (ht);
13682 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13683 fold_check_failed (fn, tem);
13685 md5_init_ctx (&ctx);
13686 for (i = 0; i < nargs; i++)
13687 fold_checksum_tree (argarray[i], &ctx, ht);
13688 md5_finish_ctx (&ctx, checksum_after_arglist);
13689 htab_delete (ht);
13691 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13692 fold_check_failed (NULL_TREE, tem);
13693 #endif
13694 return tem;
13697 /* Perform constant folding and related simplification of initializer
13698 expression EXPR. These behave identically to "fold_buildN" but ignore
13699 potential run-time traps and exceptions that fold must preserve. */
13701 #define START_FOLD_INIT \
13702 int saved_signaling_nans = flag_signaling_nans;\
13703 int saved_trapping_math = flag_trapping_math;\
13704 int saved_rounding_math = flag_rounding_math;\
13705 int saved_trapv = flag_trapv;\
13706 int saved_folding_initializer = folding_initializer;\
13707 flag_signaling_nans = 0;\
13708 flag_trapping_math = 0;\
13709 flag_rounding_math = 0;\
13710 flag_trapv = 0;\
13711 folding_initializer = 1;
13713 #define END_FOLD_INIT \
13714 flag_signaling_nans = saved_signaling_nans;\
13715 flag_trapping_math = saved_trapping_math;\
13716 flag_rounding_math = saved_rounding_math;\
13717 flag_trapv = saved_trapv;\
13718 folding_initializer = saved_folding_initializer;
13720 tree
13721 fold_build1_initializer (enum tree_code code, tree type, tree op)
13723 tree result;
13724 START_FOLD_INIT;
13726 result = fold_build1 (code, type, op);
13728 END_FOLD_INIT;
13729 return result;
13732 tree
13733 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13735 tree result;
13736 START_FOLD_INIT;
13738 result = fold_build2 (code, type, op0, op1);
13740 END_FOLD_INIT;
13741 return result;
13744 tree
13745 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13746 tree op2)
13748 tree result;
13749 START_FOLD_INIT;
13751 result = fold_build3 (code, type, op0, op1, op2);
13753 END_FOLD_INIT;
13754 return result;
13757 tree
13758 fold_build_call_array_initializer (tree type, tree fn,
13759 int nargs, tree *argarray)
13761 tree result;
13762 START_FOLD_INIT;
13764 result = fold_build_call_array (type, fn, nargs, argarray);
13766 END_FOLD_INIT;
13767 return result;
13770 #undef START_FOLD_INIT
13771 #undef END_FOLD_INIT
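/* Editorial illustration (not part of the original source): a
   hypothetical use of the initializer variants from elsewhere in GCC.
   Because START_FOLD_INIT clears flag_trapping_math and friends, the
   division below folds to a constant (+Inf) even though it would trap
   at run time under -ftrapping-math.  */
#if 0
  tree t = fold_build2_initializer (RDIV_EXPR, double_type_node,
                                    build_real (double_type_node, dconst1),
                                    build_real (double_type_node, dconst0));
#endif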
13773 /* Determine if first argument is a multiple of second argument. Return 0 if
13774 it is not, or we cannot easily determine that it is.
13776 An example of the sort of thing we care about (at this point; this routine
13777 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13778 fold cases do now) is discovering that
13780 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13782 is a multiple of
13784 SAVE_EXPR (J * 8)
13786 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13788 This code also handles discovering that
13790 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13792 is a multiple of 8 so we don't have to worry about dealing with a
13793 possible remainder.
13795 Note that we *look* inside a SAVE_EXPR only to determine how it was
13796 calculated; it is not safe for fold to do much of anything else with the
13797 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13798 at run time. For example, the latter example above *cannot* be implemented
13799 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13800 evaluation time of the original SAVE_EXPR is not necessarily the same at
13801 the time the new expression is evaluated. The only optimization of this
13802 sort that would be valid is changing
13804 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13806 divided by 8 to
13808 SAVE_EXPR (I) * SAVE_EXPR (J)
13810 (where the same SAVE_EXPR (J) is used in the original and the
13811 transformed version). */
13813 int
13814 multiple_of_p (tree type, const_tree top, const_tree bottom)
13816 if (operand_equal_p (top, bottom, 0))
13817 return 1;
13819 if (TREE_CODE (type) != INTEGER_TYPE)
13820 return 0;
13822 switch (TREE_CODE (top))
13824 case BIT_AND_EXPR:
13825 /* Bitwise and provides a power of two multiple. If the mask is
13826 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13827 if (!integer_pow2p (bottom))
13828 return 0;
13829 /* FALLTHRU */
13831 case MULT_EXPR:
13832 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13833 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13835 case PLUS_EXPR:
13836 case MINUS_EXPR:
13837 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13838 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13840 case LSHIFT_EXPR:
13841 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13843 tree op1, t1;
13845 op1 = TREE_OPERAND (top, 1);
13846 /* const_binop may not detect overflow correctly,
13847 so check for it explicitly here. */
13848 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13849 > TREE_INT_CST_LOW (op1)
13850 && TREE_INT_CST_HIGH (op1) == 0
13851 && 0 != (t1 = fold_convert (type,
13852 const_binop (LSHIFT_EXPR,
13853 size_one_node,
13854 op1, 0)))
13855 && !TREE_OVERFLOW (t1))
13856 return multiple_of_p (type, t1, bottom);
13858 return 0;
13860 case NOP_EXPR:
13861 /* Can't handle conversions from non-integral or wider integral type. */
13862 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13863 || (TYPE_PRECISION (type)
13864 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13865 return 0;
13867 /* ... fall through ... */
13869 case SAVE_EXPR:
13870 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13872 case INTEGER_CST:
13873 if (TREE_CODE (bottom) != INTEGER_CST
13874 || integer_zerop (bottom)
13875 || (TYPE_UNSIGNED (type)
13876 && (tree_int_cst_sgn (top) < 0
13877 || tree_int_cst_sgn (bottom) < 0)))
13878 return 0;
13879 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13880 top, bottom, 0));
13882 default:
13883 return 0;
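/* Editorial illustration (not part of the original source): a
   hypothetical query against multiple_of_p, assuming I and J are
   integer-typed trees already in scope.  */
#if 0
  /* Is I * (J * 8) a multiple of 8?  multiple_of_p recurses into the
     MULT_EXPR operands and answers 1 via the constant factor.  */
  tree top = build2 (MULT_EXPR, sizetype, i,
                     build2 (MULT_EXPR, sizetype, j,
                             build_int_cst (sizetype, 8)));
  int ok = multiple_of_p (sizetype, top, build_int_cst (sizetype, 8));
#endif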
13887 /* Return true if CODE or TYPE is known to be non-negative. */
13889 static bool
13890 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13892 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13893 && truth_value_p (code))
13894 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13895 have a signed:1 type (where the values are -1 and 0). */
13896 return true;
13897 return false;
13900 /* Return true if (CODE OP0) is known to be non-negative. If the return
13901 value is based on the assumption that signed overflow is undefined,
13902 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13903 *STRICT_OVERFLOW_P. */
13905 bool
13906 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13907 bool *strict_overflow_p)
13909 if (TYPE_UNSIGNED (type))
13910 return true;
13912 switch (code)
13914 case ABS_EXPR:
13915 /* We can't return 1 if flag_wrapv is set because
13916 ABS_EXPR<INT_MIN> = INT_MIN. */
13917 if (!INTEGRAL_TYPE_P (type))
13918 return true;
13919 if (TYPE_OVERFLOW_UNDEFINED (type))
13921 *strict_overflow_p = true;
13922 return true;
13924 break;
13926 case NON_LVALUE_EXPR:
13927 case FLOAT_EXPR:
13928 case FIX_TRUNC_EXPR:
13929 return tree_expr_nonnegative_warnv_p (op0,
13930 strict_overflow_p);
13932 case NOP_EXPR:
13934 tree inner_type = TREE_TYPE (op0);
13935 tree outer_type = type;
13937 if (TREE_CODE (outer_type) == REAL_TYPE)
13939 if (TREE_CODE (inner_type) == REAL_TYPE)
13940 return tree_expr_nonnegative_warnv_p (op0,
13941 strict_overflow_p);
13942 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13944 if (TYPE_UNSIGNED (inner_type))
13945 return true;
13946 return tree_expr_nonnegative_warnv_p (op0,
13947 strict_overflow_p);
13950 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13952 if (TREE_CODE (inner_type) == REAL_TYPE)
13953 return tree_expr_nonnegative_warnv_p (op0,
13954 strict_overflow_p);
13955 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13956 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13957 && TYPE_UNSIGNED (inner_type);
13960 break;
13962 default:
13963 return tree_simple_nonnegative_warnv_p (code, type);
13966 /* We don't know sign of `t', so be conservative and return false. */
13967 return false;
13970 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13971 value is based on the assumption that signed overflow is undefined,
13972 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13973 *STRICT_OVERFLOW_P. */
13975 bool
13976 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13977 tree op1, bool *strict_overflow_p)
13979 if (TYPE_UNSIGNED (type))
13980 return true;
13982 switch (code)
13984 case POINTER_PLUS_EXPR:
13985 case PLUS_EXPR:
13986 if (FLOAT_TYPE_P (type))
13987 return (tree_expr_nonnegative_warnv_p (op0,
13988 strict_overflow_p)
13989 && tree_expr_nonnegative_warnv_p (op1,
13990 strict_overflow_p));
13992 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13993 both unsigned and at least 2 bits shorter than the result. */
13994 if (TREE_CODE (type) == INTEGER_TYPE
13995 && TREE_CODE (op0) == NOP_EXPR
13996 && TREE_CODE (op1) == NOP_EXPR)
13998 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13999 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14000 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14001 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14003 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14004 TYPE_PRECISION (inner2)) + 1;
14005 return prec < TYPE_PRECISION (type);
14008 break;
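/* Editorial illustration (not part of the original source): the
   widening-addition rule above at the source level, assuming 8-bit
   char and 32-bit int.  The function name is hypothetical.  */
#if 0
static int
example_widened_add (unsigned char x, unsigned char y)
{
  /* X and Y zero-extend to at most 8 significant bits, so the int
     sum fits in 9 bits; 9 < 32, hence provably nonnegative.  */
  return x + y;
}
#endif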
14010 case MULT_EXPR:
14011 if (FLOAT_TYPE_P (type))
14013 /* x * x for floating point x is always non-negative. */
14014 if (operand_equal_p (op0, op1, 0))
14015 return true;
14016 return (tree_expr_nonnegative_warnv_p (op0,
14017 strict_overflow_p)
14018 && tree_expr_nonnegative_warnv_p (op1,
14019 strict_overflow_p));
14022 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14023 both unsigned and their total width is shorter than the result's. */
14024 if (TREE_CODE (type) == INTEGER_TYPE
14025 && TREE_CODE (op0) == NOP_EXPR
14026 && TREE_CODE (op1) == NOP_EXPR)
14028 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14029 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14030 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14031 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14032 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14033 < TYPE_PRECISION (type);
14035 return false;
14037 case BIT_AND_EXPR:
14038 case MAX_EXPR:
14039 return (tree_expr_nonnegative_warnv_p (op0,
14040 strict_overflow_p)
14041 || tree_expr_nonnegative_warnv_p (op1,
14042 strict_overflow_p));
14044 case BIT_IOR_EXPR:
14045 case BIT_XOR_EXPR:
14046 case MIN_EXPR:
14047 case RDIV_EXPR:
14048 case TRUNC_DIV_EXPR:
14049 case CEIL_DIV_EXPR:
14050 case FLOOR_DIV_EXPR:
14051 case ROUND_DIV_EXPR:
14052 return (tree_expr_nonnegative_warnv_p (op0,
14053 strict_overflow_p)
14054 && tree_expr_nonnegative_warnv_p (op1,
14055 strict_overflow_p));
14057 case TRUNC_MOD_EXPR:
14058 case CEIL_MOD_EXPR:
14059 case FLOOR_MOD_EXPR:
14060 case ROUND_MOD_EXPR:
14061 return tree_expr_nonnegative_warnv_p (op0,
14062 strict_overflow_p);
14063 default:
14064 return tree_simple_nonnegative_warnv_p (code, type);
14067 /* We don't know sign of `t', so be conservative and return false. */
14068 return false;
14071 /* Return true if T is known to be non-negative. If the return
14072 value is based on the assumption that signed overflow is undefined,
14073 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14074 *STRICT_OVERFLOW_P. */
14076 bool
14077 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14079 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14080 return true;
14082 switch (TREE_CODE (t))
14084 case INTEGER_CST:
14085 return tree_int_cst_sgn (t) >= 0;
14087 case REAL_CST:
14088 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14090 case FIXED_CST:
14091 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14093 case COND_EXPR:
14094 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14095 strict_overflow_p)
14096 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14097 strict_overflow_p));
14098 default:
14099 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14100 TREE_TYPE (t));
14102 /* We don't know sign of `t', so be conservative and return false. */
14103 return false;
14106 /* Return true if T is known to be non-negative. If the return
14107 value is based on the assumption that signed overflow is undefined,
14108 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14109 *STRICT_OVERFLOW_P. */
14111 bool
14112 tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
14113 tree arg0, tree arg1, bool *strict_overflow_p)
14115 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14116 switch (DECL_FUNCTION_CODE (fndecl))
14118 CASE_FLT_FN (BUILT_IN_ACOS):
14119 CASE_FLT_FN (BUILT_IN_ACOSH):
14120 CASE_FLT_FN (BUILT_IN_CABS):
14121 CASE_FLT_FN (BUILT_IN_COSH):
14122 CASE_FLT_FN (BUILT_IN_ERFC):
14123 CASE_FLT_FN (BUILT_IN_EXP):
14124 CASE_FLT_FN (BUILT_IN_EXP10):
14125 CASE_FLT_FN (BUILT_IN_EXP2):
14126 CASE_FLT_FN (BUILT_IN_FABS):
14127 CASE_FLT_FN (BUILT_IN_FDIM):
14128 CASE_FLT_FN (BUILT_IN_HYPOT):
14129 CASE_FLT_FN (BUILT_IN_POW10):
14130 CASE_INT_FN (BUILT_IN_FFS):
14131 CASE_INT_FN (BUILT_IN_PARITY):
14132 CASE_INT_FN (BUILT_IN_POPCOUNT):
14133 case BUILT_IN_BSWAP32:
14134 case BUILT_IN_BSWAP64:
14135 /* Always true. */
14136 return true;
14138 CASE_FLT_FN (BUILT_IN_SQRT):
14139 /* sqrt(-0.0) is -0.0. */
14140 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14141 return true;
14142 return tree_expr_nonnegative_warnv_p (arg0,
14143 strict_overflow_p);
14145 CASE_FLT_FN (BUILT_IN_ASINH):
14146 CASE_FLT_FN (BUILT_IN_ATAN):
14147 CASE_FLT_FN (BUILT_IN_ATANH):
14148 CASE_FLT_FN (BUILT_IN_CBRT):
14149 CASE_FLT_FN (BUILT_IN_CEIL):
14150 CASE_FLT_FN (BUILT_IN_ERF):
14151 CASE_FLT_FN (BUILT_IN_EXPM1):
14152 CASE_FLT_FN (BUILT_IN_FLOOR):
14153 CASE_FLT_FN (BUILT_IN_FMOD):
14154 CASE_FLT_FN (BUILT_IN_FREXP):
14155 CASE_FLT_FN (BUILT_IN_LCEIL):
14156 CASE_FLT_FN (BUILT_IN_LDEXP):
14157 CASE_FLT_FN (BUILT_IN_LFLOOR):
14158 CASE_FLT_FN (BUILT_IN_LLCEIL):
14159 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14160 CASE_FLT_FN (BUILT_IN_LLRINT):
14161 CASE_FLT_FN (BUILT_IN_LLROUND):
14162 CASE_FLT_FN (BUILT_IN_LRINT):
14163 CASE_FLT_FN (BUILT_IN_LROUND):
14164 CASE_FLT_FN (BUILT_IN_MODF):
14165 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14166 CASE_FLT_FN (BUILT_IN_RINT):
14167 CASE_FLT_FN (BUILT_IN_ROUND):
14168 CASE_FLT_FN (BUILT_IN_SCALB):
14169 CASE_FLT_FN (BUILT_IN_SCALBLN):
14170 CASE_FLT_FN (BUILT_IN_SCALBN):
14171 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14172 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14173 CASE_FLT_FN (BUILT_IN_SINH):
14174 CASE_FLT_FN (BUILT_IN_TANH):
14175 CASE_FLT_FN (BUILT_IN_TRUNC):
14176 /* True if the 1st argument is nonnegative. */
14177 return tree_expr_nonnegative_warnv_p (arg0,
14178 strict_overflow_p);
14180 CASE_FLT_FN (BUILT_IN_FMAX):
14181 /* True if the 1st OR 2nd arguments are nonnegative. */
14182 return (tree_expr_nonnegative_warnv_p (arg0,
14183 strict_overflow_p)
14184 || (tree_expr_nonnegative_warnv_p (arg1,
14185 strict_overflow_p)));
14187 CASE_FLT_FN (BUILT_IN_FMIN):
14188 /* True if the 1st AND 2nd arguments are nonnegative. */
14189 return (tree_expr_nonnegative_warnv_p (arg0,
14190 strict_overflow_p)
14191 && (tree_expr_nonnegative_warnv_p (arg1,
14192 strict_overflow_p)));
14194 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14195 /* True if the 2nd argument is nonnegative. */
14196 return tree_expr_nonnegative_warnv_p (arg1,
14197 strict_overflow_p);
14199 CASE_FLT_FN (BUILT_IN_POWI):
14200 /* True if the 1st argument is nonnegative or the second
14201 argument is an even integer. */
14202 if (TREE_CODE (arg1) == INTEGER_CST
14203 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14204 return true;
14205 return tree_expr_nonnegative_warnv_p (arg0,
14206 strict_overflow_p);
14208 CASE_FLT_FN (BUILT_IN_POW):
14209 /* True if the 1st argument is nonnegative or the second
14210 argument is an even integer valued real. */
14211 if (TREE_CODE (arg1) == REAL_CST)
14213 REAL_VALUE_TYPE c;
14214 HOST_WIDE_INT n;
14216 c = TREE_REAL_CST (arg1);
14217 n = real_to_integer (&c);
14218 if ((n & 1) == 0)
14220 REAL_VALUE_TYPE cint;
14221 real_from_integer (&cint, VOIDmode, n,
14222 n < 0 ? -1 : 0, 0);
14223 if (real_identical (&c, &cint))
14224 return true;
14227 return tree_expr_nonnegative_warnv_p (arg0,
14228 strict_overflow_p);
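/* Editorial illustration (not part of the original source): the pow
   rule above at the source level.  The function name is
   hypothetical.  */
#if 0
static double
example_pow_even (double x)
{
  /* 2.0 is an even-integer-valued REAL_CST, so pow (x, 2.0) is known
     nonnegative regardless of the sign of X.  */
  return __builtin_pow (x, 2.0);
}
#endif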
14230 default:
14231 break;
14233 return tree_simple_nonnegative_warnv_p (code,
14234 type);
14237 /* Return true if T is known to be non-negative. If the return
14238 value is based on the assumption that signed overflow is undefined,
14239 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14240 *STRICT_OVERFLOW_P. */
14242 bool
14243 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14245 enum tree_code code = TREE_CODE (t);
14246 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14247 return true;
14249 switch (code)
14251 case TARGET_EXPR:
14253 tree temp = TARGET_EXPR_SLOT (t);
14254 t = TARGET_EXPR_INITIAL (t);
14256 /* If the initializer is non-void, then it's a normal expression
14257 that will be assigned to the slot. */
14258 if (!VOID_TYPE_P (t))
14259 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14261 /* Otherwise, the initializer sets the slot in some way. One common
14262 way is an assignment statement at the end of the initializer. */
14263 while (1)
14265 if (TREE_CODE (t) == BIND_EXPR)
14266 t = expr_last (BIND_EXPR_BODY (t));
14267 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14268 || TREE_CODE (t) == TRY_CATCH_EXPR)
14269 t = expr_last (TREE_OPERAND (t, 0));
14270 else if (TREE_CODE (t) == STATEMENT_LIST)
14271 t = expr_last (t);
14272 else
14273 break;
14275 if ((TREE_CODE (t) == MODIFY_EXPR
14276 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14277 && GENERIC_TREE_OPERAND (t, 0) == temp)
14278 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14279 strict_overflow_p);
14281 return false;
14284 case CALL_EXPR:
14286 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14287 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14289 return tree_call_nonnegative_warnv_p (TREE_CODE (t),
14290 TREE_TYPE (t),
14291 get_callee_fndecl (t),
14292 arg0,
14293 arg1,
14294 strict_overflow_p);
14296 case COMPOUND_EXPR:
14297 case MODIFY_EXPR:
14298 case GIMPLE_MODIFY_STMT:
14299 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14300 strict_overflow_p);
14301 case BIND_EXPR:
14302 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14303 strict_overflow_p);
14304 case SAVE_EXPR:
14305 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14306 strict_overflow_p);
14308 default:
14309 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14310 TREE_TYPE (t));
14313 /* We don't know the sign of `t', so be conservative and return false. */
14314 return false;
14317 /* Return true if T is known to be non-negative. If the return
14318 value is based on the assumption that signed overflow is undefined,
14319 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14320 *STRICT_OVERFLOW_P. */
14322 bool
14323 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14325 enum tree_code code;
14326 if (t == error_mark_node)
14327 return false;
14329 code = TREE_CODE (t);
14330 switch (TREE_CODE_CLASS (code))
14332 case tcc_binary:
14333 case tcc_comparison:
14334 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14335 TREE_TYPE (t),
14336 TREE_OPERAND (t, 0),
14337 TREE_OPERAND (t, 1),
14338 strict_overflow_p);
14340 case tcc_unary:
14341 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14342 TREE_TYPE (t),
14343 TREE_OPERAND (t, 0),
14344 strict_overflow_p);
14346 case tcc_constant:
14347 case tcc_declaration:
14348 case tcc_reference:
14349 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14351 default:
14352 break;
14355 switch (code)
14357 case TRUTH_AND_EXPR:
14358 case TRUTH_OR_EXPR:
14359 case TRUTH_XOR_EXPR:
14360 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14361 TREE_TYPE (t),
14362 TREE_OPERAND (t, 0),
14363 TREE_OPERAND (t, 1),
14364 strict_overflow_p);
14365 case TRUTH_NOT_EXPR:
14366 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14367 TREE_TYPE (t),
14368 TREE_OPERAND (t, 0),
14369 strict_overflow_p);
14371 case COND_EXPR:
14372 case CONSTRUCTOR:
14373 case OBJ_TYPE_REF:
14374 case ASSERT_EXPR:
14375 case ADDR_EXPR:
14376 case WITH_SIZE_EXPR:
14377 case EXC_PTR_EXPR:
14378 case SSA_NAME:
14379 case FILTER_EXPR:
14380 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14382 default:
14383 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14387 /* Return true if `t' is known to be non-negative. Handle warnings
14388 about undefined signed overflow. */
14390 bool
14391 tree_expr_nonnegative_p (tree t)
14393 bool ret, strict_overflow_p;
14395 strict_overflow_p = false;
14396 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14397 if (strict_overflow_p)
14398 fold_overflow_warning (("assuming signed overflow does not occur when "
14399 "determining that expression is always "
14400 "non-negative"),
14401 WARN_STRICT_OVERFLOW_MISC);
14402 return ret;
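/* Illustration, not part of fold-const.c: why *STRICT_OVERFLOW_P
   matters.  Folding x * x to "nonnegative" is only valid when signed
   overflow is undefined; compiled with -fwrapv the product wraps and
   can be negative, so the folder records the assumption and
   -Wstrict-overflow can report it.  */
#include <stdio.h>

int
main (void)
{
  volatile int x = 46341;        /* 46341 * 46341 = 2147488281 > INT_MAX */
  printf ("%d\n", x * x);        /* prints -2147479015 under -fwrapv */
  return 0;
}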
14406 /* Return true when the unary expression (CODE OP0) is known to be
14407 nonzero. Similar logic is present in nonzero_address in
14408 rtlanal.c.
14410 If the return value is based on the assumption that signed overflow
14411 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14412 change *STRICT_OVERFLOW_P. */
14414 bool
14415 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14416 bool *strict_overflow_p)
14418 switch (code)
14420 case ABS_EXPR:
14421 return tree_expr_nonzero_warnv_p (op0,
14422 strict_overflow_p);
14424 case NOP_EXPR:
14426 tree inner_type = TREE_TYPE (op0);
14427 tree outer_type = type;
14429 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14430 && tree_expr_nonzero_warnv_p (op0,
14431 strict_overflow_p));
14433 break;
14435 case NON_LVALUE_EXPR:
14436 return tree_expr_nonzero_warnv_p (op0,
14437 strict_overflow_p);
14439 default:
14440 break;
14443 return false;
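/* Illustration, not part of fold-const.c: the precision test in the
   NOP_EXPR case above.  A widening conversion of a nonzero value stays
   nonzero, but a narrowing one can truncate to zero, so only
   TYPE_PRECISION (outer) >= TYPE_PRECISION (inner) is safe.  */
#include <stdio.h>

int
main (void)
{
  int nonzero = 0x100;
  long widened = (long) nonzero;                     /* still nonzero */
  unsigned char narrowed = (unsigned char) nonzero;  /* truncates to 0 */
  printf ("%ld %u\n", widened, (unsigned) narrowed);
  return 0;
}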
14446 /* Return true when the binary expression (CODE OP0 OP1) is known to
14447 be nonzero. Similar logic is present in nonzero_address in
14448 rtlanal.c.
14450 If the return value is based on the assumption that signed overflow
14451 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14452 change *STRICT_OVERFLOW_P. */
14454 bool
14455 tree_binary_nonzero_warnv_p (enum tree_code code,
14456 tree type,
14457 tree op0,
14458 tree op1, bool *strict_overflow_p)
14460 bool sub_strict_overflow_p;
14461 switch (code)
14463 case POINTER_PLUS_EXPR:
14464 case PLUS_EXPR:
14465 if (TYPE_OVERFLOW_UNDEFINED (type))
14467 /* When negative values may be present it is hard to say
14468 anything definite. */
14469 sub_strict_overflow_p = false;
14470 if (!tree_expr_nonnegative_warnv_p (op0,
14471 &sub_strict_overflow_p)
14472 || !tree_expr_nonnegative_warnv_p (op1,
14473 &sub_strict_overflow_p))
14474 return false;
14475 /* One of the operands must be positive and the other non-negative. */
14476 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14477 overflows, on a twos-complement machine the sum of two
14478 nonnegative numbers can never be zero. */
14479 return (tree_expr_nonzero_warnv_p (op0,
14480 strict_overflow_p)
14481 || tree_expr_nonzero_warnv_p (op1,
14482 strict_overflow_p));
14484 break;
14486 case MULT_EXPR:
14487 if (TYPE_OVERFLOW_UNDEFINED (type))
14489 if (tree_expr_nonzero_warnv_p (op0,
14490 strict_overflow_p)
14491 && tree_expr_nonzero_warnv_p (op1,
14492 strict_overflow_p))
14494 *strict_overflow_p = true;
14495 return true;
14498 break;
14500 case MIN_EXPR:
14501 sub_strict_overflow_p = false;
14502 if (tree_expr_nonzero_warnv_p (op0,
14503 &sub_strict_overflow_p)
14504 && tree_expr_nonzero_warnv_p (op1,
14505 &sub_strict_overflow_p))
14507 if (sub_strict_overflow_p)
14508 *strict_overflow_p = true;
14510 break;
14512 case MAX_EXPR:
14513 sub_strict_overflow_p = false;
14514 if (tree_expr_nonzero_warnv_p (op0,
14515 &sub_strict_overflow_p))
14517 if (sub_strict_overflow_p)
14518 *strict_overflow_p = true;
14520 /* When both operands are nonzero, MAX must be too. */
14521 if (tree_expr_nonzero_warnv_p (op1,
14522 strict_overflow_p))
14523 return true;
14525 /* MAX where operand 0 is positive is positive. */
14526 return tree_expr_nonnegative_warnv_p (op0,
14527 strict_overflow_p);
14529 /* MAX where operand 1 is positive is positive. */
14530 else if (tree_expr_nonzero_warnv_p (op1,
14531 &sub_strict_overflow_p)
14532 && tree_expr_nonnegative_warnv_p (op1,
14533 &sub_strict_overflow_p))
14535 if (sub_strict_overflow_p)
14536 *strict_overflow_p = true;
14537 return true;
14539 break;
14541 case BIT_IOR_EXPR:
14542 return (tree_expr_nonzero_warnv_p (op1,
14543 strict_overflow_p)
14544 || tree_expr_nonzero_warnv_p (op0,
14545 strict_overflow_p));
14547 default:
14548 break;
14551 return false;
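/* Illustration, not part of fold-const.c: the PLUS_EXPR reasoning
   above.  If both operands are nonnegative and at least one is
   nonzero, the sum cannot be zero even when it wraps: as unsigned
   values both lie below 2^31, so their sum lies in (0, 2^32) and
   never hits 0 modulo 2^32.  */
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  unsigned int a = (unsigned int) INT_MAX;  /* nonnegative as int */
  unsigned int b = 1;                       /* nonnegative and nonzero */
  printf ("%u\n", a + b);  /* 2147483648: the sign bit flips, but never 0 */
  return 0;
}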
14554 /* Return true when T -- a constant, an address or a COND_EXPR -- is
14555 known to be nonzero. Similar logic for addresses is present in
14556 nonzero_address in rtlanal.c.
14558 If the return value is based on the assumption that signed overflow
14559 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14560 change *STRICT_OVERFLOW_P. */
14562 bool
14563 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14565 bool sub_strict_overflow_p;
14566 switch (TREE_CODE (t))
14568 case INTEGER_CST:
14569 return !integer_zerop (t);
14571 case ADDR_EXPR:
14573 tree base = get_base_address (TREE_OPERAND (t, 0));
14575 if (!base)
14576 return false;
14578 /* Weak declarations may link to NULL. */
14579 if (VAR_OR_FUNCTION_DECL_P (base))
14580 return !DECL_WEAK (base);
14582 /* Constants are never weak. */
14583 if (CONSTANT_CLASS_P (base))
14584 return true;
14586 return false;
14589 case COND_EXPR:
14590 sub_strict_overflow_p = false;
14591 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14592 &sub_strict_overflow_p)
14593 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14594 &sub_strict_overflow_p))
14596 if (sub_strict_overflow_p)
14597 *strict_overflow_p = true;
14598 return true;
14600 break;
14602 default:
14603 break;
14605 return false;
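/* Illustration, not part of fold-const.c: why the ADDR_EXPR case above
   refuses to fold the address of a weak declaration.  A weak symbol may
   remain undefined at link time, in which case its address is null, so
   a null test against it is meaningful at run time.  The names below
   are illustrative only.  */
extern void maybe_provided (void) __attribute__ ((weak));

int
weak_is_present (void)
{
  return maybe_provided != 0;   /* cannot be folded to 1: DECL_WEAK */
}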
14608 /* Return true when T, an expression of integral or pointer type, is
14609 known to be nonzero. Similar logic is present in nonzero_address
14610 in rtlanal.c.
14612 If the return value is based on the assumption that signed overflow
14613 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14614 change *STRICT_OVERFLOW_P. */
14616 bool
14617 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14619 tree type = TREE_TYPE (t);
14620 enum tree_code code;
14622 /* Doing something useful for floating point would need more work. */
14623 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14624 return false;
14626 code = TREE_CODE (t);
14627 switch (TREE_CODE_CLASS (code))
14629 case tcc_unary:
14630 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14631 strict_overflow_p);
14632 case tcc_binary:
14633 case tcc_comparison:
14634 return tree_binary_nonzero_warnv_p (code, type,
14635 TREE_OPERAND (t, 0),
14636 TREE_OPERAND (t, 1),
14637 strict_overflow_p);
14638 case tcc_constant:
14639 case tcc_declaration:
14640 case tcc_reference:
14641 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14643 default:
14644 break;
14647 switch (code)
14649 case TRUTH_NOT_EXPR:
14650 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14651 strict_overflow_p);
14653 case TRUTH_AND_EXPR:
14654 case TRUTH_OR_EXPR:
14655 case TRUTH_XOR_EXPR:
14656 return tree_binary_nonzero_warnv_p (code, type,
14657 TREE_OPERAND (t, 0),
14658 TREE_OPERAND (t, 1),
14659 strict_overflow_p);
14661 case COND_EXPR:
14662 case CONSTRUCTOR:
14663 case OBJ_TYPE_REF:
14664 case ASSERT_EXPR:
14665 case ADDR_EXPR:
14666 case WITH_SIZE_EXPR:
14667 case EXC_PTR_EXPR:
14668 case SSA_NAME:
14669 case FILTER_EXPR:
14670 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14672 case COMPOUND_EXPR:
14673 case MODIFY_EXPR:
14674 case GIMPLE_MODIFY_STMT:
14675 case BIND_EXPR:
14676 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14677 strict_overflow_p);
14679 case SAVE_EXPR:
14680 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14681 strict_overflow_p);
14683 case CALL_EXPR:
14684 return alloca_call_p (t);
14686 default:
14687 break;
14689 return false;
14692 /* Return true when T is known to be nonzero. Handle warnings
14693 about undefined signed overflow. */
14695 bool
14696 tree_expr_nonzero_p (tree t)
14698 bool ret, strict_overflow_p;
14700 strict_overflow_p = false;
14701 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14702 if (strict_overflow_p)
14703 fold_overflow_warning (("assuming signed overflow does not occur when "
14704 "determining that expression is always "
14705 "non-zero"),
14706 WARN_STRICT_OVERFLOW_MISC);
14707 return ret;
14710 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14711 attempt to fold the expression to a constant without modifying TYPE,
14712 OP0 or OP1.
14714 If the expression could be simplified to a constant, then return
14715 the constant. If the expression would not be simplified to a
14716 constant, then return NULL_TREE. */
14718 tree
14719 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14721 tree tem = fold_binary (code, type, op0, op1);
14722 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14725 /* Given the components of a unary expression CODE, TYPE and OP0,
14726 attempt to fold the expression to a constant without modifying
14727 TYPE or OP0.
14729 If the expression could be simplified to a constant, then return
14730 the constant. If the expression would not be simplified to a
14731 constant, then return NULL_TREE. */
14733 tree
14734 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14736 tree tem = fold_unary (code, type, op0);
14737 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14740 /* If EXP represents referencing an element in a constant string
14741 (either via pointer arithmetic or array indexing), return the
14742 tree representing the value accessed, otherwise return NULL. */
14744 tree
14745 fold_read_from_constant_string (tree exp)
14747 if ((TREE_CODE (exp) == INDIRECT_REF
14748 || TREE_CODE (exp) == ARRAY_REF)
14749 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14751 tree exp1 = TREE_OPERAND (exp, 0);
14752 tree index;
14753 tree string;
14755 if (TREE_CODE (exp) == INDIRECT_REF)
14756 string = string_constant (exp1, &index);
14757 else
14759 tree low_bound = array_ref_low_bound (exp);
14760 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14762 /* Optimize the special case of a zero lower bound.
14764 We convert the low_bound to sizetype to avoid some problems
14765 with constant folding. (E.g. suppose the lower bound is 1,
14766 and its mode is QI. Without the conversion, (ARRAY
14767 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14768 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14769 if (! integer_zerop (low_bound))
14770 index = size_diffop (index, fold_convert (sizetype, low_bound));
14772 string = exp1;
14775 if (string
14776 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14777 && TREE_CODE (string) == STRING_CST
14778 && TREE_CODE (index) == INTEGER_CST
14779 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14780 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14781 == MODE_INT)
14782 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14783 return build_int_cst_type (TREE_TYPE (exp),
14784 (TREE_STRING_POINTER (string)
14785 [TREE_INT_CST_LOW (index)]));
14787 return NULL;
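/* Illustration, not part of fold-const.c: the accesses this folds.
   Both the array-indexing and the pointer-arithmetic forms below read
   a character of a STRING_CST at a constant index, so the load can be
   replaced by the character value at compile time.  The function name
   is illustrative only.  */
int
folded_string_reads (void)
{
  return "hello"[1] + *("hello" + 4);   /* 'e' + 'o' = 101 + 111 = 212 */
}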
14790 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14791 an integer constant, real, or fixed-point constant.
14793 TYPE is the type of the result. */
14795 static tree
14796 fold_negate_const (tree arg0, tree type)
14798 tree t = NULL_TREE;
14800 switch (TREE_CODE (arg0))
14802 case INTEGER_CST:
14804 unsigned HOST_WIDE_INT low;
14805 HOST_WIDE_INT high;
14806 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14807 TREE_INT_CST_HIGH (arg0),
14808 &low, &high);
14809 t = force_fit_type_double (type, low, high, 1,
14810 (overflow | TREE_OVERFLOW (arg0))
14811 && !TYPE_UNSIGNED (type));
14812 break;
14815 case REAL_CST:
14816 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14817 break;
14819 case FIXED_CST:
14821 FIXED_VALUE_TYPE f;
14822 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14823 &(TREE_FIXED_CST (arg0)), NULL,
14824 TYPE_SATURATING (type));
14825 t = build_fixed (type, f);
14826 /* Propagate overflow flags. */
14827 if (overflow_p | TREE_OVERFLOW (arg0))
14829 TREE_OVERFLOW (t) = 1;
14830 TREE_CONSTANT_OVERFLOW (t) = 1;
14832 else if (TREE_CONSTANT_OVERFLOW (arg0))
14833 TREE_CONSTANT_OVERFLOW (t) = 1;
14834 break;
14837 default:
14838 gcc_unreachable ();
14841 return t;
14844 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14845 an integer constant or real constant.
14847 TYPE is the type of the result. */
14849 tree
14850 fold_abs_const (tree arg0, tree type)
14852 tree t = NULL_TREE;
14854 switch (TREE_CODE (arg0))
14856 case INTEGER_CST:
14857 /* If the value is unsigned, then the absolute value is
14858 the same as the ordinary value. */
14859 if (TYPE_UNSIGNED (type))
14860 t = arg0;
14861 /* Similarly, if the value is non-negative. */
14862 else if (INT_CST_LT (integer_minus_one_node, arg0))
14863 t = arg0;
14864 /* If the value is negative, then the absolute value is
14865 its negation. */
14866 else
14868 unsigned HOST_WIDE_INT low;
14869 HOST_WIDE_INT high;
14870 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14871 TREE_INT_CST_HIGH (arg0),
14872 &low, &high);
14873 t = force_fit_type_double (type, low, high, -1,
14874 overflow | TREE_OVERFLOW (arg0));
14876 break;
14878 case REAL_CST:
14879 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14880 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14881 else
14882 t = arg0;
14883 break;
14885 default:
14886 gcc_unreachable ();
14889 return t;
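/* Illustration, not part of fold-const.c: the overflow case recorded
   above.  Negating the most negative integer does not fit the signed
   type, which is why force_fit_type_double is told about the overflow.
   A standalone absolute value that sidesteps the trap computes into
   unsigned; the name safe_abs is illustrative only.  */
#include <limits.h>

static unsigned int
safe_abs (int v)
{
  /* 0u - v is well defined even for v == INT_MIN, where the result is
     2147483648 -- one more than INT_MAX, hence the TREE_OVERFLOW flag
     when the folder must keep the value in the signed type.  */
  return v < 0 ? 0u - (unsigned int) v : (unsigned int) v;
}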
14892 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14893 constant. TYPE is the type of the result. */
14895 static tree
14896 fold_not_const (tree arg0, tree type)
14898 tree t = NULL_TREE;
14900 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14902 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14903 ~TREE_INT_CST_HIGH (arg0), 0,
14904 TREE_OVERFLOW (arg0));
14906 return t;
14909 /* Given CODE, a relational operator, the target type, TYPE and two
14910 constant operands OP0 and OP1, return the result of the
14911 relational operation. If the result is not a compile time
14912 constant, then return NULL_TREE. */
14914 static tree
14915 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14917 int result, invert;
14919 /* From here on, the only cases we handle are when the result is
14920 known to be a constant. */
14922 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14924 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14925 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14927 /* Handle the cases where either operand is a NaN. */
14928 if (real_isnan (c0) || real_isnan (c1))
14930 switch (code)
14932 case EQ_EXPR:
14933 case ORDERED_EXPR:
14934 result = 0;
14935 break;
14937 case NE_EXPR:
14938 case UNORDERED_EXPR:
14939 case UNLT_EXPR:
14940 case UNLE_EXPR:
14941 case UNGT_EXPR:
14942 case UNGE_EXPR:
14943 case UNEQ_EXPR:
14944 result = 1;
14945 break;
14947 case LT_EXPR:
14948 case LE_EXPR:
14949 case GT_EXPR:
14950 case GE_EXPR:
14951 case LTGT_EXPR:
14952 if (flag_trapping_math)
14953 return NULL_TREE;
14954 result = 0;
14955 break;
14957 default:
14958 gcc_unreachable ();
14961 return constant_boolean_node (result, type);
14964 return constant_boolean_node (real_compare (code, c0, c1), type);
14967 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14969 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14970 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14971 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14974 /* Handle equality/inequality of complex constants. */
14975 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14977 tree rcond = fold_relational_const (code, type,
14978 TREE_REALPART (op0),
14979 TREE_REALPART (op1));
14980 tree icond = fold_relational_const (code, type,
14981 TREE_IMAGPART (op0),
14982 TREE_IMAGPART (op1));
14983 if (code == EQ_EXPR)
14984 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14985 else if (code == NE_EXPR)
14986 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14987 else
14988 return NULL_TREE;
14991 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14993 To compute GT, swap the arguments and do LT.
14994 To compute GE, do LT and invert the result.
14995 To compute LE, swap the arguments, do LT and invert the result.
14996 To compute NE, do EQ and invert the result.
14998 Therefore, the code below must handle only EQ and LT. */
15000 if (code == LE_EXPR || code == GT_EXPR)
15002 tree tem = op0;
15003 op0 = op1;
15004 op1 = tem;
15005 code = swap_tree_comparison (code);
15008 /* Note that it is safe to invert for real values here because we
15009 have already handled the one case where it matters. */
15011 invert = 0;
15012 if (code == NE_EXPR || code == GE_EXPR)
15014 invert = 1;
15015 code = invert_tree_comparison (code, false);
15018 /* Compute a result for LT or EQ if args permit;
15019 otherwise return NULL_TREE. */
15020 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15022 if (code == EQ_EXPR)
15023 result = tree_int_cst_equal (op0, op1);
15024 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15025 result = INT_CST_LT_UNSIGNED (op0, op1);
15026 else
15027 result = INT_CST_LT (op0, op1);
15029 else
15030 return NULL_TREE;
15032 if (invert)
15033 result ^= 1;
15034 return constant_boolean_node (result, type);
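/* Illustration, not part of fold-const.c: the NaN table above,
   observable from C.  EQ and ORDERED are false on a NaN operand,
   NE/UNORDERED are true, and the signaling comparisons (<, <=, >, >=,
   LTGT) are left unfolded under -ftrapping-math because they raise
   FE_INVALID.  */
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double n = NAN;
  printf ("%d %d %d %d\n",
          n == n,                /* 0: EQ_EXPR with a NaN is false */
          n != n,                /* 1: NE_EXPR with a NaN is true */
          isunordered (n, 1.0),  /* 1: UNORDERED_EXPR */
          isless (n, 1.0));      /* 0, quietly -- unlike n < 1.0 */
  return 0;
}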
15037 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15038 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15039 itself. */
15041 tree
15042 fold_build_cleanup_point_expr (tree type, tree expr)
15044 /* If the expression does not have side effects then we don't have to wrap
15045 it with a cleanup point expression. */
15046 if (!TREE_SIDE_EFFECTS (expr))
15047 return expr;
15049 /* If the expression is a return, check whether the expression inside
15050 the return, or the right-hand side of the modify expression inside
15051 the return, has no side effects. If either has none, we don't need
15052 to wrap the expression in a cleanup point expression. Note we don't
15053 check the left-hand side of the modify because it should always be a return decl. */
15054 if (TREE_CODE (expr) == RETURN_EXPR)
15056 tree op = TREE_OPERAND (expr, 0);
15057 if (!op || !TREE_SIDE_EFFECTS (op))
15058 return expr;
15059 op = TREE_OPERAND (op, 1);
15060 if (!TREE_SIDE_EFFECTS (op))
15061 return expr;
15064 return build1 (CLEANUP_POINT_EXPR, type, expr);
15067 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15068 of an indirection through OP0, or NULL_TREE if no simplification is
15069 possible. */
15071 tree
15072 fold_indirect_ref_1 (tree type, tree op0)
15074 tree sub = op0;
15075 tree subtype;
15077 STRIP_NOPS (sub);
15078 subtype = TREE_TYPE (sub);
15079 if (!POINTER_TYPE_P (subtype))
15080 return NULL_TREE;
15082 if (TREE_CODE (sub) == ADDR_EXPR)
15084 tree op = TREE_OPERAND (sub, 0);
15085 tree optype = TREE_TYPE (op);
15086 /* *&CONST_DECL -> to the value of the const decl. */
15087 if (TREE_CODE (op) == CONST_DECL)
15088 return DECL_INITIAL (op);
15089 /* *&p => p; make sure to handle *&"str"[cst] here. */
15090 if (type == optype)
15092 tree fop = fold_read_from_constant_string (op);
15093 if (fop)
15094 return fop;
15095 else
15096 return op;
15098 /* *(foo *)&fooarray => fooarray[0] */
15099 else if (TREE_CODE (optype) == ARRAY_TYPE
15100 && type == TREE_TYPE (optype))
15102 tree type_domain = TYPE_DOMAIN (optype);
15103 tree min_val = size_zero_node;
15104 if (type_domain && TYPE_MIN_VALUE (type_domain))
15105 min_val = TYPE_MIN_VALUE (type_domain);
15106 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15108 /* *(foo *)&complexfoo => __real__ complexfoo */
15109 else if (TREE_CODE (optype) == COMPLEX_TYPE
15110 && type == TREE_TYPE (optype))
15111 return fold_build1 (REALPART_EXPR, type, op);
15112 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15113 else if (TREE_CODE (optype) == VECTOR_TYPE
15114 && type == TREE_TYPE (optype))
15116 tree part_width = TYPE_SIZE (type);
15117 tree index = bitsize_int (0);
15118 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15122 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15123 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15124 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15126 tree op00 = TREE_OPERAND (sub, 0);
15127 tree op01 = TREE_OPERAND (sub, 1);
15128 tree op00type;
15130 STRIP_NOPS (op00);
15131 op00type = TREE_TYPE (op00);
15132 if (TREE_CODE (op00) == ADDR_EXPR
15133 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15134 && type == TREE_TYPE (TREE_TYPE (op00type)))
15136 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15137 tree part_width = TYPE_SIZE (type);
15138 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15139 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15140 tree index = bitsize_int (indexi);
15142 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15143 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15144 part_width, index);
15150 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15151 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15152 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15154 tree op00 = TREE_OPERAND (sub, 0);
15155 tree op01 = TREE_OPERAND (sub, 1);
15156 tree op00type;
15158 STRIP_NOPS (op00);
15159 op00type = TREE_TYPE (op00);
15160 if (TREE_CODE (op00) == ADDR_EXPR
15161 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15162 && type == TREE_TYPE (TREE_TYPE (op00type)))
15164 tree size = TYPE_SIZE_UNIT (type);
15165 if (tree_int_cst_equal (size, op01))
15166 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15170 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15171 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15172 && type == TREE_TYPE (TREE_TYPE (subtype)))
15174 tree type_domain;
15175 tree min_val = size_zero_node;
15176 sub = build_fold_indirect_ref (sub);
15177 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15178 if (type_domain && TYPE_MIN_VALUE (type_domain))
15179 min_val = TYPE_MIN_VALUE (type_domain);
15180 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15183 return NULL_TREE;
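/* Illustration, not part of fold-const.c: the array and complex
   rewrites above, which rely on the C guarantee that a complex type is
   laid out as a two-element array of its component type with the real
   part first.  */
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double a[2] = { 1.0, 2.0 };
  /* *(foo *)&complexfoo => __real__ complexfoo,
     ((foo *)&complexfoo)[1] => __imag__ complexfoo.  */
  printf ("%g %g\n", *(double *) &z, ((double *) &z)[1]);  /* 3 4 */
  /* *(foo *)&fooarray => fooarray[0].  */
  printf ("%g\n", *(double *) &a);                         /* 1 */
  return 0;
}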
15186 /* Builds an expression for an indirection through T, simplifying some
15187 cases. */
15189 tree
15190 build_fold_indirect_ref (tree t)
15192 tree type = TREE_TYPE (TREE_TYPE (t));
15193 tree sub = fold_indirect_ref_1 (type, t);
15195 if (sub)
15196 return sub;
15197 else
15198 return build1 (INDIRECT_REF, type, t);
15201 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15203 tree
15204 fold_indirect_ref (tree t)
15206 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15208 if (sub)
15209 return sub;
15210 else
15211 return t;
15214 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15215 whose result is ignored. The type of the returned tree need not be
15216 the same as the original expression. */
15218 tree
15219 fold_ignored_result (tree t)
15221 if (!TREE_SIDE_EFFECTS (t))
15222 return integer_zero_node;
15224 for (;;)
15225 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15227 case tcc_unary:
15228 t = TREE_OPERAND (t, 0);
15229 break;
15231 case tcc_binary:
15232 case tcc_comparison:
15233 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15234 t = TREE_OPERAND (t, 0);
15235 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15236 t = TREE_OPERAND (t, 1);
15237 else
15238 return t;
15239 break;
15241 case tcc_expression:
15242 switch (TREE_CODE (t))
15244 case COMPOUND_EXPR:
15245 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15246 return t;
15247 t = TREE_OPERAND (t, 0);
15248 break;
15250 case COND_EXPR:
15251 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15252 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15253 return t;
15254 t = TREE_OPERAND (t, 0);
15255 break;
15257 default:
15258 return t;
15260 break;
15262 default:
15263 return t;
15267 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15268 This can only be applied to objects of a sizetype. */
15270 tree
15271 round_up (tree value, int divisor)
15273 tree div = NULL_TREE;
15275 gcc_assert (divisor > 0);
15276 if (divisor == 1)
15277 return value;
15279 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15280 have to do anything. Only perform this check when VALUE is not a
15281 constant, because for a constant the check is more expensive than
15282 just doing the rounding. */
15283 if (TREE_CODE (value) != INTEGER_CST)
15285 div = build_int_cst (TREE_TYPE (value), divisor);
15287 if (multiple_of_p (TREE_TYPE (value), value, div))
15288 return value;
15291 /* If divisor is a power of two, simplify this to bit manipulation. */
15292 if (divisor == (divisor & -divisor))
15294 if (TREE_CODE (value) == INTEGER_CST)
15296 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15297 unsigned HOST_WIDE_INT high;
15298 bool overflow_p;
15300 if ((low & (divisor - 1)) == 0)
15301 return value;
15303 overflow_p = TREE_OVERFLOW (value);
15304 high = TREE_INT_CST_HIGH (value);
15305 low &= ~(divisor - 1);
15306 low += divisor;
15307 if (low == 0)
15309 high++;
15310 if (high == 0)
15311 overflow_p = true;
15314 return force_fit_type_double (TREE_TYPE (value), low, high,
15315 -1, overflow_p);
15317 else
15319 tree t;
15321 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15322 value = size_binop (PLUS_EXPR, value, t);
15323 t = build_int_cst (TREE_TYPE (value), -divisor);
15324 value = size_binop (BIT_AND_EXPR, value, t);
15327 else
15329 if (!div)
15330 div = build_int_cst (TREE_TYPE (value), divisor);
15331 value = size_binop (CEIL_DIV_EXPR, value, div);
15332 value = size_binop (MULT_EXPR, value, div);
15335 return value;
15338 /* Likewise, but round down. */
15340 tree
15341 round_down (tree value, int divisor)
15343 tree div = NULL_TREE;
15345 gcc_assert (divisor > 0);
15346 if (divisor == 1)
15347 return value;
15349 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15350 have to do anything. Only perform this check when VALUE is not a
15351 constant, because for a constant the check is more expensive than
15352 just doing the rounding. */
15353 if (TREE_CODE (value) != INTEGER_CST)
15355 div = build_int_cst (TREE_TYPE (value), divisor);
15357 if (multiple_of_p (TREE_TYPE (value), value, div))
15358 return value;
15361 /* If divisor is a power of two, simplify this to bit manipulation. */
15362 if (divisor == (divisor & -divisor))
15364 tree t;
15366 t = build_int_cst (TREE_TYPE (value), -divisor);
15367 value = size_binop (BIT_AND_EXPR, value, t);
15369 else
15371 if (!div)
15372 div = build_int_cst (TREE_TYPE (value), divisor);
15373 value = size_binop (FLOOR_DIV_EXPR, value, div);
15374 value = size_binop (MULT_EXPR, value, div);
15377 return value;
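/* Illustration, not part of fold-const.c: the power-of-two fast paths
   of round_up and round_down as plain integer arithmetic.  For a
   power-of-two DIVISOR, -DIVISOR is a mask with the low log2(DIVISOR)
   bits clear, so rounding down is a single AND and rounding up adds
   DIVISOR-1 first.  The helper names are illustrative only.  */
#include <stdio.h>

static unsigned long
round_up_p2 (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;   /* e.g. (13 + 7) & ~7 = 16 */
}

static unsigned long
round_down_p2 (unsigned long value, unsigned long divisor)
{
  return value & -divisor;                   /* e.g. 13 & ~7 = 8 */
}

int
main (void)
{
  printf ("%lu %lu\n", round_up_p2 (13, 8), round_down_p2 (13, 8));  /* 16 8 */
  return 0;
}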
15380 /* Returns a pointer to the base of the object addressed by EXP and
15381 extracts the information about the offset of the access, storing it
15382 in *PBITPOS and *POFFSET. */
15384 static tree
15385 split_address_to_core_and_offset (tree exp,
15386 HOST_WIDE_INT *pbitpos, tree *poffset)
15388 tree core;
15389 enum machine_mode mode;
15390 int unsignedp, volatilep;
15391 HOST_WIDE_INT bitsize;
15393 if (TREE_CODE (exp) == ADDR_EXPR)
15395 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15396 poffset, &mode, &unsignedp, &volatilep,
15397 false);
15398 core = fold_addr_expr (core);
15400 else
15402 core = exp;
15403 *pbitpos = 0;
15404 *poffset = NULL_TREE;
15407 return core;
15410 /* Returns true if addresses of E1 and E2 differ by a constant, false
15411 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15413 bool
15414 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15416 tree core1, core2;
15417 HOST_WIDE_INT bitpos1, bitpos2;
15418 tree toffset1, toffset2, tdiff, type;
15420 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15421 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15423 if (bitpos1 % BITS_PER_UNIT != 0
15424 || bitpos2 % BITS_PER_UNIT != 0
15425 || !operand_equal_p (core1, core2, 0))
15426 return false;
15428 if (toffset1 && toffset2)
15430 type = TREE_TYPE (toffset1);
15431 if (type != TREE_TYPE (toffset2))
15432 toffset2 = fold_convert (type, toffset2);
15434 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15435 if (!cst_and_fits_in_hwi (tdiff))
15436 return false;
15438 *diff = int_cst_value (tdiff);
15440 else if (toffset1 || toffset2)
15442 /* If only one of the offsets is non-constant, the difference cannot
15443 be a constant. */
15444 return false;
15446 else
15447 *diff = 0;
15449 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15450 return true;
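/* Illustration, not part of fold-const.c: the address pairs this
   decides.  Two addresses into the same object at constant offsets
   differ by a compile-time constant; a variable offset or distinct
   base objects defeat the fold.  The function name is illustrative
   only.  */
int
constant_pointer_difference (int *base)
{
  int *a = &base[7];
  int *b = &base[2];
  return (int) (a - b);   /* folds to the constant 5 */
}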
15453 /* Simplify the floating point expression EXP when the sign of the
15454 result is not significant. Return NULL_TREE if no simplification
15455 is possible. */
15457 tree
15458 fold_strip_sign_ops (tree exp)
15460 tree arg0, arg1;
15462 switch (TREE_CODE (exp))
15464 case ABS_EXPR:
15465 case NEGATE_EXPR:
15466 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15467 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15469 case MULT_EXPR:
15470 case RDIV_EXPR:
15471 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15472 return NULL_TREE;
15473 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15474 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15475 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15476 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15477 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15478 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15479 break;
15481 case COMPOUND_EXPR:
15482 arg0 = TREE_OPERAND (exp, 0);
15483 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15484 if (arg1)
15485 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15486 break;
15488 case COND_EXPR:
15489 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15490 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15491 if (arg0 || arg1)
15492 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15493 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15494 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15495 break;
15497 case CALL_EXPR:
15499 const enum built_in_function fcode = builtin_mathfn_code (exp);
15500 switch (fcode)
15502 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15503 /* Strip copysign function call, return the 1st argument. */
15504 arg0 = CALL_EXPR_ARG (exp, 0);
15505 arg1 = CALL_EXPR_ARG (exp, 1);
15506 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15508 default:
15509 /* Strip sign ops from the argument of "odd" math functions. */
15510 if (negate_mathfn_p (fcode))
15512 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15513 if (arg0)
15514 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15516 break;
15519 break;
15521 default:
15522 break;
15524 return NULL_TREE;
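/* Illustration, not part of fold-const.c: sign-stripping in action.
   When only the magnitude of the result is used, sign operations in
   the input are dead: sin is odd, so fabs (sin (-x)) equals
   fabs (sin (x)), and copysign's first argument carries the magnitude,
   so under fabs the call can be dropped.  The function name is
   illustrative only.  */
#include <math.h>

double
magnitude_only (double x, double s)
{
  /* The negation and the copysign may both be stripped; each call can
     fold to fabs (sin (x)).  */
  return fabs (sin (-x)) + fabs (copysign (sin (x), s));
}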