gcc/fold-const.c (merge with trunk @ 139506)
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
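/* Illustrative sketch of the calling convention of the entry points
   described above; `build_array_bytes' is a hypothetical helper, not
   part of this file, shown only to make the usage concrete.  */
#if 0
static tree
build_array_bytes (tree nelts)
{
  /* bytes = nelts * 4, folded down to a single INTEGER_CST whenever
     NELTS is itself constant.  */
  return size_binop (MULT_EXPR, fold_convert (sizetype, nelts),
                     size_int (4));
}
#endif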
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
74 /* The following constants represent a bit-based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
94 };
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int truth_value_p (enum tree_code);
109 static int operand_equal_for_comparison_p (tree, tree, tree);
110 static int twoval_comparison_p (tree, tree *, tree *, int *);
111 static tree eval_subst (tree, tree, tree, tree, tree);
112 static tree pedantic_omit_one_operand (tree, tree, tree);
113 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
116 tree *, tree *);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 tree);
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 tree, tree, tree);
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the
149 addition.
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 sign. */
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
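/* Illustrative sketch of the same sign test on ordinary 64-bit host
   integers (a minimal example, not part of this file).  Overflow is
   reported exactly when A and B share a sign that the wrapped SUM
   does not.  */
#if 0
#include <stdint.h>
static int
sum_overflows (int64_t a, int64_t b)
{
  int64_t sum = (int64_t) ((uint64_t) a + (uint64_t) b); /* wrapping add */
  return (~(a ^ b) & (a ^ sum)) < 0;  /* nonzero iff a + b overflowed */
}
#endif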
156 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
157 We do that by representing the two-word integer in 4 words, with only
158 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
159 number. The value of each word is LOWPART (word) + HIGHPART (word) * BASE.
161 #define LOWPART(x) \
162 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
163 #define HIGHPART(x) \
164 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
165 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
167 /* Unpack a two-word integer into 4 words.
168 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
169 WORDS points to the array of HOST_WIDE_INTs. */
171 static void
172 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174 words[0] = LOWPART (low);
175 words[1] = HIGHPART (low);
176 words[2] = LOWPART (hi);
177 words[3] = HIGHPART (hi);
180 /* Pack an array of 4 words into a two-word integer.
181 WORDS points to the array of words.
182 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 static void
185 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
186 HOST_WIDE_INT *hi)
188 *low = words[0] + words[1] * BASE;
189 *hi = words[2] + words[3] * BASE;
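/* Illustrative sketch (not in the original file): an encode/decode
   round-trip.  Each of the four array words holds only
   HOST_BITS_PER_WIDE_INT / 2 bits, so decode recovers exactly the
   pair that was given to encode.  */
#if 0
static void
encode_decode_roundtrip (unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  HOST_WIDE_INT words[4], hi2;
  unsigned HOST_WIDE_INT low2;
  encode (words, low, hi);
  decode (words, &low2, &hi2);
  gcc_assert (low2 == low && hi2 == hi);
}
#endif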
192 /* Force the double-word integer L1, H1 to be within the range of the
193 integer type TYPE. Stores the properly truncated and sign-extended
194 double-word integer in *LV, *HV. Returns true if the operation
195 overflows, that is, argument and result are different. */
197 int
198 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
199 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
201 unsigned HOST_WIDE_INT low0 = l1;
202 HOST_WIDE_INT high0 = h1;
203 unsigned int prec;
204 int sign_extended_type;
206 if (POINTER_TYPE_P (type)
207 || TREE_CODE (type) == OFFSET_TYPE)
208 prec = POINTER_SIZE;
209 else
210 prec = TYPE_PRECISION (type);
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
222 else
224 h1 = 0;
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
241 else if (prec == HOST_BITS_PER_WIDE_INT)
243 if ((HOST_WIDE_INT)l1 < 0)
244 h1 = -1;
246 else
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
251 h1 = -1;
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
256 *lv = l1;
257 *hv = h1;
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
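/* Illustrative sketch, assuming `schar_type' stands in for an 8-bit
   signed INTEGER_TYPE node: truncating 0x1ff keeps only the low eight
   bits (0xff), which then sign-extend to -1 across both words; the
   argument changed, so the routine reports overflow.  */
#if 0
static void
fit_example (tree schar_type)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = fit_double_type (0x1ff, 0, &lv, &hv, schar_type);
  gcc_assert (ovf && lv == (unsigned HOST_WIDE_INT) -1 && hv == -1);
}
#endif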
263 /* We force the double-int HIGH:LOW to the range of the type TYPE by
264 sign or zero extending it. OVERFLOWABLE indicates how much we care
265 about overflow of the value: when it is > 0 we are only interested
266 in signed overflow, and when it is < 0 we are interested in any
267 overflow. OVERFLOWED indicates whether overflow has already
268 occurred. We force the value to be within the range of TYPE (by
269 setting to 0 or 1 all the bits outside the type's range). We set
270 TREE_OVERFLOW if
271 OVERFLOWED is nonzero,
272 or OVERFLOWABLE is > 0 and signed overflow occurs,
273 or OVERFLOWABLE is < 0 and any overflow occurs.
274 We return a new tree node for the extended double-int. The node
275 is shared if no overflow flags are set. */
278 tree
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
281 bool overflowed)
283 int sign_extended_type;
284 bool overflow;
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
291 overflow = fit_double_type (low, high, &low, &high, type);
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
296 if (overflowed
297 || overflowable < 0
298 || (overflowable > 0 && sign_extended_type))
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
305 return t;
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
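/* Illustrative sketch of a caller (hypothetical helper, not in this
   file): passing OVERFLOWABLE = -1 asks for any wrap-around to be
   recorded, so a value that does not fit comes back as a fresh node
   with TREE_OVERFLOW set, while a fitting value reuses the shared
   constant.  */
#if 0
static tree
int_cst_checked (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT high)
{
  return force_fit_type_double (type, low, high,
                                -1 /* any overflow counts */,
                                false /* no prior overflow */);
}
#endif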
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
319 int
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
323 bool unsigned_p)
325 unsigned HOST_WIDE_INT l;
326 HOST_WIDE_INT h;
328 l = l1 + l2;
329 h = h1 + h2 + (l < l1);
331 *lv = l;
332 *hv = h;
334 if (unsigned_p)
335 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
336 else
337 return OVERFLOW_SUM_SIGN (h1, h2, h);
340 /* Negate a doubleword integer with doubleword result.
341 Return nonzero if the operation overflows, assuming it's signed.
342 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
343 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
345 int
346 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
347 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
349 if (l1 == 0)
351 *lv = 0;
352 *hv = - h1;
353 return (*hv & h1) < 0;
355 else
357 *lv = -l1;
358 *hv = ~h1;
359 return 0;
363 /* Multiply two doubleword integers with doubleword result.
364 Return nonzero if the operation overflows according to UNSIGNED_P.
365 Each argument is given as two `HOST_WIDE_INT' pieces.
366 One argument is L1 and H1; the other, L2 and H2.
367 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
369 int
370 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
371 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
372 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
373 bool unsigned_p)
375 HOST_WIDE_INT arg1[4];
376 HOST_WIDE_INT arg2[4];
377 HOST_WIDE_INT prod[4 * 2];
378 unsigned HOST_WIDE_INT carry;
379 int i, j, k;
380 unsigned HOST_WIDE_INT toplow, neglow;
381 HOST_WIDE_INT tophigh, neghigh;
383 encode (arg1, l1, h1);
384 encode (arg2, l2, h2);
386 memset (prod, 0, sizeof prod);
388 for (i = 0; i < 4; i++)
390 carry = 0;
391 for (j = 0; j < 4; j++)
393 k = i + j;
394 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
395 carry += arg1[i] * arg2[j];
396 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
397 carry += prod[k];
398 prod[k] = LOWPART (carry);
399 carry = HIGHPART (carry);
401 prod[i + 4] = carry;
404 decode (prod, lv, hv);
405 decode (prod + 4, &toplow, &tophigh);
407 /* Unsigned overflow is immediate. */
408 if (unsigned_p)
409 return (toplow | tophigh) != 0;
411 /* Check for signed overflow by calculating the signed representation of the
412 top half of the result; it should agree with the low half's sign bit. */
413 if (h1 < 0)
415 neg_double (l2, h2, &neglow, &neghigh);
416 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
418 if (h2 < 0)
420 neg_double (l1, h1, &neglow, &neghigh);
421 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
423 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
426 /* Shift the doubleword integer in L1, H1 left by COUNT places
427 keeping only PREC bits of result.
428 Shift right if COUNT is negative.
429 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
430 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
432 void
433 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
434 HOST_WIDE_INT count, unsigned int prec,
435 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
437 unsigned HOST_WIDE_INT signmask;
439 if (count < 0)
441 rshift_double (l1, h1, -count, prec, lv, hv, arith);
442 return;
445 if (SHIFT_COUNT_TRUNCATED)
446 count %= prec;
448 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
450 /* Shifting by the host word size is undefined according to the
451 ANSI standard, so we must handle this as a special case. */
452 *hv = 0;
453 *lv = 0;
455 else if (count >= HOST_BITS_PER_WIDE_INT)
457 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
458 *lv = 0;
460 else
462 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
463 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
464 *lv = l1 << count;
467 /* Sign extend all bits that are beyond the precision. */
469 signmask = -((prec > HOST_BITS_PER_WIDE_INT
470 ? ((unsigned HOST_WIDE_INT) *hv
471 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
472 : (*lv >> (prec - 1))) & 1);
474 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
476 else if (prec >= HOST_BITS_PER_WIDE_INT)
478 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
479 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
481 else
483 *hv = signmask;
484 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
485 *lv |= signmask << prec;
489 /* Shift the doubleword integer in L1, H1 right by COUNT places
490 keeping only PREC bits of result. COUNT must be positive.
491 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
492 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
494 void
495 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
496 HOST_WIDE_INT count, unsigned int prec,
497 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
498 int arith)
500 unsigned HOST_WIDE_INT signmask;
502 signmask = (arith
503 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
504 : 0);
506 if (SHIFT_COUNT_TRUNCATED)
507 count %= prec;
509 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
511 /* Shifting by the host word size is undefined according to the
512 ANSI standard, so we must handle this as a special case. */
513 *hv = 0;
514 *lv = 0;
516 else if (count >= HOST_BITS_PER_WIDE_INT)
518 *hv = 0;
519 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
521 else
523 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
524 *lv = ((l1 >> count)
525 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
528 /* Zero / sign extend all bits that are beyond the precision. */
530 if (count >= (HOST_WIDE_INT)prec)
532 *hv = signmask;
533 *lv = signmask;
535 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
537 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
539 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
540 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
542 else
544 *hv = signmask;
545 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
546 *lv |= signmask << (prec - count);
550 /* Rotate the doubleword integer in L1, H1 left by COUNT places
551 keeping only PREC bits of result.
552 Rotate right if COUNT is negative.
553 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
555 void
556 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
557 HOST_WIDE_INT count, unsigned int prec,
558 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
560 unsigned HOST_WIDE_INT s1l, s2l;
561 HOST_WIDE_INT s1h, s2h;
563 count %= prec;
564 if (count < 0)
565 count += prec;
567 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
568 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
569 *lv = s1l | s2l;
570 *hv = s1h | s2h;
573 /* Rotate the doubleword integer in L1, H1 left by COUNT places
574 keeping only PREC bits of result. COUNT must be positive.
575 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
577 void
578 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
579 HOST_WIDE_INT count, unsigned int prec,
580 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
582 unsigned HOST_WIDE_INT s1l, s2l;
583 HOST_WIDE_INT s1h, s2h;
585 count %= prec;
586 if (count < 0)
587 count += prec;
589 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
590 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
591 *lv = s1l | s2l;
592 *hv = s1h | s2h;
595 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
596 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
597 CODE is a tree code for a kind of division, one of
598 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
599 or EXACT_DIV_EXPR
600 It controls how the quotient is rounded to an integer.
601 Return nonzero if the operation overflows.
602 UNS nonzero says do unsigned division. */
604 int
605 div_and_round_double (enum tree_code code, int uns,
606 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
607 HOST_WIDE_INT hnum_orig,
608 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
609 HOST_WIDE_INT hden_orig,
610 unsigned HOST_WIDE_INT *lquo,
611 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
612 HOST_WIDE_INT *hrem)
614 int quo_neg = 0;
615 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
616 HOST_WIDE_INT den[4], quo[4];
617 int i, j;
618 unsigned HOST_WIDE_INT work;
619 unsigned HOST_WIDE_INT carry = 0;
620 unsigned HOST_WIDE_INT lnum = lnum_orig;
621 HOST_WIDE_INT hnum = hnum_orig;
622 unsigned HOST_WIDE_INT lden = lden_orig;
623 HOST_WIDE_INT hden = hden_orig;
624 int overflow = 0;
626 if (hden == 0 && lden == 0)
627 overflow = 1, lden = 1;
629 /* Calculate quotient sign and convert operands to unsigned. */
630 if (!uns)
632 if (hnum < 0)
634 quo_neg = ~ quo_neg;
635 /* (minimum integer) / (-1) is the only overflow case. */
636 if (neg_double (lnum, hnum, &lnum, &hnum)
637 && ((HOST_WIDE_INT) lden & hden) == -1)
638 overflow = 1;
640 if (hden < 0)
642 quo_neg = ~ quo_neg;
643 neg_double (lden, hden, &lden, &hden);
647 if (hnum == 0 && hden == 0)
648 { /* single precision */
649 *hquo = *hrem = 0;
650 /* This unsigned division rounds toward zero. */
651 *lquo = lnum / lden;
652 goto finish_up;
655 if (hnum == 0)
656 { /* trivial case: dividend < divisor */
657 /* hden != 0 already checked. */
658 *hquo = *lquo = 0;
659 *hrem = hnum;
660 *lrem = lnum;
661 goto finish_up;
664 memset (quo, 0, sizeof quo);
666 memset (num, 0, sizeof num); /* to zero 9th element */
667 memset (den, 0, sizeof den);
669 encode (num, lnum, hnum);
670 encode (den, lden, hden);
672 /* Special code for when the divisor < BASE. */
673 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
675 /* hnum != 0 already checked. */
676 for (i = 4 - 1; i >= 0; i--)
678 work = num[i] + carry * BASE;
679 quo[i] = work / lden;
680 carry = work % lden;
683 else
685 /* Full double precision division,
686 with thanks to Don Knuth's "Seminumerical Algorithms". */
687 int num_hi_sig, den_hi_sig;
688 unsigned HOST_WIDE_INT quo_est, scale;
690 /* Find the highest nonzero divisor digit. */
691 for (i = 4 - 1;; i--)
692 if (den[i] != 0)
694 den_hi_sig = i;
695 break;
698 Ensure that the first digit of the divisor is at least BASE/2.
699 This is required by the quotient digit estimation algorithm. */
701 scale = BASE / (den[den_hi_sig] + 1);
702 if (scale > 1)
703 { /* scale divisor and dividend */
704 carry = 0;
705 for (i = 0; i <= 4 - 1; i++)
707 work = (num[i] * scale) + carry;
708 num[i] = LOWPART (work);
709 carry = HIGHPART (work);
712 num[4] = carry;
713 carry = 0;
714 for (i = 0; i <= 4 - 1; i++)
716 work = (den[i] * scale) + carry;
717 den[i] = LOWPART (work);
718 carry = HIGHPART (work);
719 if (den[i] != 0) den_hi_sig = i;
723 num_hi_sig = 4;
725 /* Main loop */
726 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
728 /* Guess the next quotient digit, quo_est, by dividing the first
729 two remaining dividend digits by the high order quotient digit.
730 quo_est is never low and is at most 2 high. */
731 unsigned HOST_WIDE_INT tmp;
733 num_hi_sig = i + den_hi_sig + 1;
734 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
735 if (num[num_hi_sig] != den[den_hi_sig])
736 quo_est = work / den[den_hi_sig];
737 else
738 quo_est = BASE - 1;
740 /* Refine quo_est so it's usually correct, and at most one high. */
741 tmp = work - quo_est * den[den_hi_sig];
742 if (tmp < BASE
743 && (den[den_hi_sig - 1] * quo_est
744 > (tmp * BASE + num[num_hi_sig - 2])))
745 quo_est--;
747 /* Try QUO_EST as the quotient digit, by multiplying the
748 divisor by QUO_EST and subtracting from the remaining dividend.
749 Keep in mind that QUO_EST is the I - 1st digit. */
751 carry = 0;
752 for (j = 0; j <= den_hi_sig; j++)
754 work = quo_est * den[j] + carry;
755 carry = HIGHPART (work);
756 work = num[i + j] - LOWPART (work);
757 num[i + j] = LOWPART (work);
758 carry += HIGHPART (work) != 0;
761 /* If quo_est was high by one, then num[i] went negative and
762 we need to correct things. */
763 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
765 quo_est--;
766 carry = 0; /* add divisor back in */
767 for (j = 0; j <= den_hi_sig; j++)
769 work = num[i + j] + den[j] + carry;
770 carry = HIGHPART (work);
771 num[i + j] = LOWPART (work);
774 num [num_hi_sig] += carry;
777 /* Store the quotient digit. */
778 quo[i] = quo_est;
782 decode (quo, lquo, hquo);
784 finish_up:
785 /* If result is negative, make it so. */
786 if (quo_neg)
787 neg_double (*lquo, *hquo, lquo, hquo);
789 /* Compute trial remainder: rem = num - (quo * den) */
790 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
791 neg_double (*lrem, *hrem, lrem, hrem);
792 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
794 switch (code)
796 case TRUNC_DIV_EXPR:
797 case TRUNC_MOD_EXPR: /* round toward zero */
798 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
799 return overflow;
801 case FLOOR_DIV_EXPR:
802 case FLOOR_MOD_EXPR: /* round toward negative infinity */
803 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
805 /* quo = quo - 1; */
806 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
807 lquo, hquo);
809 else
810 return overflow;
811 break;
813 case CEIL_DIV_EXPR:
814 case CEIL_MOD_EXPR: /* round toward positive infinity */
815 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
818 lquo, hquo);
820 else
821 return overflow;
822 break;
824 case ROUND_DIV_EXPR:
825 case ROUND_MOD_EXPR: /* round to closest integer */
827 unsigned HOST_WIDE_INT labs_rem = *lrem;
828 HOST_WIDE_INT habs_rem = *hrem;
829 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
830 HOST_WIDE_INT habs_den = hden, htwice;
832 /* Get absolute values. */
833 if (*hrem < 0)
834 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
835 if (hden < 0)
836 neg_double (lden, hden, &labs_den, &habs_den);
838 /* If (2 * abs (lrem) >= abs (lden)) */
839 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
840 labs_rem, habs_rem, &ltwice, &htwice);
842 if (((unsigned HOST_WIDE_INT) habs_den
843 < (unsigned HOST_WIDE_INT) htwice)
844 || (((unsigned HOST_WIDE_INT) habs_den
845 == (unsigned HOST_WIDE_INT) htwice)
846 && (labs_den < ltwice)))
848 if (*hquo < 0)
849 /* quo = quo - 1; */
850 add_double (*lquo, *hquo,
851 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
852 else
853 /* quo = quo + 1; */
854 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
855 lquo, hquo);
857 else
858 return overflow;
860 break;
862 default:
863 gcc_unreachable ();
866 /* Compute true remainder: rem = num - (quo * den) */
867 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
868 neg_double (*lrem, *hrem, lrem, hrem);
869 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
870 return overflow;
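/* Illustrative sketch of the quotient adjustments above, restated on
   host integers (a minimal example, not part of this file).  For
   -7 / 2 the truncated quotient is -3 with remainder -1; FLOOR steps
   it down to -4, CEIL leaves -3, and ROUND (2 * |rem| >= |den|, ties
   away from zero) gives -4.  */
#if 0
#include <stdint.h>
static int64_t
floor_div (int64_t num, int64_t den)
{
  int64_t quo = num / den;                  /* C truncates toward zero */
  if (num % den != 0 && (num < 0) != (den < 0))
    quo -= 1;                               /* quo = quo - 1, as above */
  return quo;
}
#endif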
873 /* If ARG2 divides ARG1 with zero remainder, carries out the division
874 of type CODE and returns the quotient.
875 Otherwise returns NULL_TREE. */
877 static tree
878 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
880 unsigned HOST_WIDE_INT int1l, int2l;
881 HOST_WIDE_INT int1h, int2h;
882 unsigned HOST_WIDE_INT quol, reml;
883 HOST_WIDE_INT quoh, remh;
884 tree type = TREE_TYPE (arg1);
885 int uns = TYPE_UNSIGNED (type);
887 int1l = TREE_INT_CST_LOW (arg1);
888 int1h = TREE_INT_CST_HIGH (arg1);
889 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
890 &obj[some_exotic_number]. */
891 if (POINTER_TYPE_P (type))
893 uns = false;
894 type = signed_type_for (type);
895 fit_double_type (int1l, int1h, &int1l, &int1h,
896 type);
898 else
899 fit_double_type (int1l, int1h, &int1l, &int1h, type);
900 int2l = TREE_INT_CST_LOW (arg2);
901 int2h = TREE_INT_CST_HIGH (arg2);
903 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904 &quol, &quoh, &reml, &remh);
905 if (remh != 0 || reml != 0)
906 return NULL_TREE;
908 return build_int_cst_wide (type, quol, quoh);
911 /* This is nonzero if we should defer warnings about undefined
912 overflow. This facility exists because these warnings are a
913 special case. The code to estimate loop iterations does not want
914 to issue any warnings, since it works with expressions which do not
915 occur in user code. Various bits of cleanup code call fold(), but
916 only use the result if it has certain characteristics (e.g., is a
917 constant); that code only wants to issue a warning if the result is
918 used. */
920 static int fold_deferring_overflow_warnings;
922 /* If a warning about undefined overflow is deferred, this is the
923 warning. Note that this may cause us to turn two warnings into
924 one, but that is fine since it is sufficient to only give one
925 warning per expression. */
927 static const char* fold_deferred_overflow_warning;
929 /* If a warning about undefined overflow is deferred, this is the
930 level at which the warning should be emitted. */
932 static enum warn_strict_overflow_code fold_deferred_overflow_code;
934 /* Start deferring overflow warnings. We could use a stack here to
935 permit nested calls, but at present it is not necessary. */
937 void
938 fold_defer_overflow_warnings (void)
940 ++fold_deferring_overflow_warnings;
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
950 deferred code. */
952 void
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
955 const char *warnmsg;
956 location_t locus;
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
962 if (fold_deferred_overflow_warning != NULL
963 && code != 0
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = code;
966 return;
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
972 if (!issue || warnmsg == NULL)
973 return;
975 if (gimple_no_warning_p (stmt))
976 return;
978 /* Use the smallest code level when deciding to issue the
979 warning. */
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
983 if (!issue_strict_overflow_warning (code))
984 return;
986 if (stmt == NULL)
987 locus = input_location;
988 else
989 locus = gimple_location (stmt);
990 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
993 /* Stop deferring overflow warnings, ignoring any deferred
994 warnings. */
996 void
997 fold_undefer_and_ignore_overflow_warnings (void)
999 fold_undefer_overflow_warnings (false, NULL, 0);
1002 /* Whether we are deferring overflow warnings. */
1004 bool
1005 fold_deferring_overflow_warnings_p (void)
1007 return fold_deferring_overflow_warnings > 0;
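/* Illustrative sketch of the defer/undefer pattern used by callers
   such as the loop-iteration estimator (hypothetical wrapper, not in
   this file).  Warnings queued by fold () inside the window are
   emitted on undefer only when the folded result is actually used.  */
#if 0
static tree
fold_warn_only_if_used (tree expr, bool used)
{
  tree res;
  fold_defer_overflow_warnings ();
  res = fold (expr);
  fold_undefer_overflow_warnings (used, NULL /* stmt */, 0 /* code */);
  return res;
}
#endif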
1010 /* This is called when we fold something based on the fact that signed
1011 overflow is undefined. */
1013 static void
1014 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1032 static bool
1033 negate_mathfn_p (enum built_in_function code)
1035 switch (code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1060 return true;
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1068 default:
1069 break;
1071 return false;
1074 /* Check whether we may negate an integer constant T without causing
1075 overflow. */
1077 bool
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1081 unsigned int prec;
1082 tree type;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1088 return false;
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1094 return true;
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1098 else
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
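/* Illustrative sketch on plain host integers (not part of this file):
   the single signed value whose negation overflows is the type
   minimum, which is exactly the bit pattern 1 << (prec - 1) tested
   above.  */
#if 0
#include <stdint.h>
static int
negation_is_safe (int32_t v)
{
  return v != INT32_MIN;  /* -INT32_MIN wraps back to INT32_MIN */
}
#endif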
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1108 static bool
1109 negate_expr_p (tree t)
1111 tree type;
1113 if (t == 0)
1114 return false;
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1121 case INTEGER_CST:
1122 if (TYPE_OVERFLOW_WRAPS (type))
1123 return true;
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1127 case BIT_NOT_EXPR:
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1131 case FIXED_CST:
1132 case REAL_CST:
1133 case NEGATE_EXPR:
1134 return true;
1136 case COMPLEX_CST:
1137 return negate_expr_p (TREE_REALPART (t))
1138 && negate_expr_p (TREE_IMAGPART (t));
1140 case COMPLEX_EXPR:
1141 return negate_expr_p (TREE_OPERAND (t, 0))
1142 && negate_expr_p (TREE_OPERAND (t, 1));
1144 case CONJ_EXPR:
1145 return negate_expr_p (TREE_OPERAND (t, 0));
1147 case PLUS_EXPR:
1148 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 return false;
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1155 return true;
1156 /* -(A + B) -> (-A) - B. */
1157 return negate_expr_p (TREE_OPERAND (t, 0));
1159 case MINUS_EXPR:
1160 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1161 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1163 && reorder_operands_p (TREE_OPERAND (t, 0),
1164 TREE_OPERAND (t, 1));
1166 case MULT_EXPR:
1167 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1168 break;
1170 /* Fall through. */
1172 case RDIV_EXPR:
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1174 return negate_expr_p (TREE_OPERAND (t, 1))
1175 || negate_expr_p (TREE_OPERAND (t, 0));
1176 break;
1178 case TRUNC_DIV_EXPR:
1179 case ROUND_DIV_EXPR:
1180 case FLOOR_DIV_EXPR:
1181 case CEIL_DIV_EXPR:
1182 case EXACT_DIV_EXPR:
1183 /* In general we can't negate A / B, because if A is INT_MIN and
1184 B is 1, we may turn this into INT_MIN / -1 which is undefined
1185 and actually traps on some architectures. But if overflow is
1186 undefined, we can negate, because - (INT_MIN / 1) is an
1187 overflow. */
1188 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1189 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 break;
1191 return negate_expr_p (TREE_OPERAND (t, 1))
1192 || negate_expr_p (TREE_OPERAND (t, 0));
1194 case NOP_EXPR:
1195 /* Negate -((double)float) as (double)(-float). */
1196 if (TREE_CODE (type) == REAL_TYPE)
1198 tree tem = strip_float_extensions (t);
1199 if (tem != t)
1200 return negate_expr_p (tem);
1202 break;
1204 case CALL_EXPR:
1205 /* Negate -f(x) as f(-x). */
1206 if (negate_mathfn_p (builtin_mathfn_code (t)))
1207 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1208 break;
1210 case RSHIFT_EXPR:
1211 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1212 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1214 tree op1 = TREE_OPERAND (t, 1);
1215 if (TREE_INT_CST_HIGH (op1) == 0
1216 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1217 == TREE_INT_CST_LOW (op1))
1218 return true;
1220 break;
1222 default:
1223 break;
1225 return false;
1228 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
1229 no simplification is possible.
1230 If negate_expr_p would return true for T, NULL_TREE will never be
1231 returned. */
1233 static tree
1234 fold_negate_expr (tree t)
1236 tree type = TREE_TYPE (t);
1237 tree tem;
1239 switch (TREE_CODE (t))
1241 /* Convert - (~A) to A + 1. */
1242 case BIT_NOT_EXPR:
1243 if (INTEGRAL_TYPE_P (type))
1244 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1245 build_int_cst (type, 1));
1246 break;
1248 case INTEGER_CST:
1249 tem = fold_negate_const (t, type);
1250 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1251 || !TYPE_OVERFLOW_TRAPS (type))
1252 return tem;
1253 break;
1255 case REAL_CST:
1256 tem = fold_negate_const (t, type);
1257 /* Two's complement FP formats, such as c4x, may overflow. */
1258 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1259 return tem;
1260 break;
1262 case FIXED_CST:
1263 tem = fold_negate_const (t, type);
1264 return tem;
1266 case COMPLEX_CST:
1268 tree rpart = negate_expr (TREE_REALPART (t));
1269 tree ipart = negate_expr (TREE_IMAGPART (t));
1271 if ((TREE_CODE (rpart) == REAL_CST
1272 && TREE_CODE (ipart) == REAL_CST)
1273 || (TREE_CODE (rpart) == INTEGER_CST
1274 && TREE_CODE (ipart) == INTEGER_CST))
1275 return build_complex (type, rpart, ipart);
1277 break;
1279 case COMPLEX_EXPR:
1280 if (negate_expr_p (t))
1281 return fold_build2 (COMPLEX_EXPR, type,
1282 fold_negate_expr (TREE_OPERAND (t, 0)),
1283 fold_negate_expr (TREE_OPERAND (t, 1)));
1284 break;
1286 case CONJ_EXPR:
1287 if (negate_expr_p (t))
1288 return fold_build1 (CONJ_EXPR, type,
1289 fold_negate_expr (TREE_OPERAND (t, 0)));
1290 break;
1292 case NEGATE_EXPR:
1293 return TREE_OPERAND (t, 0);
1295 case PLUS_EXPR:
1296 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1299 /* -(A + B) -> (-B) - A. */
1300 if (negate_expr_p (TREE_OPERAND (t, 1))
1301 && reorder_operands_p (TREE_OPERAND (t, 0),
1302 TREE_OPERAND (t, 1)))
1304 tem = negate_expr (TREE_OPERAND (t, 1));
1305 return fold_build2 (MINUS_EXPR, type,
1306 tem, TREE_OPERAND (t, 0));
1309 /* -(A + B) -> (-A) - B. */
1310 if (negate_expr_p (TREE_OPERAND (t, 0)))
1312 tem = negate_expr (TREE_OPERAND (t, 0));
1313 return fold_build2 (MINUS_EXPR, type,
1314 tem, TREE_OPERAND (t, 1));
1317 break;
1319 case MINUS_EXPR:
1320 /* - (A - B) -> B - A */
1321 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1323 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1324 return fold_build2 (MINUS_EXPR, type,
1325 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1326 break;
1328 case MULT_EXPR:
1329 if (TYPE_UNSIGNED (type))
1330 break;
1332 /* Fall through. */
1334 case RDIV_EXPR:
1335 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1337 tem = TREE_OPERAND (t, 1);
1338 if (negate_expr_p (tem))
1339 return fold_build2 (TREE_CODE (t), type,
1340 TREE_OPERAND (t, 0), negate_expr (tem));
1341 tem = TREE_OPERAND (t, 0);
1342 if (negate_expr_p (tem))
1343 return fold_build2 (TREE_CODE (t), type,
1344 negate_expr (tem), TREE_OPERAND (t, 1));
1346 break;
1348 case TRUNC_DIV_EXPR:
1349 case ROUND_DIV_EXPR:
1350 case FLOOR_DIV_EXPR:
1351 case CEIL_DIV_EXPR:
1352 case EXACT_DIV_EXPR:
1353 /* In general we can't negate A / B, because if A is INT_MIN and
1354 B is 1, we may turn this into INT_MIN / -1 which is undefined
1355 and actually traps on some architectures. But if overflow is
1356 undefined, we can negate, because - (INT_MIN / 1) is an
1357 overflow. */
1358 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1360 const char * const warnmsg = G_("assuming signed overflow does not "
1361 "occur when negating a division");
1362 tem = TREE_OPERAND (t, 1);
1363 if (negate_expr_p (tem))
1365 if (INTEGRAL_TYPE_P (type)
1366 && (TREE_CODE (tem) != INTEGER_CST
1367 || integer_onep (tem)))
1368 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1369 return fold_build2 (TREE_CODE (t), type,
1370 TREE_OPERAND (t, 0), negate_expr (tem));
1372 tem = TREE_OPERAND (t, 0);
1373 if (negate_expr_p (tem))
1375 if (INTEGRAL_TYPE_P (type)
1376 && (TREE_CODE (tem) != INTEGER_CST
1377 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1379 return fold_build2 (TREE_CODE (t), type,
1380 negate_expr (tem), TREE_OPERAND (t, 1));
1383 break;
1385 case NOP_EXPR:
1386 /* Convert -((double)float) into (double)(-float). */
1387 if (TREE_CODE (type) == REAL_TYPE)
1389 tem = strip_float_extensions (t);
1390 if (tem != t && negate_expr_p (tem))
1391 return fold_convert (type, negate_expr (tem));
1393 break;
1395 case CALL_EXPR:
1396 /* Negate -f(x) as f(-x). */
1397 if (negate_mathfn_p (builtin_mathfn_code (t))
1398 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1400 tree fndecl, arg;
1402 fndecl = get_callee_fndecl (t);
1403 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1404 return build_call_expr (fndecl, 1, arg);
1406 break;
1408 case RSHIFT_EXPR:
1409 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1410 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1412 tree op1 = TREE_OPERAND (t, 1);
1413 if (TREE_INT_CST_HIGH (op1) == 0
1414 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1415 == TREE_INT_CST_LOW (op1))
1417 tree ntype = TYPE_UNSIGNED (type)
1418 ? signed_type_for (type)
1419 : unsigned_type_for (type);
1420 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1421 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1422 return fold_convert (type, temp);
1425 break;
1427 default:
1428 break;
1431 return NULL_TREE;
1434 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1435 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1436 return NULL_TREE. */
1438 static tree
1439 negate_expr (tree t)
1441 tree type, tem;
1443 if (t == NULL_TREE)
1444 return NULL_TREE;
1446 type = TREE_TYPE (t);
1447 STRIP_SIGN_NOPS (t);
1449 tem = fold_negate_expr (t);
1450 if (!tem)
1451 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1452 return fold_convert (type, tem);
1455 /* Split a tree IN into constant, literal and variable parts that could be
1456 combined with CODE to make IN. "constant" means an expression with
1457 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1458 commutative arithmetic operation. Store the constant part into *CONP,
1459 the literal in *LITP and return the variable part. If a part isn't
1460 present, set it to null. If the tree does not decompose in this way,
1461 return the entire tree as the variable part and the other parts as null.
1463 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1464 case, we negate an operand that was subtracted. Except if it is a
1465 literal for which we use *MINUS_LITP instead.
1467 If NEGATE_P is true, we are negating all of IN, again except a literal
1468 for which we use *MINUS_LITP instead.
1470 If IN is itself a literal or constant, return it as appropriate.
1472 Note that we do not guarantee that any of the three values will be the
1473 same type as IN, but they will have the same signedness and mode. */
1475 static tree
1476 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1477 tree *minus_litp, int negate_p)
1479 tree var = 0;
1481 *conp = 0;
1482 *litp = 0;
1483 *minus_litp = 0;
1485 /* Strip any conversions that don't change the machine mode or signedness. */
1486 STRIP_SIGN_NOPS (in);
1488 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1489 || TREE_CODE (in) == FIXED_CST)
1490 *litp = in;
1491 else if (TREE_CODE (in) == code
1492 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1493 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1494 /* We can associate addition and subtraction together (even
1495 though the C standard doesn't say so) for integers because
1496 the value is not affected. For reals, the value might be
1497 affected, so we can't. */
1498 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1499 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1501 tree op0 = TREE_OPERAND (in, 0);
1502 tree op1 = TREE_OPERAND (in, 1);
1503 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1504 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1506 /* First see if either of the operands is a literal, then a constant. */
1507 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1508 || TREE_CODE (op0) == FIXED_CST)
1509 *litp = op0, op0 = 0;
1510 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1511 || TREE_CODE (op1) == FIXED_CST)
1512 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1514 if (op0 != 0 && TREE_CONSTANT (op0))
1515 *conp = op0, op0 = 0;
1516 else if (op1 != 0 && TREE_CONSTANT (op1))
1517 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1519 /* If we haven't dealt with either operand, this is not a case we can
1520 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1521 if (op0 != 0 && op1 != 0)
1522 var = in;
1523 else if (op0 != 0)
1524 var = op0;
1525 else
1526 var = op1, neg_var_p = neg1_p;
1528 /* Now do any needed negations. */
1529 if (neg_litp_p)
1530 *minus_litp = *litp, *litp = 0;
1531 if (neg_conp_p)
1532 *conp = negate_expr (*conp);
1533 if (neg_var_p)
1534 var = negate_expr (var);
1536 else if (TREE_CONSTANT (in))
1537 *conp = in;
1538 else
1539 var = in;
1541 if (negate_p)
1543 if (*litp)
1544 *minus_litp = *litp, *litp = 0;
1545 else if (*minus_litp)
1546 *litp = *minus_litp, *minus_litp = 0;
1547 *conp = negate_expr (*conp);
1548 var = negate_expr (var);
1551 return var;
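/* Illustrative sketch (hypothetical caller, not in this file):
   decomposing IN = x + 4 under PLUS_EXPR.  The INTEGER_CST 4 lands in
   *LITP, *CONP and *MINUS_LITP stay null, and the variable part x is
   returned.  */
#if 0
static tree
split_example (tree x_plus_4)
{
  tree con, lit, minus_lit;
  tree var = split_tree (x_plus_4, PLUS_EXPR,
                         &con, &lit, &minus_lit, /*negate_p=*/0);
  /* Here var == x, lit == 4, con == minus_lit == NULL_TREE.  */
  return var;
}
#endif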
1554 /* Re-associate trees split by the above function. T1 and T2 are either
1555 expressions to associate or null. Return the new expression, if any. If
1556 we build an operation, do it in TYPE and with CODE. */
1558 static tree
1559 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1561 if (t1 == 0)
1562 return t2;
1563 else if (t2 == 0)
1564 return t1;
1566 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1567 try to fold this since we will have infinite recursion. But do
1568 deal with any NEGATE_EXPRs. */
1569 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1570 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1572 if (code == PLUS_EXPR)
1574 if (TREE_CODE (t1) == NEGATE_EXPR)
1575 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1576 fold_convert (type, TREE_OPERAND (t1, 0)));
1577 else if (TREE_CODE (t2) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1579 fold_convert (type, TREE_OPERAND (t2, 0)));
1580 else if (integer_zerop (t2))
1581 return fold_convert (type, t1);
1583 else if (code == MINUS_EXPR)
1585 if (integer_zerop (t2))
1586 return fold_convert (type, t1);
1589 return build2 (code, type, fold_convert (type, t1),
1590 fold_convert (type, t2));
1593 return fold_build2 (code, type, fold_convert (type, t1),
1594 fold_convert (type, t2));
1597 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1598 for use in int_const_binop, size_binop and size_diffop. */
1600 static bool
1601 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1603 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1604 return false;
1605 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1606 return false;
1608 switch (code)
1610 case LSHIFT_EXPR:
1611 case RSHIFT_EXPR:
1612 case LROTATE_EXPR:
1613 case RROTATE_EXPR:
1614 return true;
1616 default:
1617 break;
1620 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1621 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1622 && TYPE_MODE (type1) == TYPE_MODE (type2);
1626 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1627 to produce a new constant. Return NULL_TREE if we don't know how
1628 to evaluate CODE at compile-time.
1630 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1632 tree
1633 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1635 unsigned HOST_WIDE_INT int1l, int2l;
1636 HOST_WIDE_INT int1h, int2h;
1637 unsigned HOST_WIDE_INT low;
1638 HOST_WIDE_INT hi;
1639 unsigned HOST_WIDE_INT garbagel;
1640 HOST_WIDE_INT garbageh;
1641 tree t;
1642 tree type = TREE_TYPE (arg1);
1643 int uns = TYPE_UNSIGNED (type);
1644 int is_sizetype
1645 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1646 int overflow = 0;
1648 int1l = TREE_INT_CST_LOW (arg1);
1649 int1h = TREE_INT_CST_HIGH (arg1);
1650 int2l = TREE_INT_CST_LOW (arg2);
1651 int2h = TREE_INT_CST_HIGH (arg2);
1653 switch (code)
1655 case BIT_IOR_EXPR:
1656 low = int1l | int2l, hi = int1h | int2h;
1657 break;
1659 case BIT_XOR_EXPR:
1660 low = int1l ^ int2l, hi = int1h ^ int2h;
1661 break;
1663 case BIT_AND_EXPR:
1664 low = int1l & int2l, hi = int1h & int2h;
1665 break;
1667 case RSHIFT_EXPR:
1668 int2l = -int2l;
1669 case LSHIFT_EXPR:
1670 /* It's unclear from the C standard whether shifts can overflow.
1671 The following code ignores overflow; perhaps a C standard
1672 interpretation ruling is needed. */
1673 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1674 &low, &hi, !uns);
1675 break;
1677 case RROTATE_EXPR:
1678 int2l = - int2l;
1679 case LROTATE_EXPR:
1680 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1681 &low, &hi);
1682 break;
1684 case PLUS_EXPR:
1685 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1686 break;
1688 case MINUS_EXPR:
1689 neg_double (int2l, int2h, &low, &hi);
1690 add_double (int1l, int1h, low, hi, &low, &hi);
1691 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1692 break;
1694 case MULT_EXPR:
1695 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1696 break;
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1707 if (code == CEIL_DIV_EXPR)
1708 int1l += int2l - 1;
1710 low = int1l / int2l, hi = 0;
1711 break;
1714 /* ... fall through ... */
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1718 return NULL_TREE;
1719 if (int2h == 0 && int2l == 1)
1721 low = int1l, hi = int1h;
1722 break;
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1727 low = 1, hi = 0;
1728 break;
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1732 break;
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1742 if (code == CEIL_MOD_EXPR)
1743 int1l += int2l - 1;
1744 low = int1l % int2l, hi = 0;
1745 break;
1748 /* ... fall through ... */
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1752 return NULL_TREE;
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1756 break;
1758 case MIN_EXPR:
1759 case MAX_EXPR:
1760 if (uns)
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1765 && int1l < int2l));
1766 else
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1772 else
1773 low = int2l, hi = int2h;
1774 break;
1776 default:
1777 return NULL_TREE;
1780 if (notrunc)
1782 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1784 /* Propagate overflow flags ourselves. */
1785 if (((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1788 t = copy_node (t);
1789 TREE_OVERFLOW (t) = 1;
1792 else
1793 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1794 ((!uns || is_sizetype) && overflow)
1795 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1797 return t;
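/* Illustrative sketch (hypothetical helper, not in this file): folding
   6 + 7 in `int' at compile time.  With NOTRUNC = 0 the sum is
   truncated to the type's precision and TREE_OVERFLOW is set if it
   wrapped.  */
#if 0
static tree
fold_six_plus_seven (void)
{
  tree a = build_int_cst (integer_type_node, 6);
  tree b = build_int_cst (integer_type_node, 7);
  return int_const_binop (PLUS_EXPR, a, b, 0);  /* INTEGER_CST 13 */
}
#endif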
1800 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1801 constant. We assume ARG1 and ARG2 have the same data type, or at least
1802 are the same kind of constant and the same machine mode. Return zero if
1803 combining the constants is not allowed in the current operating mode.
1805 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1807 static tree
1808 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1810 /* Sanity check for the recursive cases. */
1811 if (!arg1 || !arg2)
1812 return NULL_TREE;
1814 STRIP_NOPS (arg1);
1815 STRIP_NOPS (arg2);
1817 if (TREE_CODE (arg1) == INTEGER_CST)
1818 return int_const_binop (code, arg1, arg2, notrunc);
1820 if (TREE_CODE (arg1) == REAL_CST)
1822 enum machine_mode mode;
1823 REAL_VALUE_TYPE d1;
1824 REAL_VALUE_TYPE d2;
1825 REAL_VALUE_TYPE value;
1826 REAL_VALUE_TYPE result;
1827 bool inexact;
1828 tree t, type;
1830 /* The following codes are handled by real_arithmetic. */
1831 switch (code)
1833 case PLUS_EXPR:
1834 case MINUS_EXPR:
1835 case MULT_EXPR:
1836 case RDIV_EXPR:
1837 case MIN_EXPR:
1838 case MAX_EXPR:
1839 break;
1841 default:
1842 return NULL_TREE;
1845 d1 = TREE_REAL_CST (arg1);
1846 d2 = TREE_REAL_CST (arg2);
1848 type = TREE_TYPE (arg1);
1849 mode = TYPE_MODE (type);
1851 /* Don't perform the operation if we honor signaling NaNs and
1852 either operand is a NaN. */
1853 if (HONOR_SNANS (mode)
1854 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1855 return NULL_TREE;
1857 /* Don't perform the operation if it would raise a division
1858 by zero exception. */
1859 if (code == RDIV_EXPR
1860 && REAL_VALUES_EQUAL (d2, dconst0)
1861 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1862 return NULL_TREE;
1864 /* If either operand is a NaN, just return it. Otherwise, set up
1865 for floating-point trap; we return an overflow. */
1866 if (REAL_VALUE_ISNAN (d1))
1867 return arg1;
1868 else if (REAL_VALUE_ISNAN (d2))
1869 return arg2;
1871 inexact = real_arithmetic (&value, code, &d1, &d2);
1872 real_convert (&result, mode, &value);
1874 /* Don't constant fold this floating point operation if the
1875 result has overflowed and flag_trapping_math is set. */
1876 if (flag_trapping_math
1877 && MODE_HAS_INFINITIES (mode)
1878 && REAL_VALUE_ISINF (result)
1879 && !REAL_VALUE_ISINF (d1)
1880 && !REAL_VALUE_ISINF (d2))
1881 return NULL_TREE;
1883 /* Don't constant fold this floating point operation if the
1884 result may depend upon the run-time rounding mode and
1885 flag_rounding_math is set, or if GCC's software emulation
1886 is unable to accurately represent the result. */
1887 if ((flag_rounding_math
1888 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1890 return NULL_TREE;
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1895 return t;
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1903 tree t, type;
1904 int sat_p;
1905 bool overflow_p;
1907 /* The following codes are handled by fixed_arithmetic. */
1908 switch (code)
1910 case PLUS_EXPR:
1911 case MINUS_EXPR:
1912 case MULT_EXPR:
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1915 break;
1917 case LSHIFT_EXPR:
1918 case RSHIFT_EXPR:
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1921 f2.mode = SImode;
1922 break;
1924 default:
1925 return NULL_TREE;
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 return t;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1951 tree real, imag;
1953 switch (code)
1955 case PLUS_EXPR:
1956 case MINUS_EXPR:
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1959 break;
1961 case MULT_EXPR:
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1965 notrunc);
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1969 notrunc);
1970 break;
1972 case RDIV_EXPR:
1974 tree magsquared
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1978 notrunc);
1979 tree t1
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1983 notrunc);
1984 tree t2
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1988 notrunc);
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
1996 break;
1998 default:
1999 return NULL_TREE;
2002 if (real && imag)
2003 return build_complex (type, real, imag);
2006 return NULL_TREE;
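/* Illustrative sketch, not part of fold-const.c: the COMPLEX_CST case
   above folds MULT_EXPR with the identity (a+bi)(c+di) = (ac-bd) +
   (ad+bc)i and RDIV_EXPR with the conjugate formula over |c+di|^2.
   A standalone version of the division over plain doubles
   (hypothetical helper; assumes |c+di|^2 neither overflows nor
   underflows): */

#include <stdio.h>

static void
complex_rdiv (double a, double b, double c, double d,
              double *re, double *im)
{
  double magsq = c * c + d * d;     /* the magsquared term above */
  *re = (a * c + b * d) / magsq;    /* t1 / magsquared */
  *im = (b * c - a * d) / magsq;    /* t2 / magsquared */
}

int
main (void)
{
  double re, im;
  complex_rdiv (1.0, 2.0, 3.0, 4.0, &re, &im);  /* (1+2i) / (3+4i) */
  printf ("%g%+gi\n", re, im);                  /* prints 0.44+0.08i */
  return 0;
}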
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2012 tree
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2023 tree
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2032 TREE_TYPE (arg1)));
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2041 return arg1;
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2043 return arg0;
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 return arg0;
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2053 return arg1;
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2067 tree
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2071 tree ctype;
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2081 ctype = ssizetype;
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2084 else
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2102 else
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2105 arg1, arg0)));
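/* Illustrative sketch, not part of fold-const.c: the constant branch of
   size_diffop above does the unsigned subtraction in whichever order
   cannot wrap, and only then converts and negates.  The same idea over
   size_t/ptrdiff_t (hypothetical helper; assumes the true difference
   fits in ptrdiff_t): */

#include <assert.h>
#include <stddef.h>

static ptrdiff_t
size_diff (size_t a, size_t b)
{
  if (a == b)
    return 0;                    /* special-case a result of zero */
  else if (b < a)
    return (ptrdiff_t) (a - b);  /* subtract, then convert: can't wrap */
  else
    return -(ptrdiff_t) (b - a); /* subtract the other way and negate */
}

int
main (void)
{
  assert (size_diff (10, 3) == 7);
  assert (size_diff (3, 10) == -7);
  return 0;
}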
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2111 static tree
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2114 tree t;
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with the same signedness
2124 when the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2142 return t;
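/* Illustrative sketch, not part of fold-const.c: narrowing an integer
   constant keeps the low-order bits and re-extends them according to
   the signedness of the new type, which is what force_fit_type_double
   does above.  On a two's-complement target (the only kind GCC
   supports) the effect matches these casts: */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t x = 0x1FF;             /* 511 */
  assert ((uint8_t) x == 0xFF);  /* low 8 bits, zero-extended */
  assert ((int8_t) x == -1);     /* low 8 bits, sign-extended */
  return 0;
}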
2145 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2146 to an integer type. */
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2151 int overflow = 0;
2152 tree t;
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2167 switch (code)
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2173 default:
2174 gcc_unreachable ();
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2180 overflow = 1;
2181 high = 0;
2182 low = 0;
2185 /* See if R is less than the lower bound or greater than the
2186 upper bound. */
2188 if (! overflow)
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2194 overflow = 1;
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2200 if (! overflow)
2202 tree ut = TYPE_MAX_VALUE (type);
2203 if (ut)
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2208 overflow = 1;
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2215 if (! overflow)
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2220 return t;
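/* Illustrative sketch, not part of fold-const.c: the Java-style rules
   above truncate toward zero, map NaN to zero and saturate at the
   type's bounds, flagging overflow in each out-of-range case.  A
   standalone version targeting int (hypothetical helper name): */

#include <assert.h>
#include <limits.h>
#include <math.h>

static int
fp_to_int_saturating (double r, int *overflow)
{
  *overflow = 0;
  if (isnan (r))
    {
      *overflow = 1;
      return 0;                 /* NaN -> zero */
    }
  r = trunc (r);                /* FIX_TRUNC_EXPR: round toward 0 */
  if (r < (double) INT_MIN)
    {
      *overflow = 1;
      return INT_MIN;           /* saturate at the lower bound */
    }
  if (r > (double) INT_MAX)
    {
      *overflow = 1;
      return INT_MAX;           /* saturate at the upper bound */
    }
  return (int) r;               /* in range: exact conversion */
}

int
main (void)
{
  int ovf;
  assert (fp_to_int_saturating (-2.9, &ovf) == -2 && !ovf);
  assert (fp_to_int_saturating (1e99, &ovf) == INT_MAX && ovf);
  assert (fp_to_int_saturating (NAN, &ovf) == 0 && ovf);
  return 0;
}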
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2226 static tree
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2229 tree t;
2230 double_int temp, temp_trunc;
2231 unsigned int mode;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2248 else
2250 temp.low = 0;
2251 temp.high = 0;
2252 temp_trunc.low = 0;
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0.
2257 Do so by adding 1 to temp when any fractional bits are nonzero. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2261 double_int one;
2262 one.low = 1;
2263 one.high = 0;
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2270 (temp.high < 0
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2275 return t;
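/* Illustrative sketch, not part of fold-const.c: the conversion above
   is a right shift by the number of fractional bits, plus 1 when the
   value is negative and has a nonzero fraction, so that the result is
   rounded toward zero instead of toward minus infinity.  The same
   logic for a hypothetical Q16.16 value in an int32_t (assumes the
   arithmetic right shift GCC provides for signed types): */

#include <assert.h>
#include <stdint.h>

#define FBITS 16                /* fractional bits of Q16.16 */

static int32_t
fixed_to_int (int32_t q)
{
  int32_t t = q >> FBITS;       /* arithmetic shift: rounds down */
  if (q < 0 && (q & ((1 << FBITS) - 1)) != 0)
    t += 1;                     /* negative with a fraction: go up */
  return t;
}

int
main (void)
{
  assert (fixed_to_int (3 << FBITS) == 3);          /*  3.0 ->  3 */
  assert (fixed_to_int (-(3 << FBITS) / 2) == -1);  /* -1.5 -> -1 */
  return 0;
}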
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2281 static tree
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2285 tree t;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2291 return t;
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2297 static tree
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2301 tree t;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2309 return t;
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2315 static tree
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2319 tree t;
2320 bool overflow_p;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 return t;
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2340 static tree
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 return t;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 return t;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2393 static tree
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2397 return arg1;
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2400 || TREE_CODE (type) == OFFSET_TYPE)
2402 if (TREE_CODE (arg1) == INTEGER_CST)
2403 return fold_convert_const_int_from_int (type, arg1);
2404 else if (TREE_CODE (arg1) == REAL_CST)
2405 return fold_convert_const_int_from_real (code, type, arg1);
2406 else if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_int_from_fixed (type, arg1);
2409 else if (TREE_CODE (type) == REAL_TYPE)
2411 if (TREE_CODE (arg1) == INTEGER_CST)
2412 return build_real_from_int_cst (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_real_from_real (type, arg1);
2415 else if (TREE_CODE (arg1) == FIXED_CST)
2416 return fold_convert_const_real_from_fixed (type, arg1);
2418 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2420 if (TREE_CODE (arg1) == FIXED_CST)
2421 return fold_convert_const_fixed_from_fixed (type, arg1);
2422 else if (TREE_CODE (arg1) == INTEGER_CST)
2423 return fold_convert_const_fixed_from_int (type, arg1);
2424 else if (TREE_CODE (arg1) == REAL_CST)
2425 return fold_convert_const_fixed_from_real (type, arg1);
2427 return NULL_TREE;
2430 /* Construct a vector of zero elements of vector type TYPE. */
2432 static tree
2433 build_zero_vector (tree type)
2435 tree elem, list;
2436 int i, units;
2438 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2439 units = TYPE_VECTOR_SUBPARTS (type);
2441 list = NULL_TREE;
2442 for (i = 0; i < units; i++)
2443 list = tree_cons (NULL_TREE, elem, list);
2444 return build_vector (type, list);
2447 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2449 bool
2450 fold_convertible_p (const_tree type, const_tree arg)
2452 tree orig = TREE_TYPE (arg);
2454 if (type == orig)
2455 return true;
2457 if (TREE_CODE (arg) == ERROR_MARK
2458 || TREE_CODE (type) == ERROR_MARK
2459 || TREE_CODE (orig) == ERROR_MARK)
2460 return false;
2462 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2463 return true;
2465 switch (TREE_CODE (type))
2467 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2468 case POINTER_TYPE: case REFERENCE_TYPE:
2469 case OFFSET_TYPE:
2470 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2471 || TREE_CODE (orig) == OFFSET_TYPE)
2472 return true;
2473 return (TREE_CODE (orig) == VECTOR_TYPE
2474 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2476 case REAL_TYPE:
2477 case FIXED_POINT_TYPE:
2478 case COMPLEX_TYPE:
2479 case VECTOR_TYPE:
2480 case VOID_TYPE:
2481 return TREE_CODE (type) == TREE_CODE (orig);
2483 default:
2484 return false;
2488 /* Convert expression ARG to type TYPE. Used by the middle-end for
2489 simple conversions in preference to calling the front-end's convert. */
2491 tree
2492 fold_convert (tree type, tree arg)
2494 tree orig = TREE_TYPE (arg);
2495 tree tem;
2497 if (type == orig)
2498 return arg;
2500 if (TREE_CODE (arg) == ERROR_MARK
2501 || TREE_CODE (type) == ERROR_MARK
2502 || TREE_CODE (orig) == ERROR_MARK)
2503 return error_mark_node;
2505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2506 return fold_build1 (NOP_EXPR, type, arg);
2508 switch (TREE_CODE (type))
2510 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case OFFSET_TYPE:
2513 if (TREE_CODE (arg) == INTEGER_CST)
2515 tem = fold_convert_const (NOP_EXPR, type, arg);
2516 if (tem != NULL_TREE)
2517 return tem;
2519 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 || TREE_CODE (orig) == OFFSET_TYPE)
2521 return fold_build1 (NOP_EXPR, type, arg);
2522 if (TREE_CODE (orig) == COMPLEX_TYPE)
2524 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2525 return fold_convert (type, tem);
2527 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2528 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2529 return fold_build1 (NOP_EXPR, type, arg);
2531 case REAL_TYPE:
2532 if (TREE_CODE (arg) == INTEGER_CST)
2534 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2535 if (tem != NULL_TREE)
2536 return tem;
2538 else if (TREE_CODE (arg) == REAL_CST)
2540 tem = fold_convert_const (NOP_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2542 return tem;
2544 else if (TREE_CODE (arg) == FIXED_CST)
2546 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2548 return tem;
2551 switch (TREE_CODE (orig))
2553 case INTEGER_TYPE:
2554 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2555 case POINTER_TYPE: case REFERENCE_TYPE:
2556 return fold_build1 (FLOAT_EXPR, type, arg);
2558 case REAL_TYPE:
2559 return fold_build1 (NOP_EXPR, type, arg);
2561 case FIXED_POINT_TYPE:
2562 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2564 case COMPLEX_TYPE:
2565 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2566 return fold_convert (type, tem);
2568 default:
2569 gcc_unreachable ();
2572 case FIXED_POINT_TYPE:
2573 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2574 || TREE_CODE (arg) == REAL_CST)
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2578 return tem;
2581 switch (TREE_CODE (orig))
2583 case FIXED_POINT_TYPE:
2584 case INTEGER_TYPE:
2585 case ENUMERAL_TYPE:
2586 case BOOLEAN_TYPE:
2587 case REAL_TYPE:
2588 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2590 case COMPLEX_TYPE:
2591 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2592 return fold_convert (type, tem);
2594 default:
2595 gcc_unreachable ();
2598 case COMPLEX_TYPE:
2599 switch (TREE_CODE (orig))
2601 case INTEGER_TYPE:
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case REAL_TYPE:
2605 case FIXED_POINT_TYPE:
2606 return build2 (COMPLEX_EXPR, type,
2607 fold_convert (TREE_TYPE (type), arg),
2608 fold_convert (TREE_TYPE (type), integer_zero_node));
2609 case COMPLEX_TYPE:
2611 tree rpart, ipart;
2613 if (TREE_CODE (arg) == COMPLEX_EXPR)
2615 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2616 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2617 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2620 arg = save_expr (arg);
2621 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2623 rpart = fold_convert (TREE_TYPE (type), rpart);
2624 ipart = fold_convert (TREE_TYPE (type), ipart);
2625 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2628 default:
2629 gcc_unreachable ();
2632 case VECTOR_TYPE:
2633 if (integer_zerop (arg))
2634 return build_zero_vector (type);
2635 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2636 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2637 || TREE_CODE (orig) == VECTOR_TYPE);
2638 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2640 case VOID_TYPE:
2641 tem = fold_ignored_result (arg);
2642 if (TREE_CODE (tem) == MODIFY_EXPR)
2643 return tem;
2644 return fold_build1 (NOP_EXPR, type, tem);
2646 default:
2647 gcc_unreachable ();
2651 /* Return false if expr can be assumed not to be an lvalue, true
2652 otherwise. */
2654 static bool
2655 maybe_lvalue_p (const_tree x)
2657 /* We only need to wrap lvalue tree codes. */
2658 switch (TREE_CODE (x))
2660 case VAR_DECL:
2661 case PARM_DECL:
2662 case RESULT_DECL:
2663 case LABEL_DECL:
2664 case FUNCTION_DECL:
2665 case SSA_NAME:
2667 case COMPONENT_REF:
2668 case INDIRECT_REF:
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2671 case ARRAY_REF:
2672 case ARRAY_RANGE_REF:
2673 case BIT_FIELD_REF:
2674 case OBJ_TYPE_REF:
2676 case REALPART_EXPR:
2677 case IMAGPART_EXPR:
2678 case PREINCREMENT_EXPR:
2679 case PREDECREMENT_EXPR:
2680 case SAVE_EXPR:
2681 case TRY_CATCH_EXPR:
2682 case WITH_CLEANUP_EXPR:
2683 case COMPOUND_EXPR:
2684 case MODIFY_EXPR:
2685 case TARGET_EXPR:
2686 case COND_EXPR:
2687 case BIND_EXPR:
2688 case MIN_EXPR:
2689 case MAX_EXPR:
2690 break;
2692 default:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2695 break;
2696 return false;
2699 return true;
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2704 tree
2705 non_lvalue (tree x)
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2708 us. */
2709 if (in_gimple_form)
2710 return x;
2712 if (! maybe_lvalue_p (x))
2713 return x;
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2725 static tree
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2730 else
2731 return x;
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS flag
2737 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2739 enum tree_code
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2743 return ERROR_MARK;
2745 switch (code)
2747 case EQ_EXPR:
2748 return NE_EXPR;
2749 case NE_EXPR:
2750 return EQ_EXPR;
2751 case GT_EXPR:
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2753 case GE_EXPR:
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2755 case LT_EXPR:
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2757 case LE_EXPR:
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2759 case LTGT_EXPR:
2760 return UNEQ_EXPR;
2761 case UNEQ_EXPR:
2762 return LTGT_EXPR;
2763 case UNGT_EXPR:
2764 return LE_EXPR;
2765 case UNGE_EXPR:
2766 return LT_EXPR;
2767 case UNLT_EXPR:
2768 return GE_EXPR;
2769 case UNLE_EXPR:
2770 return GT_EXPR;
2771 case ORDERED_EXPR:
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2775 default:
2776 gcc_unreachable ();
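/* Illustrative sketch, not part of fold-const.c: with NaNs honored the
   inverse of GT_EXPR must be UNLE_EXPR rather than LE_EXPR, because an
   ordered <= is false whenever an operand is NaN, just like >.  A
   standalone check using the C99 classification macros: */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  assert (!(x > y));            /* GT is false on a NaN operand... */
  assert (!(x <= y));           /* ...but so is LE: not an inverse */
  assert (!isgreater (x, y));   /* !GT == UNLE, which holds here */
  return 0;
}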
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2783 enum tree_code
2784 swap_tree_comparison (enum tree_code code)
2786 switch (code)
2788 case EQ_EXPR:
2789 case NE_EXPR:
2790 case ORDERED_EXPR:
2791 case UNORDERED_EXPR:
2792 case LTGT_EXPR:
2793 case UNEQ_EXPR:
2794 return code;
2795 case GT_EXPR:
2796 return LT_EXPR;
2797 case GE_EXPR:
2798 return LE_EXPR;
2799 case LT_EXPR:
2800 return GT_EXPR;
2801 case LE_EXPR:
2802 return GE_EXPR;
2803 case UNGT_EXPR:
2804 return UNLT_EXPR;
2805 case UNGE_EXPR:
2806 return UNLE_EXPR;
2807 case UNLT_EXPR:
2808 return UNGT_EXPR;
2809 case UNLE_EXPR:
2810 return UNGE_EXPR;
2811 default:
2812 gcc_unreachable ();
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2824 switch (code)
2826 case LT_EXPR:
2827 return COMPCODE_LT;
2828 case EQ_EXPR:
2829 return COMPCODE_EQ;
2830 case LE_EXPR:
2831 return COMPCODE_LE;
2832 case GT_EXPR:
2833 return COMPCODE_GT;
2834 case NE_EXPR:
2835 return COMPCODE_NE;
2836 case GE_EXPR:
2837 return COMPCODE_GE;
2838 case ORDERED_EXPR:
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2842 case UNLT_EXPR:
2843 return COMPCODE_UNLT;
2844 case UNEQ_EXPR:
2845 return COMPCODE_UNEQ;
2846 case UNLE_EXPR:
2847 return COMPCODE_UNLE;
2848 case UNGT_EXPR:
2849 return COMPCODE_UNGT;
2850 case LTGT_EXPR:
2851 return COMPCODE_LTGT;
2852 case UNGE_EXPR:
2853 return COMPCODE_UNGE;
2854 default:
2855 gcc_unreachable ();
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2866 switch (code)
2868 case COMPCODE_LT:
2869 return LT_EXPR;
2870 case COMPCODE_EQ:
2871 return EQ_EXPR;
2872 case COMPCODE_LE:
2873 return LE_EXPR;
2874 case COMPCODE_GT:
2875 return GT_EXPR;
2876 case COMPCODE_NE:
2877 return NE_EXPR;
2878 case COMPCODE_GE:
2879 return GE_EXPR;
2880 case COMPCODE_ORD:
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2884 case COMPCODE_UNLT:
2885 return UNLT_EXPR;
2886 case COMPCODE_UNEQ:
2887 return UNEQ_EXPR;
2888 case COMPCODE_UNLE:
2889 return UNLE_EXPR;
2890 case COMPCODE_UNGT:
2891 return UNGT_EXPR;
2892 case COMPCODE_LTGT:
2893 return LTGT_EXPR;
2894 case COMPCODE_UNGE:
2895 return UNGE_EXPR;
2896 default:
2897 gcc_unreachable ();
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2907 tree
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2917 switch (code)
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2921 break;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2925 break;
2927 default:
2928 return NULL_TREE;
2931 if (!honor_nans)
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2963 rtrap = false;
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2967 if (rtrap && !ltrap
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2969 return NULL_TREE;
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2973 return NULL_TREE;
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2980 else
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
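/* Illustrative sketch, not part of fold-const.c: because the compcode
   encoding gives "less", "equal", "greater" and "unordered" their own
   bits, ANDing or ORing two comparisons of the same operands reduces
   to bitwise AND/OR of their codes, as done above.  A standalone check
   with the same bit assignment: */

#include <assert.h>

enum { CC_LT = 1, CC_EQ = 2, CC_LE = 3, CC_GT = 4, CC_LTGT = 5,
       CC_GE = 6, CC_UNORD = 8, CC_NE = 13 };

int
main (void)
{
  assert ((CC_LT | CC_EQ) == CC_LE);    /* x < y || x == y  =>  x <= y */
  assert ((CC_LE & CC_NE) == CC_LT);    /* x <= y && x != y =>  x < y  */
  assert ((CC_LT | CC_GT) == CC_LTGT);  /* <>; folded to NE w/o NaNs   */
  return 0;
}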
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2987 static int
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3022 int
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3027 return 0;
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If the two types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3041 return 0;
3043 /* If the two types don't have the same precision, then it is not safe
3044 to strip NOPs. */
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3046 return 0;
3048 STRIP_NOPS (arg0);
3049 STRIP_NOPS (arg1);
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3073 return 0;
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3085 return 1;
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3092 case INTEGER_CST:
3093 return tree_int_cst_equal (arg0, arg1);
3095 case FIXED_CST:
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3099 case REAL_CST:
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3102 return 1;
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3110 return 1;
3112 return 0;
3114 case VECTOR_CST:
3116 tree v1, v2;
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3120 while (v1 && v2)
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3123 flags))
3124 return 0;
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3129 return v1 == v2;
3132 case COMPLEX_CST:
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3134 flags)
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3136 flags));
3138 case STRING_CST:
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3144 case ADDR_EXPR:
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3146 0);
3147 default:
3148 break;
3151 if (flags & OEP_ONLY_CONST)
3152 return 0;
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3167 case tcc_unary:
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3171 CASE_CONVERT:
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3175 return 0;
3176 break;
3177 default:
3178 break;
3181 return OP_SAME (0);
3184 case tcc_comparison:
3185 case tcc_binary:
3186 if (OP_SAME (0) && OP_SAME (1))
3187 return 1;
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3196 case tcc_reference:
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3201 return 0;
3203 switch (TREE_CODE (arg0))
3205 case INDIRECT_REF:
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3208 case REALPART_EXPR:
3209 case IMAGPART_EXPR:
3210 return OP_SAME (0);
3212 case ARRAY_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 First compare the array index by value, if it is constant, as we
3216 may have different types but the same value here. */
3217 return (OP_SAME (0)
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3220 || OP_SAME (1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3224 case COMPONENT_REF:
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3228 && OP_SAME (1)
3229 && OP_SAME_WITH_NULL (2);
3231 case BIT_FIELD_REF:
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3234 default:
3235 return 0;
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3241 case ADDR_EXPR:
3242 case TRUTH_NOT_EXPR:
3243 return OP_SAME (0);
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3249 case TRUTH_AND_EXPR:
3250 case TRUTH_OR_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3253 return 1;
3255 /* Otherwise take into account this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3261 case COND_EXPR:
3262 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3264 default:
3265 return 0;
3268 case tcc_vl_exp:
3269 switch (TREE_CODE (arg0))
3271 case CALL_EXPR:
3272 /* If the CALL_EXPRs call different functions, then they
3273 clearly cannot be equal. */
3274 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3275 flags))
3276 return 0;
3279 unsigned int cef = call_expr_flags (arg0);
3280 if (flags & OEP_PURE_SAME)
3281 cef &= ECF_CONST | ECF_PURE;
3282 else
3283 cef &= ECF_CONST;
3284 if (!cef)
3285 return 0;
3288 /* Now see if all the arguments are the same. */
3290 const_call_expr_arg_iterator iter0, iter1;
3291 const_tree a0, a1;
3292 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3293 a1 = first_const_call_expr_arg (arg1, &iter1);
3294 a0 && a1;
3295 a0 = next_const_call_expr_arg (&iter0),
3296 a1 = next_const_call_expr_arg (&iter1))
3297 if (! operand_equal_p (a0, a1, flags))
3298 return 0;
3300 /* If we get here and both argument lists are exhausted
3301 then the CALL_EXPRs are equal. */
3302 return ! (a0 || a1);
3304 default:
3305 return 0;
3308 case tcc_declaration:
3309 /* Consider __builtin_sqrt equal to sqrt. */
3310 return (TREE_CODE (arg0) == FUNCTION_DECL
3311 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3312 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3313 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3315 default:
3316 return 0;
3319 #undef OP_SAME
3320 #undef OP_SAME_WITH_NULL
3323 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3324 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3326 When in doubt, return 0. */
3328 static int
3329 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3331 int unsignedp1, unsignedpo;
3332 tree primarg0, primarg1, primother;
3333 unsigned int correct_width;
3335 if (operand_equal_p (arg0, arg1, 0))
3336 return 1;
3338 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3339 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3340 return 0;
3342 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3343 and see if the inner values are the same. This removes any
3344 signedness comparison, which doesn't matter here. */
3345 primarg0 = arg0, primarg1 = arg1;
3346 STRIP_NOPS (primarg0);
3347 STRIP_NOPS (primarg1);
3348 if (operand_equal_p (primarg0, primarg1, 0))
3349 return 1;
3351 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3352 actual comparison operand, ARG0.
3354 First throw away any conversions to wider types
3355 already present in the operands. */
3357 primarg1 = get_narrower (arg1, &unsignedp1);
3358 primother = get_narrower (other, &unsignedpo);
3360 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3361 if (unsignedp1 == unsignedpo
3362 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3363 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3365 tree type = TREE_TYPE (arg0);
3367 /* Make sure shorter operand is extended the right way
3368 to match the longer operand. */
3369 primarg1 = fold_convert (signed_or_unsigned_type_for
3370 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3372 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3373 return 1;
3376 return 0;
3379 /* See if ARG is an expression that is either a comparison or is performing
3380 arithmetic on comparisons. The comparisons must only be comparing
3381 two different values, which will be stored in *CVAL1 and *CVAL2; if
3382 they are nonzero it means that some operands have already been found.
3383 No variables may be used anywhere else in the expression except in the
3384 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3385 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3387 If this is true, return 1. Otherwise, return zero. */
3389 static int
3390 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3392 enum tree_code code = TREE_CODE (arg);
3393 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3395 /* We can handle some of the tcc_expression cases here. */
3396 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3397 tclass = tcc_unary;
3398 else if (tclass == tcc_expression
3399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3400 || code == COMPOUND_EXPR))
3401 tclass = tcc_binary;
3403 else if (tclass == tcc_expression && code == SAVE_EXPR
3404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3406 /* If we've already found a CVAL1 or CVAL2, this expression is
3407 too complex to handle. */
3408 if (*cval1 || *cval2)
3409 return 0;
3411 tclass = tcc_unary;
3412 *save_p = 1;
3415 switch (tclass)
3417 case tcc_unary:
3418 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3420 case tcc_binary:
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3423 cval1, cval2, save_p));
3425 case tcc_constant:
3426 return 1;
3428 case tcc_expression:
3429 if (code == COND_EXPR)
3430 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3431 cval1, cval2, save_p)
3432 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3433 cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3435 cval1, cval2, save_p));
3436 return 0;
3438 case tcc_comparison:
3439 /* First see if we can handle the first operand, then the second. For
3440 the second operand, we know *CVAL1 can't be zero. It must be that
3441 one side of the comparison is each of the values; test for the
3442 case where this isn't true by failing if the two operands
3443 are the same. */
3445 if (operand_equal_p (TREE_OPERAND (arg, 0),
3446 TREE_OPERAND (arg, 1), 0))
3447 return 0;
3449 if (*cval1 == 0)
3450 *cval1 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 0);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3457 else
3458 return 0;
3460 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3462 else if (*cval2 == 0)
3463 *cval2 = TREE_OPERAND (arg, 1);
3464 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3466 else
3467 return 0;
3469 return 1;
3471 default:
3472 return 0;
3476 /* ARG is a tree that is known to contain just arithmetic operations and
3477 comparisons. Evaluate the operations in the tree substituting NEW0 for
3478 any occurrence of OLD0 as an operand of a comparison and likewise for
3479 NEW1 and OLD1. */
3481 static tree
3482 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3484 tree type = TREE_TYPE (arg);
3485 enum tree_code code = TREE_CODE (arg);
3486 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3488 /* We can handle some of the tcc_expression cases here. */
3489 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3490 tclass = tcc_unary;
3491 else if (tclass == tcc_expression
3492 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3493 tclass = tcc_binary;
3495 switch (tclass)
3497 case tcc_unary:
3498 return fold_build1 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1));
3502 case tcc_binary:
3503 return fold_build2 (code, type,
3504 eval_subst (TREE_OPERAND (arg, 0),
3505 old0, new0, old1, new1),
3506 eval_subst (TREE_OPERAND (arg, 1),
3507 old0, new0, old1, new1));
3509 case tcc_expression:
3510 switch (code)
3512 case SAVE_EXPR:
3513 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3515 case COMPOUND_EXPR:
3516 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3518 case COND_EXPR:
3519 return fold_build3 (code, type,
3520 eval_subst (TREE_OPERAND (arg, 0),
3521 old0, new0, old1, new1),
3522 eval_subst (TREE_OPERAND (arg, 1),
3523 old0, new0, old1, new1),
3524 eval_subst (TREE_OPERAND (arg, 2),
3525 old0, new0, old1, new1));
3526 default:
3527 break;
3529 /* Fall through - ??? */
3531 case tcc_comparison:
3533 tree arg0 = TREE_OPERAND (arg, 0);
3534 tree arg1 = TREE_OPERAND (arg, 1);
3536 /* We need to check both for exact equality and tree equality. The
3537 former will be true if the operand has a side-effect. In that
3538 case, we know the operand occurred exactly once. */
3540 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3541 arg0 = new0;
3542 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3543 arg0 = new1;
3545 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3546 arg1 = new0;
3547 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3548 arg1 = new1;
3550 return fold_build2 (code, type, arg0, arg1);
3553 default:
3554 return arg;
3558 /* Return a tree for the case when the result of an expression is RESULT
3559 converted to TYPE and OMITTED was previously an operand of the expression
3560 but is now not needed (e.g., we folded OMITTED * 0).
3562 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3563 the conversion of RESULT to TYPE. */
3565 tree
3566 omit_one_operand (tree type, tree result, tree omitted)
3568 tree t = fold_convert (type, result);
3570 /* If the resulting operand is an empty statement, just return the omitted
3571 statement cast to void. */
3572 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3573 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3575 if (TREE_SIDE_EFFECTS (omitted))
3576 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3578 return non_lvalue (t);
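/* Illustrative sketch, not part of fold-const.c: wrapping the omitted
   operand in a COMPOUND_EXPR keeps its side effects.  At the source
   level, folding f () * 0 this way behaves like the comma expression
   below (f is a made-up function with a visible side effect): */

#include <assert.h>

static int calls;

static int
f (void)
{
  return ++calls;       /* visible side effect */
}

int
main (void)
{
  int r = (f (), 0);    /* what omit_one_operand builds */
  assert (r == 0);      /* the folded result */
  assert (calls == 1);  /* the call still happened exactly once */
  return 0;
}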
3581 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3583 static tree
3584 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3586 tree t = fold_convert (type, result);
3588 /* If the resulting operand is an empty statement, just return the omitted
3589 statement cast to void. */
3590 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3591 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3593 if (TREE_SIDE_EFFECTS (omitted))
3594 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3596 return pedantic_non_lvalue (t);
3599 /* Return a tree for the case when the result of an expression is RESULT
3600 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3601 of the expression but are now not needed.
3603 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3604 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3605 evaluated before OMITTED2. Otherwise, if neither has side effects,
3606 just do the conversion of RESULT to TYPE. */
3608 tree
3609 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3611 tree t = fold_convert (type, result);
3613 if (TREE_SIDE_EFFECTS (omitted2))
3614 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3615 if (TREE_SIDE_EFFECTS (omitted1))
3616 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3618 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3622 /* Return a simplified tree node for the truth-negation of ARG. This
3623 never alters ARG itself. We assume that ARG is an operation that
3624 returns a truth value (0 or 1).
3626 FIXME: one would think we would fold the result, but it causes
3627 problems with the dominator optimizer. */
3629 tree
3630 fold_truth_not_expr (tree arg)
3632 tree type = TREE_TYPE (arg);
3633 enum tree_code code = TREE_CODE (arg);
3635 /* If this is a comparison, we can simply invert it, except for
3636 floating-point non-equality comparisons, in which case we just
3637 enclose a TRUTH_NOT_EXPR around what we have. */
3639 if (TREE_CODE_CLASS (code) == tcc_comparison)
3641 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3642 if (FLOAT_TYPE_P (op_type)
3643 && flag_trapping_math
3644 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3645 && code != NE_EXPR && code != EQ_EXPR)
3646 return NULL_TREE;
3647 else
3649 code = invert_tree_comparison (code,
3650 HONOR_NANS (TYPE_MODE (op_type)));
3651 if (code == ERROR_MARK)
3652 return NULL_TREE;
3653 else
3654 return build2 (code, type,
3655 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3659 switch (code)
3661 case INTEGER_CST:
3662 return constant_boolean_node (integer_zerop (arg), type);
3664 case TRUTH_AND_EXPR:
3665 return build2 (TRUTH_OR_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3669 case TRUTH_OR_EXPR:
3670 return build2 (TRUTH_AND_EXPR, type,
3671 invert_truthvalue (TREE_OPERAND (arg, 0)),
3672 invert_truthvalue (TREE_OPERAND (arg, 1)));
3674 case TRUTH_XOR_EXPR:
3675 /* Here we can invert either operand. We invert the first operand
3676 unless the second operand is a TRUTH_NOT_EXPR in which case our
3677 result is the XOR of the first operand with the inside of the
3678 negation of the second operand. */
3680 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3681 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3682 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3683 else
3684 return build2 (TRUTH_XOR_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 TREE_OPERAND (arg, 1));
3688 case TRUTH_ANDIF_EXPR:
3689 return build2 (TRUTH_ORIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_ORIF_EXPR:
3694 return build2 (TRUTH_ANDIF_EXPR, type,
3695 invert_truthvalue (TREE_OPERAND (arg, 0)),
3696 invert_truthvalue (TREE_OPERAND (arg, 1)));
3698 case TRUTH_NOT_EXPR:
3699 return TREE_OPERAND (arg, 0);
3701 case COND_EXPR:
3703 tree arg1 = TREE_OPERAND (arg, 1);
3704 tree arg2 = TREE_OPERAND (arg, 2);
3705 /* A COND_EXPR may have a throw as one operand, which
3706 then has void type. Just leave void operands
3707 as they are. */
3708 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3709 VOID_TYPE_P (TREE_TYPE (arg1))
3710 ? arg1 : invert_truthvalue (arg1),
3711 VOID_TYPE_P (TREE_TYPE (arg2))
3712 ? arg2 : invert_truthvalue (arg2));
3715 case COMPOUND_EXPR:
3716 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3717 invert_truthvalue (TREE_OPERAND (arg, 1)));
3719 case NON_LVALUE_EXPR:
3720 return invert_truthvalue (TREE_OPERAND (arg, 0));
3722 case NOP_EXPR:
3723 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3724 return build1 (TRUTH_NOT_EXPR, type, arg);
3726 case CONVERT_EXPR:
3727 case FLOAT_EXPR:
3728 return build1 (TREE_CODE (arg), type,
3729 invert_truthvalue (TREE_OPERAND (arg, 0)));
3731 case BIT_AND_EXPR:
3732 if (!integer_onep (TREE_OPERAND (arg, 1)))
3733 break;
3734 return build2 (EQ_EXPR, type, arg,
3735 build_int_cst (type, 0));
3737 case SAVE_EXPR:
3738 return build1 (TRUTH_NOT_EXPR, type, arg);
3740 case CLEANUP_POINT_EXPR:
3741 return build1 (CLEANUP_POINT_EXPR, type,
3742 invert_truthvalue (TREE_OPERAND (arg, 0)));
3744 default:
3745 break;
3748 return NULL_TREE;
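/* Illustrative sketch, not part of fold-const.c: the TRUTH_* cases
   above are De Morgan's laws plus the rule that negating an XOR only
   negates one of its operands.  A quick exhaustive check over truth
   values: */

#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b));  /* TRUTH_AND_EXPR case */
        assert (!(a || b) == (!a && !b));  /* TRUTH_OR_EXPR case */
        assert (!(a ^ b) == (!a ^ b));     /* TRUTH_XOR_EXPR case */
      }
  return 0;
}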
3751 /* Return a simplified tree node for the truth-negation of ARG. This
3752 never alters ARG itself. We assume that ARG is an operation that
3753 returns a truth value (0 or 1).
3755 FIXME: one would think we would fold the result, but it causes
3756 problems with the dominator optimizer. */
3758 tree
3759 invert_truthvalue (tree arg)
3761 tree tem;
3763 if (TREE_CODE (arg) == ERROR_MARK)
3764 return arg;
3766 tem = fold_truth_not_expr (arg);
3767 if (!tem)
3768 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3770 return tem;
3773 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3774 operands are another bit-wise operation with a common input. If so,
3775 distribute the bit operations to save an operation and possibly two if
3776 constants are involved. For example, convert
3777 (A | B) & (A | C) into A | (B & C)
3778 Further simplification will occur if B and C are constants.
3780 If this optimization cannot be done, 0 will be returned. */
3782 static tree
3783 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3785 tree common;
3786 tree left, right;
3788 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3789 || TREE_CODE (arg0) == code
3790 || (TREE_CODE (arg0) != BIT_AND_EXPR
3791 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3792 return 0;
3794 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3796 common = TREE_OPERAND (arg0, 0);
3797 left = TREE_OPERAND (arg0, 1);
3798 right = TREE_OPERAND (arg1, 1);
3800 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3802 common = TREE_OPERAND (arg0, 0);
3803 left = TREE_OPERAND (arg0, 1);
3804 right = TREE_OPERAND (arg1, 0);
3806 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3808 common = TREE_OPERAND (arg0, 1);
3809 left = TREE_OPERAND (arg0, 0);
3810 right = TREE_OPERAND (arg1, 1);
3812 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3814 common = TREE_OPERAND (arg0, 1);
3815 left = TREE_OPERAND (arg0, 0);
3816 right = TREE_OPERAND (arg1, 0);
3818 else
3819 return 0;
3821 return fold_build2 (TREE_CODE (arg0), type, common,
3822 fold_build2 (code, type, left, right));
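/* Illustrative sketch, not part of fold-const.c: an exhaustive check of
   the distribution identity used above, (A | B) & (A | C) == A | (B & C),
   and its dual, over all 8-bit operands: */

#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 256; a++)
    for (unsigned b = 0; b < 256; b++)
      for (unsigned c = 0; c < 256; c++)
        {
          assert (((a | b) & (a | c)) == (a | (b & c)));
          assert (((a & b) | (a & c)) == (a & (b | c)));
        }
  return 0;
}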
3825 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3826 with code CODE. This optimization is unsafe. */
3827 static tree
3828 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3830 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3831 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3833 /* (A / C) +- (B / C) -> (A +- B) / C. */
3834 if (mul0 == mul1
3835 && operand_equal_p (TREE_OPERAND (arg0, 1),
3836 TREE_OPERAND (arg1, 1), 0))
3837 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3838 fold_build2 (code, type,
3839 TREE_OPERAND (arg0, 0),
3840 TREE_OPERAND (arg1, 0)),
3841 TREE_OPERAND (arg0, 1));
3843 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3844 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3845 TREE_OPERAND (arg1, 0), 0)
3846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3847 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3849 REAL_VALUE_TYPE r0, r1;
3850 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3851 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3852 if (!mul0)
3853 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3854 if (!mul1)
3855 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3856 real_arithmetic (&r0, code, &r0, &r1);
3857 return fold_build2 (MULT_EXPR, type,
3858 TREE_OPERAND (arg0, 0),
3859 build_real (type, r0));
3862 return NULL_TREE;
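/* Illustrative sketch, not part of fold-const.c: the second rewrite
   above turns A/C1 + A/C2 into A * (1/C1 + 1/C2) with the parenthesized
   part folded at compile time.  The two forms are mathematically equal
   but round differently, which is why the comment calls the
   optimization unsafe; compare for yourself: */

#include <stdio.h>

int
main (void)
{
  double a = 1e16 + 1.0, c1 = 3.0, c2 = 7.0;
  double direct = a / c1 + a / c2;                 /* original form */
  double distributed = a * (1.0 / c1 + 1.0 / c2);  /* rewritten form */
  printf ("%.17g\n%.17g\n", direct, distributed);  /* may differ in ulps */
  return 0;
}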
3865 /* Subroutine for fold_truthop: decode a field reference.
3867 If EXP is a comparison reference, we return the innermost reference.
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3875 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3876 otherwise it is not changed.
3878 *PUNSIGNEDP is set to the signedness of the field.
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
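/* A sketch with an invented declaration: given
     struct s { unsigned int f : 9; } *p;
   and EXP of the form p->f & 0x1f, this returns the containing object
   *p, sets *PBITSIZE to 9 and *PBITPOS to the field's bit offset, sets
   *PAND_MASK to 0x1f, and sets *PMASK to the 9-bit field mask merged
   with 0x1f.  */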
3888 static tree
3889 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3894 tree outer_type = 0;
3895 tree and_mask = 0;
3896 tree mask, inner, offset;
3897 tree unsigned_type;
3898 unsigned int precision;
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 return 0;
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3912 STRIP_NOPS (exp);
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3920 return 0;
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3928 return 0;
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3940 mask = build_int_cst_type (unsigned_type, -1);
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3946 if (and_mask != 0)
3947 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3948 fold_convert (unsigned_type, and_mask), mask);
3950 *pmask = mask;
3951 *pand_mask = and_mask;
3952 return inner;
3955 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3956 represents the sign bit of EXP's type. If EXP represents a sign
3957 or zero extension, also test VAL against the unextended type.
3958 The return value is the (sub)expression whose sign bit is VAL,
3959 or NULL_TREE otherwise. */
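/* For instance, if EXP has 32-bit int type, VAL matches only when it
   equals 1 << 31 viewed as unsigned; if EXP is a widening NOP_EXPR
   from a 16-bit operand, 1 << 15 is also tried against that operand.
   (Widths here are illustrative.)  */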
3961 static tree
3962 sign_bit_p (tree exp, const_tree val)
3964 unsigned HOST_WIDE_INT mask_lo, lo;
3965 HOST_WIDE_INT mask_hi, hi;
3966 int width;
3967 tree t;
3969 /* Tree EXP must have an integral type. */
3970 t = TREE_TYPE (exp);
3971 if (! INTEGRAL_TYPE_P (t))
3972 return NULL_TREE;
3974 /* Tree VAL must be an integer constant. */
3975 if (TREE_CODE (val) != INTEGER_CST
3976 || TREE_OVERFLOW (val))
3977 return NULL_TREE;
3979 width = TYPE_PRECISION (t);
3980 if (width > HOST_BITS_PER_WIDE_INT)
3982 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3983 lo = 0;
3985 mask_hi = ((unsigned HOST_WIDE_INT) -1
3986 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3987 mask_lo = -1;
3989 else
3991 hi = 0;
3992 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3994 mask_hi = 0;
3995 mask_lo = ((unsigned HOST_WIDE_INT) -1
3996 >> (HOST_BITS_PER_WIDE_INT - width));
3999 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4000 treat VAL as if it were unsigned. */
4001 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4002 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4003 return exp;
4005 /* Handle extension from a narrower type. */
4006 if (TREE_CODE (exp) == NOP_EXPR
4007 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4008 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4010 return NULL_TREE;
4013 /* Subroutine for fold_truthop: determine if an operand is simple enough
4014 to be evaluated unconditionally. */
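/* E.g. constants, SSA names and ordinary non-volatile, non-addressable
   locals qualify; a volatile object, an extern or public variable, or
   a pointer dereference does not (examples invented).  */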
4016 static int
4017 simple_operand_p (const_tree exp)
4019 /* Strip any conversions that don't change the machine mode. */
4020 STRIP_NOPS (exp);
4022 return (CONSTANT_CLASS_P (exp)
4023 || TREE_CODE (exp) == SSA_NAME
4024 || (DECL_P (exp)
4025 && ! TREE_ADDRESSABLE (exp)
4026 && ! TREE_THIS_VOLATILE (exp)
4027 && ! DECL_NONLOCAL (exp)
4028 /* Don't regard global variables as simple. They may be
4029 allocated in ways unknown to the compiler (shared memory,
4030 #pragma weak, etc). */
4031 && ! TREE_PUBLIC (exp)
4032 && ! DECL_EXTERNAL (exp)
4033 /* Loading a static variable is unduly expensive, but global
4034 registers aren't expensive. */
4035 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4038 /* The following functions are subroutines to fold_range_test and allow it to
4039 try to change a logical combination of comparisons into a range test.
4041 For example, both
4042 X == 2 || X == 3 || X == 4 || X == 5
4043 and
4044 X >= 2 && X <= 5
4045 are converted to
4046 (unsigned) (X - 2) <= 3
4048 We describe each set of comparisons as being either inside or outside
4049 a range, using a variable named like IN_P, and then describe the
4050 range with a lower and upper bound. If one of the bounds is omitted,
4051 it represents either the highest or lowest value of the type.
4053 In the comments below, we represent a range by two numbers in brackets
4054 preceded by a "+" to designate being inside that range, or a "-" to
4055 designate being outside that range, so the condition can be inverted by
4056 flipping the prefix. An omitted bound is represented by a "-". For
4057 example, "- [-, 10]" means being outside the range starting at the lowest
4058 possible value and ending at 10, in other words, being greater than 10.
4059 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4060 always false.
4062 We set up things so that the missing bounds are handled in a consistent
4063 manner so neither a missing bound nor "true" and "false" need to be
4064 handled using a special case. */
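/* Checking the example above: for X in {2, 3, 4, 5}, X - 2 computed in
   the unsigned type lands in {0, 1, 2, 3}, so (unsigned) (X - 2) <= 3
   holds; for any other X the subtraction wraps to a value above 3. In
   the bracket notation both forms denote + [2, 5].  */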
4066 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4067 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4068 and UPPER1_P are nonzero if the respective argument is an upper bound
4069 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4070 must be specified for a comparison. ARG1 will be converted to ARG0's
4071 type if both are specified. */
4073 static tree
4074 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4075 tree arg1, int upper1_p)
4077 tree tem;
4078 int result;
4079 int sgn0, sgn1;
4081 /* If neither arg represents infinity, do the normal operation.
4082 Else, if not a comparison, return infinity. Else handle the special
4083 comparison rules. Note that most of the cases below won't occur, but
4084 are handled for consistency. */
4086 if (arg0 != 0 && arg1 != 0)
4088 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4089 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4090 STRIP_NOPS (tem);
4091 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4094 if (TREE_CODE_CLASS (code) != tcc_comparison)
4095 return 0;
4097 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4098 for neither. In real mathematics, we cannot assume open-ended ranges are
4099 the same. But this is computer arithmetic, where numbers are finite.
4100 We can therefore replace any missing bound by a value Z greater than
4101 any representable number. This permits us to treat unbounded
4102 ranges as equal. */
4103 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4104 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4105 switch (code)
4107 case EQ_EXPR:
4108 result = sgn0 == sgn1;
4109 break;
4110 case NE_EXPR:
4111 result = sgn0 != sgn1;
4112 break;
4113 case LT_EXPR:
4114 result = sgn0 < sgn1;
4115 break;
4116 case LE_EXPR:
4117 result = sgn0 <= sgn1;
4118 break;
4119 case GT_EXPR:
4120 result = sgn0 > sgn1;
4121 break;
4122 case GE_EXPR:
4123 result = sgn0 >= sgn1;
4124 break;
4125 default:
4126 gcc_unreachable ();
4129 return constant_boolean_node (result, type);
4132 /* Given EXP, a logical expression, set the range it is testing into
4133 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4134 actually being tested. *PLOW and *PHIGH will be made of the same
4135 type as the returned expression. If EXP is not a comparison, we
4136 will most likely not be returning a useful value and range. Set
4137 *STRICT_OVERFLOW_P to true if the return value is only valid
4138 because signed overflow is undefined; otherwise, do not change
4139 *STRICT_OVERFLOW_P. */
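/* An invented example: for EXP = (x < 10) with x a signed int, this
   returns x with *PIN_P 0 and the range [10, -], i.e. x lies outside
   [10, +inf); wrapping EXP in TRUTH_NOT_EXPR yields the same range
   with *PIN_P 1.  */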
4141 static tree
4142 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4143 bool *strict_overflow_p)
4145 enum tree_code code;
4146 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4147 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4148 int in_p, n_in_p;
4149 tree low, high, n_low, n_high;
4151 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4152 and see if we can refine the range. Some of the cases below may not
4153 happen, but it doesn't seem worth worrying about this. We "continue"
4154 the outer loop when we've changed something; otherwise we "break"
4155 the switch, which will "break" the while. */
4157 in_p = 0;
4158 low = high = build_int_cst (TREE_TYPE (exp), 0);
4160 while (1)
4162 code = TREE_CODE (exp);
4163 exp_type = TREE_TYPE (exp);
4165 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4167 if (TREE_OPERAND_LENGTH (exp) > 0)
4168 arg0 = TREE_OPERAND (exp, 0);
4169 if (TREE_CODE_CLASS (code) == tcc_comparison
4170 || TREE_CODE_CLASS (code) == tcc_unary
4171 || TREE_CODE_CLASS (code) == tcc_binary)
4172 arg0_type = TREE_TYPE (arg0);
4173 if (TREE_CODE_CLASS (code) == tcc_binary
4174 || TREE_CODE_CLASS (code) == tcc_comparison
4175 || (TREE_CODE_CLASS (code) == tcc_expression
4176 && TREE_OPERAND_LENGTH (exp) > 1))
4177 arg1 = TREE_OPERAND (exp, 1);
4180 switch (code)
4182 case TRUTH_NOT_EXPR:
4183 in_p = ! in_p, exp = arg0;
4184 continue;
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 /* We can only do something if the range is testing for zero
4189 and if the second operand is an integer constant. Note that
4190 saying something is "in" the range we make is done by
4191 complementing IN_P since it will set in the initial case of
4192 being not equal to zero; "out" is leaving it alone. */
4193 if (low == 0 || high == 0
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4196 break;
4198 switch (code)
4200 case NE_EXPR: /* - [c, c] */
4201 low = high = arg1;
4202 break;
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4205 break;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4208 break;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4211 break;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4214 break;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4217 break;
4218 default:
4219 gcc_unreachable ();
4222 /* If this is an unsigned comparison, we also know that EXP is
4223 greater than or equal to zero. We base the range tests we make
4224 on that fact, so we record it here so we can parse existing
4225 range tests. We test arg0_type since often the return type
4226 of, e.g. EQ_EXPR, is boolean. */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4230 in_p, low, high, 1,
4231 build_int_cst (arg0_type, 0),
4232 NULL_TREE))
4233 break;
4235 in_p = n_in_p, low = n_low, high = n_high;
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound
4239 minus 1. */
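/* E.g. for an unsigned type, + [5, -] becomes - [0, 4]. */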
4240 if (high == 0 && low && ! integer_zerop (low))
4242 in_p = ! in_p;
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 integer_one_node, 0);
4245 low = build_int_cst (arg0_type, 0);
4249 exp = arg0;
4250 continue;
4252 case NEGATE_EXPR:
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4256 0, high, 1);
4257 n_high = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4259 0, low, 0);
4260 low = n_low, high = n_high;
4261 exp = arg0;
4262 continue;
4264 case BIT_NOT_EXPR:
4265 /* ~ X -> -X - 1 */
4266 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4267 build_int_cst (exp_type, 1));
4268 continue;
4270 case PLUS_EXPR: case MINUS_EXPR:
4271 if (TREE_CODE (arg1) != INTEGER_CST)
4272 break;
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4278 break;
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, low, 0, arg1, 0);
4286 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4287 arg0_type, high, 1, arg1, 0);
4288 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4289 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4290 break;
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4293 *strict_overflow_p = true;
4295 /* Check for an unsigned range which has wrapped around the maximum
4296 value thus making n_high < n_low, and normalize it. */
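/* E.g. in an 8-bit unsigned type, the wrapped range + [250, 5]
   denotes the same set as - [6, 249]. */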
4297 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4299 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4300 integer_one_node, 0);
4301 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4302 integer_one_node, 0);
4304 /* If the range is of the form +/- [ x+1, x ], we won't
4305 be able to normalize it. But then, it represents the
4306 whole range or the empty set, so make it
4307 +/- [ -, - ]. */
4308 if (tree_int_cst_equal (n_low, low)
4309 && tree_int_cst_equal (n_high, high))
4310 low = high = 0;
4311 else
4312 in_p = ! in_p;
4314 else
4315 low = n_low, high = n_high;
4317 exp = arg0;
4318 continue;
4320 CASE_CONVERT: case NON_LVALUE_EXPR:
4321 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4322 break;
4324 if (! INTEGRAL_TYPE_P (arg0_type)
4325 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4326 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4327 break;
4329 n_low = low, n_high = high;
4331 if (n_low != 0)
4332 n_low = fold_convert (arg0_type, n_low);
4334 if (n_high != 0)
4335 n_high = fold_convert (arg0_type, n_high);
4338 /* If we're converting arg0 from an unsigned type to exp,
4339 a signed type, we will be doing the comparison as unsigned.
4340 The tests above have already verified that LOW and HIGH
4341 are both positive.
4343 So we have to ensure that we will handle large unsigned
4344 values the same way that the current signed bounds treat
4345 negative values. */
4347 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4349 tree high_positive;
4350 tree equiv_type;
4351 /* For fixed-point modes, we need to pass the saturating flag
4352 as the 2nd parameter. */
4353 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4354 equiv_type = lang_hooks.types.type_for_mode
4355 (TYPE_MODE (arg0_type),
4356 TYPE_SATURATING (arg0_type));
4357 else
4358 equiv_type = lang_hooks.types.type_for_mode
4359 (TYPE_MODE (arg0_type), 1);
4361 /* A range without an upper bound is, naturally, unbounded.
4362 Since convert would have cropped a very large value, use
4363 the max value for the destination type. */
4364 high_positive
4365 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4366 : TYPE_MAX_VALUE (arg0_type);
4368 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4369 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4370 fold_convert (arg0_type,
4371 high_positive),
4372 build_int_cst (arg0_type, 1));
4374 /* If the low bound is specified, "and" the range with the
4375 range for which the original unsigned value will be
4376 positive. */
4377 if (low != 0)
4379 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4380 1, n_low, n_high, 1,
4381 fold_convert (arg0_type,
4382 integer_zero_node),
4383 high_positive))
4384 break;
4386 in_p = (n_in_p == in_p);
4388 else
4390 /* Otherwise, "or" the range with the range of the input
4391 that will be interpreted as negative. */
4392 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4393 0, n_low, n_high, 1,
4394 fold_convert (arg0_type,
4395 integer_zero_node),
4396 high_positive))
4397 break;
4399 in_p = (in_p != n_in_p);
4403 exp = arg0;
4404 low = n_low, high = n_high;
4405 continue;
4407 default:
4408 break;
4411 break;
4414 /* If EXP is a constant, we can evaluate whether this is true or false. */
4415 if (TREE_CODE (exp) == INTEGER_CST)
4417 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4418 exp, 0, low, 0))
4419 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4420 exp, 1, high, 1)));
4421 low = high = 0;
4422 exp = 0;
4425 *pin_p = in_p, *plow = low, *phigh = high;
4426 return exp;
4429 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4430 type, TYPE, return an expression to test if EXP is in (or out of, depending
4431 on IN_P) the range. Return 0 if the test couldn't be created. */
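/* Continuing the earlier example (constants invented): with IN_P 1,
   LOW 2 and HIGH 5 this builds the single unsigned test
     (unsigned) (EXP - 2) <= 3
   and with IN_P 0 it returns that test inverted.  */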
4433 static tree
4434 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4436 tree etype = TREE_TYPE (exp);
4437 tree value;
4439 #ifdef HAVE_canonicalize_funcptr_for_compare
4440 /* Disable this optimization for function pointer expressions
4441 on targets that require function pointer canonicalization. */
4442 if (HAVE_canonicalize_funcptr_for_compare
4443 && TREE_CODE (etype) == POINTER_TYPE
4444 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4445 return NULL_TREE;
4446 #endif
4448 if (! in_p)
4450 value = build_range_check (type, exp, 1, low, high);
4451 if (value != 0)
4452 return invert_truthvalue (value);
4454 return 0;
4457 if (low == 0 && high == 0)
4458 return build_int_cst (type, 1);
4460 if (low == 0)
4461 return fold_build2 (LE_EXPR, type, exp,
4462 fold_convert (etype, high));
4464 if (high == 0)
4465 return fold_build2 (GE_EXPR, type, exp,
4466 fold_convert (etype, low));
4468 if (operand_equal_p (low, high, 0))
4469 return fold_build2 (EQ_EXPR, type, exp,
4470 fold_convert (etype, low));
4472 if (integer_zerop (low))
4474 if (! TYPE_UNSIGNED (etype))
4476 etype = unsigned_type_for (etype);
4477 high = fold_convert (etype, high);
4478 exp = fold_convert (etype, exp);
4480 return build_range_check (type, exp, 1, 0, high);
4483 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
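/* For an 8-bit type, 1 and 127 bound exactly the values that are
   positive when reinterpreted as signed, so the two tests collapse
   into one sign comparison. */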
4484 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4486 unsigned HOST_WIDE_INT lo;
4487 HOST_WIDE_INT hi;
4488 int prec;
4490 prec = TYPE_PRECISION (etype);
4491 if (prec <= HOST_BITS_PER_WIDE_INT)
4493 hi = 0;
4494 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4496 else
4498 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4499 lo = (unsigned HOST_WIDE_INT) -1;
4502 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4504 if (TYPE_UNSIGNED (etype))
4506 etype = signed_type_for (etype);
4507 exp = fold_convert (etype, exp);
4509 return fold_build2 (GT_EXPR, type, exp,
4510 build_int_cst (etype, 0));
4514 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4515 This requires wrap-around arithmetic for the type of the expression. */
4516 switch (TREE_CODE (etype))
4518 case INTEGER_TYPE:
4519 /* There is no requirement that LOW be within the range of ETYPE
4520 if the latter is a subtype. It must, however, be within the base
4521 type of ETYPE. So be sure we do the subtraction in that type. */
4522 if (TREE_TYPE (etype))
4523 etype = TREE_TYPE (etype);
4524 break;
4526 case ENUMERAL_TYPE:
4527 case BOOLEAN_TYPE:
4528 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4529 TYPE_UNSIGNED (etype));
4530 break;
4532 default:
4533 break;
4536 /* If we don't have wrap-around arithmetic up front, try to force it. */
4537 if (TREE_CODE (etype) == INTEGER_TYPE
4538 && !TYPE_OVERFLOW_WRAPS (etype))
4540 tree utype, minv, maxv;
4542 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4543 for the type in question, as we rely on this here. */
4544 utype = unsigned_type_for (etype);
4545 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4546 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4547 integer_one_node, 1);
4548 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4550 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4551 minv, 1, maxv, 1)))
4552 etype = utype;
4553 else
4554 return 0;
4557 high = fold_convert (etype, high);
4558 low = fold_convert (etype, low);
4559 exp = fold_convert (etype, exp);
4561 value = const_binop (MINUS_EXPR, high, low, 0);
4564 if (POINTER_TYPE_P (etype))
4566 if (value != 0 && !TREE_OVERFLOW (value))
4568 low = fold_convert (sizetype, low);
4569 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4570 return build_range_check (type,
4571 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4572 1, build_int_cst (etype, 0), value);
4574 return 0;
4577 if (value != 0 && !TREE_OVERFLOW (value))
4578 return build_range_check (type,
4579 fold_build2 (MINUS_EXPR, etype, exp, low),
4580 1, build_int_cst (etype, 0), value);
4582 return 0;
4585 /* Return the predecessor of VAL in its type, handling the infinite case. */
4587 static tree
4588 range_predecessor (tree val)
4590 tree type = TREE_TYPE (val);
4592 if (INTEGRAL_TYPE_P (type)
4593 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4594 return 0;
4595 else
4596 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4599 /* Return the successor of VAL in its type, handling the infinite case. */
4601 static tree
4602 range_successor (tree val)
4604 tree type = TREE_TYPE (val);
4606 if (INTEGRAL_TYPE_P (type)
4607 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4608 return 0;
4609 else
4610 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4613 /* Given two ranges, see if we can merge them into one. Return 1 if we
4614 can, 0 if we can't. Set the output range into the specified parameters. */
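/* For instance (ranges invented): merging + [2, 5] with + [4, 9] when
   both are "in" yields the intersection + [4, 5]; callers obtain the
   union [2, 9] by merging the inverted ranges and inverting the
   result.  */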
4616 static int
4617 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4618 tree high0, int in1_p, tree low1, tree high1)
4620 int no_overlap;
4621 int subset;
4622 int temp;
4623 tree tem;
4624 int in_p;
4625 tree low, high;
4626 int lowequal = ((low0 == 0 && low1 == 0)
4627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4628 low0, 0, low1, 0)));
4629 int highequal = ((high0 == 0 && high1 == 0)
4630 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4631 high0, 1, high1, 1)));
4633 /* Make range 0 be the range that starts first, or ends last if they
4634 start at the same value. Swap them if that isn't the case. */
4635 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4636 low0, 0, low1, 0))
4637 || (lowequal
4638 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4639 high1, 1, high0, 1))))
4641 temp = in0_p, in0_p = in1_p, in1_p = temp;
4642 tem = low0, low0 = low1, low1 = tem;
4643 tem = high0, high0 = high1, high1 = tem;
4646 /* Now flag two cases, whether the ranges are disjoint or whether the
4647 second range is totally subsumed in the first. Note that the tests
4648 below are simplified by the ones above. */
4649 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4650 high0, 1, low1, 0));
4651 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4652 high1, 1, high0, 1));
4654 /* We now have four cases, depending on whether we are including or
4655 excluding the two ranges. */
4656 if (in0_p && in1_p)
4658 /* If they don't overlap, the result is false. If the second range
4659 is a subset it is the result. Otherwise, the range is from the start
4660 of the second to the end of the first. */
4661 if (no_overlap)
4662 in_p = 0, low = high = 0;
4663 else if (subset)
4664 in_p = 1, low = low1, high = high1;
4665 else
4666 in_p = 1, low = low1, high = high0;
4669 else if (in0_p && ! in1_p)
4671 /* If they don't overlap, the result is the first range. If they are
4672 equal, the result is false. If the second range is a subset of the
4673 first, and the ranges begin at the same place, we go from just after
4674 the end of the second range to the end of the first. If the second
4675 range is not a subset of the first, or if it is a subset and both
4676 ranges end at the same place, the range starts at the start of the
4677 first range and ends just before the second range.
4678 Otherwise, we can't describe this as a single range. */
4679 if (no_overlap)
4680 in_p = 1, low = low0, high = high0;
4681 else if (lowequal && highequal)
4682 in_p = 0, low = high = 0;
4683 else if (subset && lowequal)
4685 low = range_successor (high1);
4686 high = high0;
4687 in_p = 1;
4688 if (low == 0)
4690 /* We are in the weird situation where high0 > high1 but
4691 high1 has no successor. Punt. */
4692 return 0;
4695 else if (! subset || highequal)
4697 low = low0;
4698 high = range_predecessor (low1);
4699 in_p = 1;
4700 if (high == 0)
4702 /* low0 < low1 but low1 has no predecessor. Punt. */
4703 return 0;
4706 else
4707 return 0;
4710 else if (! in0_p && in1_p)
4712 /* If they don't overlap, the result is the second range. If the second
4713 is a subset of the first, the result is false. Otherwise,
4714 the range starts just after the first range and ends at the
4715 end of the second. */
4716 if (no_overlap)
4717 in_p = 1, low = low1, high = high1;
4718 else if (subset || highequal)
4719 in_p = 0, low = high = 0;
4720 else
4722 low = range_successor (high0);
4723 high = high1;
4724 in_p = 1;
4725 if (low == 0)
4727 /* high1 > high0 but high0 has no successor. Punt. */
4728 return 0;
4733 else
4735 /* The case where we are excluding both ranges. Here the complex case
4736 is if they don't overlap. In that case, the only time we have a
4737 range is if they are adjacent. If the second is a subset of the
4738 first, the result is the first. Otherwise, the range to exclude
4739 starts at the beginning of the first range and ends at the end of the
4740 second. */
4741 if (no_overlap)
4743 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4744 range_successor (high0),
4745 1, low1, 0)))
4746 in_p = 0, low = low0, high = high1;
4747 else
4749 /* Canonicalize - [min, x] into - [-, x]. */
4750 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4751 switch (TREE_CODE (TREE_TYPE (low0)))
4753 case ENUMERAL_TYPE:
4754 if (TYPE_PRECISION (TREE_TYPE (low0))
4755 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4756 break;
4757 /* FALLTHROUGH */
4758 case INTEGER_TYPE:
4759 if (tree_int_cst_equal (low0,
4760 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4761 low0 = 0;
4762 break;
4763 case POINTER_TYPE:
4764 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4765 && integer_zerop (low0))
4766 low0 = 0;
4767 break;
4768 default:
4769 break;
4772 /* Canonicalize - [x, max] into - [x, -]. */
4773 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4774 switch (TREE_CODE (TREE_TYPE (high1)))
4776 case ENUMERAL_TYPE:
4777 if (TYPE_PRECISION (TREE_TYPE (high1))
4778 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4779 break;
4780 /* FALLTHROUGH */
4781 case INTEGER_TYPE:
4782 if (tree_int_cst_equal (high1,
4783 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4784 high1 = 0;
4785 break;
4786 case POINTER_TYPE:
4787 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4788 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4789 high1, 1,
4790 integer_one_node, 1)))
4791 high1 = 0;
4792 break;
4793 default:
4794 break;
4797 /* The ranges might also be adjacent between the maximum and
4798 minimum values of the given type. For
4799 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4800 return + [x + 1, y - 1]. */
4801 if (low0 == 0 && high1 == 0)
4803 low = range_successor (high0);
4804 high = range_predecessor (low1);
4805 if (low == 0 || high == 0)
4806 return 0;
4808 in_p = 1;
4810 else
4811 return 0;
4814 else if (subset)
4815 in_p = 0, low = low0, high = high0;
4816 else
4817 in_p = 0, low = low0, high = high1;
4820 *pin_p = in_p, *plow = low, *phigh = high;
4821 return 1;
4825 /* Subroutine of fold, looking inside expressions of the form
4826 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4827 of the COND_EXPR. This function is being used also to optimize
4828 A op B ? C : A, by reversing the comparison first.
4830 Return a folded expression whose code is not a COND_EXPR
4831 anymore, or NULL_TREE if no folding opportunity is found. */
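/* Typical instances (operands invented): x > 0 ? x : -x folds to
   ABS_EXPR <x>, and x < y ? x : y folds to a MIN_EXPR of x and y,
   subject to the signed-zero and NaN caveats spelled out below.  */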
4833 static tree
4834 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4836 enum tree_code comp_code = TREE_CODE (arg0);
4837 tree arg00 = TREE_OPERAND (arg0, 0);
4838 tree arg01 = TREE_OPERAND (arg0, 1);
4839 tree arg1_type = TREE_TYPE (arg1);
4840 tree tem;
4842 STRIP_NOPS (arg1);
4843 STRIP_NOPS (arg2);
4845 /* If we have A op 0 ? A : -A, consider applying the following
4846 transformations:
4848 A == 0? A : -A same as -A
4849 A != 0? A : -A same as A
4850 A >= 0? A : -A same as abs (A)
4851 A > 0? A : -A same as abs (A)
4852 A <= 0? A : -A same as -abs (A)
4853 A < 0? A : -A same as -abs (A)
4855 None of these transformations work for modes with signed
4856 zeros. If A is +/-0, the first two transformations will
4857 change the sign of the result (from +0 to -0, or vice
4858 versa). The last four will fix the sign of the result,
4859 even though the original expressions could be positive or
4860 negative, depending on the sign of A.
4862 Note that all these transformations are correct if A is
4863 NaN, since the two alternatives (A and -A) are also NaNs. */
4864 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4865 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4866 ? real_zerop (arg01)
4867 : integer_zerop (arg01))
4868 && ((TREE_CODE (arg2) == NEGATE_EXPR
4869 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4870 /* In the case that A is of the form X-Y, '-A' (arg2) may
4871 have already been folded to Y-X, check for that. */
4872 || (TREE_CODE (arg1) == MINUS_EXPR
4873 && TREE_CODE (arg2) == MINUS_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg1, 0),
4875 TREE_OPERAND (arg2, 1), 0)
4876 && operand_equal_p (TREE_OPERAND (arg1, 1),
4877 TREE_OPERAND (arg2, 0), 0))))
4878 switch (comp_code)
4880 case EQ_EXPR:
4881 case UNEQ_EXPR:
4882 tem = fold_convert (arg1_type, arg1);
4883 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4884 case NE_EXPR:
4885 case LTGT_EXPR:
4886 return pedantic_non_lvalue (fold_convert (type, arg1));
4887 case UNGE_EXPR:
4888 case UNGT_EXPR:
4889 if (flag_trapping_math)
4890 break;
4891 /* Fall through. */
4892 case GE_EXPR:
4893 case GT_EXPR:
4894 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4895 arg1 = fold_convert (signed_type_for
4896 (TREE_TYPE (arg1)), arg1);
4897 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4898 return pedantic_non_lvalue (fold_convert (type, tem));
4899 case UNLE_EXPR:
4900 case UNLT_EXPR:
4901 if (flag_trapping_math)
4902 break;
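/* Fall through. */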
4903 case LE_EXPR:
4904 case LT_EXPR:
4905 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4906 arg1 = fold_convert (signed_type_for
4907 (TREE_TYPE (arg1)), arg1);
4908 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4909 return negate_expr (fold_convert (type, tem));
4910 default:
4911 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4912 break;
4915 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4916 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4917 both transformations are correct when A is NaN: A != 0
4918 is then true, and A == 0 is false. */
4920 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4921 && integer_zerop (arg01) && integer_zerop (arg2))
4923 if (comp_code == NE_EXPR)
4924 return pedantic_non_lvalue (fold_convert (type, arg1));
4925 else if (comp_code == EQ_EXPR)
4926 return build_int_cst (type, 0);
4929 /* Try some transformations of A op B ? A : B.
4931 A == B? A : B same as B
4932 A != B? A : B same as A
4933 A >= B? A : B same as max (A, B)
4934 A > B? A : B same as max (B, A)
4935 A <= B? A : B same as min (A, B)
4936 A < B? A : B same as min (B, A)
4938 As above, these transformations don't work in the presence
4939 of signed zeros. For example, if A and B are zeros of
4940 opposite sign, the first two transformations will change
4941 the sign of the result. In the last four, the original
4942 expressions give different results for (A=+0, B=-0) and
4943 (A=-0, B=+0), but the transformed expressions do not.
4945 The first two transformations are correct if either A or B
4946 is a NaN. In the first transformation, the condition will
4947 be false, and B will indeed be chosen. In the case of the
4948 second transformation, the condition A != B will be true,
4949 and A will be chosen.
4951 The conversions to max() and min() are not correct if B is
4952 a number and A is not. The conditions in the original
4953 expressions will be false, so all four give B. The min()
4954 and max() versions would give a NaN instead. */
4955 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4956 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4957 /* Avoid these transformations if the COND_EXPR may be used
4958 as an lvalue in the C++ front-end. PR c++/19199. */
4959 && (in_gimple_form
4960 || (strcmp (lang_hooks.name, "GNU C++") != 0
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4972 comp_type = type;
4973 comp_op0 = arg1;
4974 comp_op1 = arg2;
4977 switch (comp_code)
4979 case EQ_EXPR:
4980 return pedantic_non_lvalue (fold_convert (type, arg2));
4981 case NE_EXPR:
4982 return pedantic_non_lvalue (fold_convert (type, arg1));
4983 case LE_EXPR:
4984 case LT_EXPR:
4985 case UNLE_EXPR:
4986 case UNLT_EXPR:
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4993 comp_op0 = fold_convert (comp_type, comp_op0);
4994 comp_op1 = fold_convert (comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4998 return pedantic_non_lvalue (fold_convert (type, tem));
5000 break;
5001 case GE_EXPR:
5002 case GT_EXPR:
5003 case UNGE_EXPR:
5004 case UNGT_EXPR:
5005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5007 comp_op0 = fold_convert (comp_type, comp_op0);
5008 comp_op1 = fold_convert (comp_type, comp_op1);
5009 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5010 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5011 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5012 return pedantic_non_lvalue (fold_convert (type, tem));
5014 break;
5015 case UNEQ_EXPR:
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5017 return pedantic_non_lvalue (fold_convert (type, arg2));
5018 break;
5019 case LTGT_EXPR:
5020 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5021 return pedantic_non_lvalue (fold_convert (type, arg1));
5022 break;
5023 default:
5024 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5025 break;
5029 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5030 we might still be able to simplify this. For example,
5031 if C1 is one less or one more than C2, this might have started
5032 out as a MIN or MAX and been transformed by this function.
5033 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5035 if (INTEGRAL_TYPE_P (type)
5036 && TREE_CODE (arg01) == INTEGER_CST
5037 && TREE_CODE (arg2) == INTEGER_CST)
5038 switch (comp_code)
5040 case EQ_EXPR:
5041 /* We can replace A with C1 in this case. */
5042 arg1 = fold_convert (type, arg01);
5043 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5045 case LT_EXPR:
5046 /* If C1 is C2 + 1, this is min(A, C2). */
5047 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5048 OEP_ONLY_CONST)
5049 && operand_equal_p (arg01,
5050 const_binop (PLUS_EXPR, arg2,
5051 build_int_cst (type, 1), 0),
5052 OEP_ONLY_CONST))
5053 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5054 type,
5055 fold_convert (type, arg1),
5056 arg2));
5057 break;
5059 case LE_EXPR:
5060 /* If C1 is C2 - 1, this is min(A, C2). */
5061 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5062 OEP_ONLY_CONST)
5063 && operand_equal_p (arg01,
5064 const_binop (MINUS_EXPR, arg2,
5065 build_int_cst (type, 1), 0),
5066 OEP_ONLY_CONST))
5067 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5068 type,
5069 fold_convert (type, arg1),
5070 arg2));
5071 break;
5073 case GT_EXPR:
5074 /* If C1 is C2 - 1, this is max(A, C2). */
5075 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5076 OEP_ONLY_CONST)
5077 && operand_equal_p (arg01,
5078 const_binop (MINUS_EXPR, arg2,
5079 build_int_cst (type, 1), 0),
5080 OEP_ONLY_CONST))
5081 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5082 type,
5083 fold_convert (type, arg1),
5084 arg2));
5085 break;
5087 case GE_EXPR:
5088 /* If C1 is C2 + 1, this is max(A, C2). */
5089 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5090 OEP_ONLY_CONST)
5091 && operand_equal_p (arg01,
5092 const_binop (PLUS_EXPR, arg2,
5093 build_int_cst (type, 1), 0),
5094 OEP_ONLY_CONST))
5095 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5096 type,
5097 fold_convert (type, arg1),
5098 arg2));
5099 break;
5100 case NE_EXPR:
5101 break;
5102 default:
5103 gcc_unreachable ();
5106 return NULL_TREE;
5111 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5112 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5113 #endif
5115 /* EXP is some logical combination of boolean tests. See if we can
5116 merge it into some range test. Return the new tree if so. */
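/* A classic case: for ch >= '0' && ch <= '9' both operands describe
   ranges of ch, merge_ranges combines them into + ['0', '9'], and
   build_range_check emits the single comparison
     (unsigned) (ch - '0') <= 9.  */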
5118 static tree
5119 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5121 int or_op = (code == TRUTH_ORIF_EXPR
5122 || code == TRUTH_OR_EXPR);
5123 int in0_p, in1_p, in_p;
5124 tree low0, low1, low, high0, high1, high;
5125 bool strict_overflow_p = false;
5126 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5127 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5128 tree tem;
5129 const char * const warnmsg = G_("assuming signed overflow does not occur "
5130 "when simplifying range test");
5132 /* If this is an OR operation, invert both sides; we will invert
5133 again at the end. */
5134 if (or_op)
5135 in0_p = ! in0_p, in1_p = ! in1_p;
5137 /* If both expressions are the same, if we can merge the ranges, and we
5138 can build the range test, return it or it inverted. If one of the
5139 ranges is always true or always false, consider it to be the same
5140 expression as the other. */
5141 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5142 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5143 in1_p, low1, high1)
5144 && 0 != (tem = (build_range_check (type,
5145 lhs != 0 ? lhs
5146 : rhs != 0 ? rhs : integer_zero_node,
5147 in_p, low, high))))
5149 if (strict_overflow_p)
5150 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5151 return or_op ? invert_truthvalue (tem) : tem;
5154 /* On machines where the branch cost is expensive, if this is a
5155 short-circuited branch and the underlying object on both sides
5156 is the same, make a non-short-circuit operation. */
5157 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5158 && lhs != 0 && rhs != 0
5159 && (code == TRUTH_ANDIF_EXPR
5160 || code == TRUTH_ORIF_EXPR)
5161 && operand_equal_p (lhs, rhs, 0))
5163 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5164 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5165 which cases we can't do this. */
5166 if (simple_operand_p (lhs))
5167 return build2 (code == TRUTH_ANDIF_EXPR
5168 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5169 type, op0, op1);
5171 else if (lang_hooks.decls.global_bindings_p () == 0
5172 && ! CONTAINS_PLACEHOLDER_P (lhs))
5174 tree common = save_expr (lhs);
5176 if (0 != (lhs = build_range_check (type, common,
5177 or_op ? ! in0_p : in0_p,
5178 low0, high0))
5179 && (0 != (rhs = build_range_check (type, common,
5180 or_op ? ! in1_p : in1_p,
5181 low1, high1))))
5183 if (strict_overflow_p)
5184 fold_overflow_warning (warnmsg,
5185 WARN_STRICT_OVERFLOW_COMPARISON);
5186 return build2 (code == TRUTH_ANDIF_EXPR
5187 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5188 type, lhs, rhs);
5193 return 0;
5196 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5197 bit value. Arrange things so the extra bits will be set to zero if and
5198 only if C is sign-extended to its full width. If MASK is nonzero,
5199 it is an INTEGER_CST that should be AND'ed with the extra bits. */
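/* A worked instance, assuming an 8-bit field (P = 8) in a 32-bit mode:
   for C = 0xffffff80, the sign extension of the field value 0x80, the
   result is 0x00000080 with the extra bits all zero; for the
   non-extended C = 0x00000080 the result is 0xffffff80 with the extra
   bits all one.  */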
5201 static tree
5202 unextend (tree c, int p, int unsignedp, tree mask)
5204 tree type = TREE_TYPE (c);
5205 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5206 tree temp;
5208 if (p == modesize || unsignedp)
5209 return c;
5211 /* We work by getting just the sign bit into the low-order bit, then
5212 into the high-order bit, then sign-extend. We then XOR that value
5213 with C. */
5214 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5215 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5217 /* We must use a signed type in order to get an arithmetic right shift.
5218 However, we must also avoid introducing accidental overflows, so that
5219 a subsequent call to integer_zerop will work. Hence we must
5220 do the type conversion here. At this point, the constant is either
5221 zero or one, and the conversion to a signed type can never overflow.
5222 We could get an overflow if this conversion is done anywhere else. */
5223 if (TYPE_UNSIGNED (type))
5224 temp = fold_convert (signed_type_for (type), temp);
5226 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5227 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5228 if (mask != 0)
5229 temp = const_binop (BIT_AND_EXPR, temp,
5230 fold_convert (TREE_TYPE (c), mask), 0);
5231 /* If necessary, convert the type back to match the type of C. */
5232 if (TYPE_UNSIGNED (type))
5233 temp = fold_convert (type, temp);
5235 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5238 /* Find ways of folding logical expressions of LHS and RHS:
5239 Try to merge two comparisons to the same innermost item.
5240 Look for range tests like "ch >= '0' && ch <= '9'".
5241 Look for combinations of simple terms on machines with expensive branches
5242 and evaluate the RHS unconditionally.
5244 For example, if we have p->a == 2 && p->b == 4 and we can make an
5245 object large enough to span both A and B, we can do this with a comparison
5246 against the object ANDed with a mask.
5248 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5249 operations to do this with one comparison.
5251 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5252 function and the one above.
5254 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5255 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5257 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5258 two operands.
5260 We return the simplified tree or 0 if no optimization is possible. */
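/* A sketch of the constant case with an invented layout: if p->a and
   p->b are adjacent 8-bit fields covered by one 16-bit load, then
     p->a == 2 && p->b == 4
   can become a single masked comparison of that 16-bit word against a
   constant combining 2 and 4, at byte positions that depend on
   endianness.  */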
5262 static tree
5263 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5265 /* If this is the "or" of two comparisons, we can do something if
5266 the comparisons are NE_EXPR. If this is the "and", we can do something
5267 if the comparisons are EQ_EXPR. I.e.,
5268 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5270 WANTED_CODE is this operation code. For single bit fields, we can
5271 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5272 comparison for one-bit fields. */
5274 enum tree_code wanted_code;
5275 enum tree_code lcode, rcode;
5276 tree ll_arg, lr_arg, rl_arg, rr_arg;
5277 tree ll_inner, lr_inner, rl_inner, rr_inner;
5278 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5279 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5280 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5281 HOST_WIDE_INT lnbitsize, lnbitpos;
5282 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5283 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5284 enum machine_mode lnmode;
5285 tree ll_mask, lr_mask, rl_mask, rr_mask;
5286 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5287 tree l_const, r_const;
5288 tree lntype, result;
5289 int first_bit, end_bit;
5290 int volatilep;
5291 tree orig_lhs = lhs, orig_rhs = rhs;
5292 enum tree_code orig_code = code;
5294 /* Start by getting the comparison codes. Fail if anything is volatile.
5295 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5296 it were surrounded with a NE_EXPR. */
5298 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5299 return 0;
5301 lcode = TREE_CODE (lhs);
5302 rcode = TREE_CODE (rhs);
5304 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5306 lhs = build2 (NE_EXPR, truth_type, lhs,
5307 build_int_cst (TREE_TYPE (lhs), 0));
5308 lcode = NE_EXPR;
5311 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5313 rhs = build2 (NE_EXPR, truth_type, rhs,
5314 build_int_cst (TREE_TYPE (rhs), 0));
5315 rcode = NE_EXPR;
5318 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5319 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5320 return 0;
5322 ll_arg = TREE_OPERAND (lhs, 0);
5323 lr_arg = TREE_OPERAND (lhs, 1);
5324 rl_arg = TREE_OPERAND (rhs, 0);
5325 rr_arg = TREE_OPERAND (rhs, 1);
5327 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5328 if (simple_operand_p (ll_arg)
5329 && simple_operand_p (lr_arg))
5331 tree result;
5332 if (operand_equal_p (ll_arg, rl_arg, 0)
5333 && operand_equal_p (lr_arg, rr_arg, 0))
5335 result = combine_comparisons (code, lcode, rcode,
5336 truth_type, ll_arg, lr_arg);
5337 if (result)
5338 return result;
5340 else if (operand_equal_p (ll_arg, rr_arg, 0)
5341 && operand_equal_p (lr_arg, rl_arg, 0))
5343 result = combine_comparisons (code, lcode,
5344 swap_tree_comparison (rcode),
5345 truth_type, ll_arg, lr_arg);
5346 if (result)
5347 return result;
5351 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5354 /* If the RHS can be evaluated unconditionally and its operands are
5355 simple, it wins to evaluate the RHS unconditionally on machines
5356 with expensive branches. In this case, this isn't a comparison
5357 that can be merged. Avoid doing this if the RHS is a floating-point
5358 comparison since those can trap. */
5360 if (BRANCH_COST >= 2
5361 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5362 && simple_operand_p (rl_arg)
5363 && simple_operand_p (rr_arg))
5365 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5366 if (code == TRUTH_OR_EXPR
5367 && lcode == NE_EXPR && integer_zerop (lr_arg)
5368 && rcode == NE_EXPR && integer_zerop (rr_arg)
5369 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5370 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5371 return build2 (NE_EXPR, truth_type,
5372 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5373 ll_arg, rl_arg),
5374 build_int_cst (TREE_TYPE (ll_arg), 0));
5376 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5377 if (code == TRUTH_AND_EXPR
5378 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5379 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5380 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5381 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5382 return build2 (EQ_EXPR, truth_type,
5383 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5384 ll_arg, rl_arg),
5385 build_int_cst (TREE_TYPE (ll_arg), 0));
5387 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5389 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5390 return build2 (code, truth_type, lhs, rhs);
5391 return NULL_TREE;
5395 /* See if the comparisons can be merged. Then get all the parameters for
5396 each side. */
5398 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5399 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5400 return 0;
5402 volatilep = 0;
5403 ll_inner = decode_field_reference (ll_arg,
5404 &ll_bitsize, &ll_bitpos, &ll_mode,
5405 &ll_unsignedp, &volatilep, &ll_mask,
5406 &ll_and_mask);
5407 lr_inner = decode_field_reference (lr_arg,
5408 &lr_bitsize, &lr_bitpos, &lr_mode,
5409 &lr_unsignedp, &volatilep, &lr_mask,
5410 &lr_and_mask);
5411 rl_inner = decode_field_reference (rl_arg,
5412 &rl_bitsize, &rl_bitpos, &rl_mode,
5413 &rl_unsignedp, &volatilep, &rl_mask,
5414 &rl_and_mask);
5415 rr_inner = decode_field_reference (rr_arg,
5416 &rr_bitsize, &rr_bitpos, &rr_mode,
5417 &rr_unsignedp, &volatilep, &rr_mask,
5418 &rr_and_mask);
5420 /* The inner operation on the lhs of each comparison must be the same
5421 if we are to be able to do anything.
5422 Then see if we have constants. If not, the same must be true for
5423 the rhs's. */
5424 if (volatilep || ll_inner == 0 || rl_inner == 0
5425 || ! operand_equal_p (ll_inner, rl_inner, 0))
5426 return 0;
5428 if (TREE_CODE (lr_arg) == INTEGER_CST
5429 && TREE_CODE (rr_arg) == INTEGER_CST)
5430 l_const = lr_arg, r_const = rr_arg;
5431 else if (lr_inner == 0 || rr_inner == 0
5432 || ! operand_equal_p (lr_inner, rr_inner, 0))
5433 return 0;
5434 else
5435 l_const = r_const = 0;
5437 /* If either comparison code is not correct for our logical operation,
5438 fail. However, we can convert a one-bit comparison against zero into
5439 the opposite comparison against that bit being set in the field. */
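/* E.g. under "and", where EQ_EXPR is wanted, the one-bit test
   (x & 8) != 0 is recast as (x & 8) == 8 (constants invented).  */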
5441 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5442 if (lcode != wanted_code)
5444 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5446 /* Make the left operand unsigned, since we are only interested
5447 in the value of one bit. Otherwise we are doing the wrong
5448 thing below. */
5449 ll_unsignedp = 1;
5450 l_const = ll_mask;
5452 else
5453 return 0;
5456 /* This is analogous to the code for l_const above. */
5457 if (rcode != wanted_code)
5459 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5461 rl_unsignedp = 1;
5462 r_const = rl_mask;
5464 else
5465 return 0;
5468 /* See if we can find a mode that contains both fields being compared on
5469 the left. If we can't, fail. Otherwise, update all constants and masks
5470 to be relative to a field of that size. */
5471 first_bit = MIN (ll_bitpos, rl_bitpos);
5472 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5473 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5474 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5475 volatilep);
5476 if (lnmode == VOIDmode)
5477 return 0;
5479 lnbitsize = GET_MODE_BITSIZE (lnmode);
5480 lnbitpos = first_bit & ~ (lnbitsize - 1);
5481 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5482 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5484 if (BYTES_BIG_ENDIAN)
5486 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5487 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5490 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5491 size_int (xll_bitpos), 0);
5492 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5493 size_int (xrl_bitpos), 0);
5495 if (l_const)
5497 l_const = fold_convert (lntype, l_const);
5498 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5499 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5500 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5501 fold_build1 (BIT_NOT_EXPR,
5502 lntype, ll_mask),
5503 0)))
5505 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5507 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5510 if (r_const)
5512 r_const = fold_convert (lntype, r_const);
5513 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5514 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5515 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5516 fold_build1 (BIT_NOT_EXPR,
5517 lntype, rl_mask),
5518 0)))
5520 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5522 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5526 /* Handle the case of comparisons with constants. If there is something in
5527 common between the masks, those bits of the constants must be the same.
5528 If not, the condition is always false. Test for this to avoid generating
5529 incorrect code below. */
5530 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5531 if (! integer_zerop (result)
5532 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5533 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5535 if (wanted_code == NE_EXPR)
5537 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5538 return constant_boolean_node (true, truth_type);
5540 else
5542 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5543 return constant_boolean_node (false, truth_type);
5547 return NULL_TREE;
5550 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5551 constant. */
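/* E.g. (constants invented) MAX (x, 3) > 5 becomes x > 5, since the
   MAX can only affect values that fail the test anyway, while
   MAX (x, 3) > 2 is always true and folds to constant 1.  */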
5553 static tree
5554 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5556 tree arg0 = op0;
5557 enum tree_code op_code;
5558 tree comp_const;
5559 tree minmax_const;
5560 int consts_equal, consts_lt;
5561 tree inner;
5563 STRIP_SIGN_NOPS (arg0);
5565 op_code = TREE_CODE (arg0);
5566 minmax_const = TREE_OPERAND (arg0, 1);
5567 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5568 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5569 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5570 inner = TREE_OPERAND (arg0, 0);
5572 /* If something does not permit us to optimize, return NULL_TREE. */
5573 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5574 || TREE_CODE (comp_const) != INTEGER_CST
5575 || TREE_OVERFLOW (comp_const)
5576 || TREE_CODE (minmax_const) != INTEGER_CST
5577 || TREE_OVERFLOW (minmax_const))
5578 return NULL_TREE;
5580 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5581 and GT_EXPR, doing the rest with recursive calls using logical
5582 simplifications. */
5583 switch (code)
5585 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5587 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5588 type, op0, op1);
5589 if (tem)
5590 return invert_truthvalue (tem);
5591 return NULL_TREE;
5594 case GE_EXPR:
5595 return
5596 fold_build2 (TRUTH_ORIF_EXPR, type,
5597 optimize_minmax_comparison
5598 (EQ_EXPR, type, arg0, comp_const),
5599 optimize_minmax_comparison
5600 (GT_EXPR, type, arg0, comp_const));
5602 case EQ_EXPR:
5603 if (op_code == MAX_EXPR && consts_equal)
5604 /* MAX (X, 0) == 0 -> X <= 0 */
5605 return fold_build2 (LE_EXPR, type, inner, comp_const);
5607 else if (op_code == MAX_EXPR && consts_lt)
5608 /* MAX (X, 0) == 5 -> X == 5 */
5609 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5611 else if (op_code == MAX_EXPR)
5612 /* MAX (X, 0) == -1 -> false */
5613 return omit_one_operand (type, integer_zero_node, inner);
5615 else if (consts_equal)
5616 /* MIN (X, 0) == 0 -> X >= 0 */
5617 return fold_build2 (GE_EXPR, type, inner, comp_const);
5619 else if (consts_lt)
5620 /* MIN (X, 0) == 5 -> false */
5621 return omit_one_operand (type, integer_zero_node, inner);
5623 else
5624 /* MIN (X, 0) == -1 -> X == -1 */
5625 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5627 case GT_EXPR:
5628 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5629 /* MAX (X, 0) > 0 -> X > 0
5630 MAX (X, 0) > 5 -> X > 5 */
5631 return fold_build2 (GT_EXPR, type, inner, comp_const);
5633 else if (op_code == MAX_EXPR)
5634 /* MAX (X, 0) > -1 -> true */
5635 return omit_one_operand (type, integer_one_node, inner);
5637 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5638 /* MIN (X, 0) > 0 -> false
5639 MIN (X, 0) > 5 -> false */
5640 return omit_one_operand (type, integer_zero_node, inner);
5642 else
5643 /* MIN (X, 0) > -1 -> X > -1 */
5644 return fold_build2 (GT_EXPR, type, inner, comp_const);
5646 default:
5647 return NULL_TREE;
5651 /* T is an integer expression that is being multiplied, divided, or taken a
5652 modulus (CODE says which and what kind of divide or modulus) by a
5653 constant C. See if we can eliminate that operation by folding it with
5654 other operations already in T. WIDE_TYPE, if non-null, is a type that
5655 should be used for the computation if wider than our type.
5657 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5658 (X * 2) + (Y * 4). We must, however, be assured that either the original
5659 expression would not overflow or that overflow is undefined for the type
5660 in the language in question.
5662 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5663 the machine has a multiply-accumulate insn or that this is part of an
5664 addressing calculation.
5666 If we return a non-null expression, it is an equivalent form of the
5667 original computation, but need not be in the original type.
5669 We set *STRICT_OVERFLOW_P to true if the return value depends on
5670 signed overflow being undefined. Otherwise we do not change
5671 *STRICT_OVERFLOW_P. */
5673 static tree
5674 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5675 bool *strict_overflow_p)
5677 /* To avoid exponential search depth, refuse to allow recursion past
5678 three levels. Beyond that (1) it's highly unlikely that we'll find
5679 something interesting and (2) we've probably processed it before
5680 when we built the inner expression. */
5682 static int depth;
5683 tree ret;
5685 if (depth > 3)
5686 return NULL;
5688 depth++;
5689 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5690 depth--;
5692 return ret;
5695 static tree
5696 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5697 bool *strict_overflow_p)
5699 tree type = TREE_TYPE (t);
5700 enum tree_code tcode = TREE_CODE (t);
5701 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5702 > GET_MODE_SIZE (TYPE_MODE (type)))
5703 ? wide_type : type);
5704 tree t1, t2;
5705 int same_p = tcode == code;
5706 tree op0 = NULL_TREE, op1 = NULL_TREE;
5707 bool sub_strict_overflow_p;
5709 /* Don't deal with constants of zero here; they confuse the code below. */
5710 if (integer_zerop (c))
5711 return NULL_TREE;
5713 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5714 op0 = TREE_OPERAND (t, 0);
5716 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5717 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5719 /* Note that we need not handle conditional operations here since fold
5720 already handles those cases. So just do arithmetic here. */
5721 switch (tcode)
5723 case INTEGER_CST:
5724 /* For a constant, we can always simplify if we are a multiply
5725 or (for divide and modulus) if it is a multiple of our constant. */
5726 if (code == MULT_EXPR
5727 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5728 return const_binop (code, fold_convert (ctype, t),
5729 fold_convert (ctype, c), 0);
5730 break;
5732 CASE_CONVERT: case NON_LVALUE_EXPR:
5733 /* If op0 is an expression ... */
5734 if ((COMPARISON_CLASS_P (op0)
5735 || UNARY_CLASS_P (op0)
5736 || BINARY_CLASS_P (op0)
5737 || VL_EXP_CLASS_P (op0)
5738 || EXPRESSION_CLASS_P (op0))
5739 /* ... and has wrapping overflow, and its type is smaller
5740 than ctype, then we cannot pass through as widening. */
5741 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5742 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5743 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5744 && (TYPE_PRECISION (ctype)
5745 > TYPE_PRECISION (TREE_TYPE (op0))))
5746 /* ... or this is a truncation (t is narrower than op0),
5747 then we cannot pass through this narrowing. */
5748 || (TYPE_PRECISION (type)
5749 < TYPE_PRECISION (TREE_TYPE (op0)))
5750 /* ... or signedness changes for division or modulus,
5751 then we cannot pass through this conversion. */
5752 || (code != MULT_EXPR
5753 && (TYPE_UNSIGNED (ctype)
5754 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5755 /* ... or the operand's type has undefined overflow while the
5756 type converted to has not, then we cannot do the operation in
5757 the inner type as that would introduce undefined overflow. */
5758 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5759 && !TYPE_OVERFLOW_UNDEFINED (type))))
5760 break;
5762 /* Pass the constant down and see if we can make a simplification. If
5763 we can, replace this expression with the inner simplification for
5764 possible later conversion to our or some other type. */
5765 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5766 && TREE_CODE (t2) == INTEGER_CST
5767 && !TREE_OVERFLOW (t2)
5768 && (0 != (t1 = extract_muldiv (op0, t2, code,
5769 code == MULT_EXPR
5770 ? ctype : NULL_TREE,
5771 strict_overflow_p))))
5772 return t1;
5773 break;
5775 case ABS_EXPR:
5776 /* If widening the type changes it from signed to unsigned, then we
5777 must avoid building ABS_EXPR itself as unsigned. */
5778 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5780 tree cstype = (*signed_type_for) (ctype);
5781 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5782 != 0)
5784 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5785 return fold_convert (ctype, t1);
5787 break;
5789 /* If the constant is negative, we cannot simplify this. */
5790 if (tree_int_cst_sgn (c) == -1)
5791 break;
5792 /* FALLTHROUGH */
5793 case NEGATE_EXPR:
5794 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5795 != 0)
5796 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5797 break;
5799 case MIN_EXPR: case MAX_EXPR:
5800 /* If widening the type changes the signedness, then we can't perform
5801 this optimization as that changes the result. */
5802 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5803 break;
5805 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5806 sub_strict_overflow_p = false;
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5808 &sub_strict_overflow_p)) != 0
5809 && (t2 = extract_muldiv (op1, c, code, wide_type,
5810 &sub_strict_overflow_p)) != 0)
5812 if (tree_int_cst_sgn (c) < 0)
5813 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5814 if (sub_strict_overflow_p)
5815 *strict_overflow_p = true;
5816 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5817 fold_convert (ctype, t2));
5819 break;
5821 case LSHIFT_EXPR: case RSHIFT_EXPR:
5822 /* If the second operand is constant, this is a multiplication
5823 or floor division by a power of two, so we can treat it that
5824 way unless the multiplier or divisor overflows. Signed
5825 left-shift overflow is implementation-defined rather than
5826 undefined in C90, so do not convert signed left shift into
5827 multiplication. */
5828 if (TREE_CODE (op1) == INTEGER_CST
5829 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5830 /* const_binop may not detect overflow correctly,
5831 so check for it explicitly here. */
5832 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5833 && TREE_INT_CST_HIGH (op1) == 0
5834 && 0 != (t1 = fold_convert (ctype,
5835 const_binop (LSHIFT_EXPR,
5836 size_one_node,
5837 op1, 0)))
5838 && !TREE_OVERFLOW (t1))
5839 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5840 ? MULT_EXPR : FLOOR_DIV_EXPR,
5841 ctype, fold_convert (ctype, op0), t1),
5842 c, code, wide_type, strict_overflow_p);
5843 break;
5845 case PLUS_EXPR: case MINUS_EXPR:
5846 /* See if we can eliminate the operation on both sides. If we can, we
5847 can return a new PLUS or MINUS. If we can't, the only remaining
5848 cases where we can do anything are if the second operand is a
5849 constant. */
5850 sub_strict_overflow_p = false;
5851 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5852 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5853 if (t1 != 0 && t2 != 0
5854 && (code == MULT_EXPR
5855 /* If not multiplication, we can only do this if both operands
5856 are divisible by c. */
5857 || (multiple_of_p (ctype, op0, c)
5858 && multiple_of_p (ctype, op1, c))))
5860 if (sub_strict_overflow_p)
5861 *strict_overflow_p = true;
5862 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5863 fold_convert (ctype, t2));
5866 /* If this was a subtraction, negate OP1 and set it to be an addition.
5867 This simplifies the logic below. */
5868 if (tcode == MINUS_EXPR)
5869 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5871 if (TREE_CODE (op1) != INTEGER_CST)
5872 break;
5874 /* If either OP1 or C is negative, this optimization is not safe for
5875 some of the division and remainder types while for others we need
5876 to change the code. */
5877 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5879 if (code == CEIL_DIV_EXPR)
5880 code = FLOOR_DIV_EXPR;
5881 else if (code == FLOOR_DIV_EXPR)
5882 code = CEIL_DIV_EXPR;
5883 else if (code != MULT_EXPR
5884 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5885 break;
5888 /* If it's a multiply or a division/modulus operation of a multiple
5889 of our constant, do the operation and verify it doesn't overflow. */
5890 if (code == MULT_EXPR
5891 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5893 op1 = const_binop (code, fold_convert (ctype, op1),
5894 fold_convert (ctype, c), 0);
5895 /* We allow the constant to overflow with wrapping semantics. */
5896 if (op1 == 0
5897 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5898 break;
5900 else
5901 break;
5903 /* If we have an unsigned type that is not a sizetype, we cannot widen
5904 the operation since it will change the result if the original
5905 computation overflowed. */
5906 if (TYPE_UNSIGNED (ctype)
5907 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5908 && ctype != type)
5909 break;
5911 /* If we were able to eliminate our operation from the first side,
5912 apply our operation to the second side and reform the PLUS. */
5913 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5916 /* The last case is if we are a multiply. In that case, we can
5917 apply the distributive law to commute the multiply and addition
5918 if the multiplication of the constants doesn't overflow. */
5919 if (code == MULT_EXPR)
5920 return fold_build2 (tcode, ctype,
5921 fold_build2 (code, ctype,
5922 fold_convert (ctype, op0),
5923 fold_convert (ctype, c)),
5924 op1);
5926 break;
5928 case MULT_EXPR:
5929 /* We have a special case here if we are doing something like
5930 (C * 8) % 4 since we know that's zero. */
5931 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5932 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5933 /* If the multiplication can overflow we cannot optimize this.
5934 ??? Until we can properly mark individual operations as
5935 not overflowing we need to treat sizetype special here as
5936 stor-layout relies on this optimization to make
5937 DECL_FIELD_BIT_OFFSET always a constant. */
5938 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5939 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5940 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5941 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5942 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5944 *strict_overflow_p = true;
5945 return omit_one_operand (type, integer_zero_node, op0);
5948 /* ... fall through ... */
5950 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5951 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5952 /* If we can extract our operation from the LHS, do so and return a
5953 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5954 do something only if the second operand is a constant. */
5955 if (same_p
5956 && (t1 = extract_muldiv (op0, c, code, wide_type,
5957 strict_overflow_p)) != 0)
5958 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5959 fold_convert (ctype, op1));
5960 else if (tcode == MULT_EXPR && code == MULT_EXPR
5961 && (t1 = extract_muldiv (op1, c, code, wide_type,
5962 strict_overflow_p)) != 0)
5963 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5964 fold_convert (ctype, t1));
5965 else if (TREE_CODE (op1) != INTEGER_CST)
5966 return 0;
5968 /* If these are the same operation types, we can associate them
5969 assuming no overflow. */
5970 if (tcode == code
5971 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5972 fold_convert (ctype, c), 1))
5973 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5974 TREE_INT_CST_HIGH (t1),
5975 (TYPE_UNSIGNED (ctype)
5976 && tcode != MULT_EXPR) ? -1 : 1,
5977 TREE_OVERFLOW (t1)))
5978 && !TREE_OVERFLOW (t1))
5979 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5981 /* If these operations "cancel" each other, we have the main
5982 optimizations of this pass, which occur when either constant is a
5983 multiple of the other, in which case we replace this with either an
5984 operation of CODE or TCODE.
5986 If we have an unsigned type that is not a sizetype, we cannot do
5987 this since it will change the result if the original computation
5988 overflowed. */
5989 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5990 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5991 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5992 || (tcode == MULT_EXPR
5993 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5994 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5995 && code != MULT_EXPR)))
5997 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5999 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 *strict_overflow_p = true;
6001 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6002 fold_convert (ctype,
6003 const_binop (TRUNC_DIV_EXPR,
6004 op1, c, 0)));
6006 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6008 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6009 *strict_overflow_p = true;
6010 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6011 fold_convert (ctype,
6012 const_binop (TRUNC_DIV_EXPR,
6013 c, op1, 0)));
6016 break;
6018 default:
6019 break;
6022 return 0;
6025 /* Return a node which has the indicated constant VALUE (either 0 or
6026 1), and is of the indicated TYPE. */
6028 tree
6029 constant_boolean_node (int value, tree type)
6031 if (type == integer_type_node)
6032 return value ? integer_one_node : integer_zero_node;
6033 else if (type == boolean_type_node)
6034 return value ? boolean_true_node : boolean_false_node;
6035 else
6036 return build_int_cst (type, value);
6040 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6041 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6042 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6043 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6044 COND is the first argument to CODE; otherwise (as in the example
6045 given here), it is the second argument. TYPE is the type of the
6046 original expression. Return NULL_TREE if no simplification is
6047 possible. */
6049 static tree
6050 fold_binary_op_with_conditional_arg (enum tree_code code,
6051 tree type, tree op0, tree op1,
6052 tree cond, tree arg, int cond_first_p)
6054 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6055 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6056 tree test, true_value, false_value;
6057 tree lhs = NULL_TREE;
6058 tree rhs = NULL_TREE;
6060 /* This transformation is only worthwhile if we don't have to wrap
6061 arg in a SAVE_EXPR, and the operation can be simplified on at least
6062 one of the branches once it is pushed inside the COND_EXPR. */
6063 if (!TREE_CONSTANT (arg))
6064 return NULL_TREE;
6066 if (TREE_CODE (cond) == COND_EXPR)
6068 test = TREE_OPERAND (cond, 0);
6069 true_value = TREE_OPERAND (cond, 1);
6070 false_value = TREE_OPERAND (cond, 2);
6071 /* If this operand is a throw expression, then it does not make
6072 sense to try to perform a logical or arithmetic operation
6073 involving it. */
6074 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6075 lhs = true_value;
6076 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6077 rhs = false_value;
6079 else
6081 tree testtype = TREE_TYPE (cond);
6082 test = cond;
6083 true_value = constant_boolean_node (true, testtype);
6084 false_value = constant_boolean_node (false, testtype);
6087 arg = fold_convert (arg_type, arg);
6088 if (lhs == 0)
6090 true_value = fold_convert (cond_type, true_value);
6091 if (cond_first_p)
6092 lhs = fold_build2 (code, type, true_value, arg);
6093 else
6094 lhs = fold_build2 (code, type, arg, true_value);
6096 if (rhs == 0)
6098 false_value = fold_convert (cond_type, false_value);
6099 if (cond_first_p)
6100 rhs = fold_build2 (code, type, false_value, arg);
6101 else
6102 rhs = fold_build2 (code, type, arg, false_value);
6105 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6106 return fold_convert (type, test);
6110 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6112 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6113 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6114 ADDEND is the same as X.
6116 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6117 and finite. The problematic cases are when X is zero, and its mode
6118 has signed zeros. In the case of rounding towards -infinity,
6119 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6120 modes, X + 0 is not the same as X because -0 + 0 is 0. */
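/* For example, X + (-0.0) may be folded to X even when signed zeros
   are honored: the sign flip below turns it into the X - 0.0 case,
   which is safe unless sign-dependent rounding is in effect. X + 0.0
   must not be folded, since (-0.0) + 0.0 yields +0.0.  */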
6122 bool
6123 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6125 if (!real_zerop (addend))
6126 return false;
6128 /* Don't allow the fold with -fsignaling-nans. */
6129 if (HONOR_SNANS (TYPE_MODE (type)))
6130 return false;
6132 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6133 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6134 return true;
6136 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6137 if (TREE_CODE (addend) == REAL_CST
6138 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6139 negate = !negate;
6141 /* The mode has signed zeros, and we have to honor their sign.
6142 In this situation, there is only one case we can return true for.
6143 X - 0 is the same as X unless rounding towards -infinity is
6144 supported. */
6145 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6148 /* Subroutine of fold() that checks comparisons of built-in math
6149 functions against real constants.
6151 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6152 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6153 is the type of the result and ARG0 and ARG1 are the operands of the
6154 comparison. ARG1 must be a TREE_REAL_CST.
6156 The function returns the constant folded tree if a simplification
6157 can be made, and NULL_TREE otherwise. */
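/* For example, sqrt(x) > 2.0 folds to x > 4.0, and sqrt(x) < -1.0
   folds to constant false, since a square root is never negative.  */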
6159 static tree
6160 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6161 tree type, tree arg0, tree arg1)
6163 REAL_VALUE_TYPE c;
6165 if (BUILTIN_SQRT_P (fcode))
6167 tree arg = CALL_EXPR_ARG (arg0, 0);
6168 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6170 c = TREE_REAL_CST (arg1);
6171 if (REAL_VALUE_NEGATIVE (c))
6174 /* sqrt(x) == y, < y or <= y is always false, if y is negative. */
6174 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6175 return omit_one_operand (type, integer_zero_node, arg);
6177 /* sqrt(x) > y is always true, if y is negative and we
6178 don't care about NaNs, i.e. negative values of x. */
6179 if (code == NE_EXPR || !HONOR_NANS (mode))
6180 return omit_one_operand (type, integer_one_node, arg);
6182 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6183 return fold_build2 (GE_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg), dconst0));
6186 else if (code == GT_EXPR || code == GE_EXPR)
6188 REAL_VALUE_TYPE c2;
6190 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6191 real_convert (&c2, mode, &c2);
6193 if (REAL_VALUE_ISINF (c2))
6195 /* sqrt(x) > y is x == +Inf, when y is very large. */
6196 if (HONOR_INFINITIES (mode))
6197 return fold_build2 (EQ_EXPR, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6200 /* sqrt(x) > y is always false, when y is very large
6201 and we don't care about infinities. */
6202 return omit_one_operand (type, integer_zero_node, arg);
6205 /* sqrt(x) > c is the same as x > c*c. */
6206 return fold_build2 (code, type, arg,
6207 build_real (TREE_TYPE (arg), c2));
6209 else if (code == LT_EXPR || code == LE_EXPR)
6211 REAL_VALUE_TYPE c2;
6213 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6214 real_convert (&c2, mode, &c2);
6216 if (REAL_VALUE_ISINF (c2))
6218 /* sqrt(x) < y is always true, when y is a very large
6219 value and we don't care about NaNs or Infinities. */
6220 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6221 return omit_one_operand (type, integer_one_node, arg);
6223 /* sqrt(x) < y is x != +Inf when y is very large and we
6224 don't care about NaNs. */
6225 if (! HONOR_NANS (mode))
6226 return fold_build2 (NE_EXPR, type, arg,
6227 build_real (TREE_TYPE (arg), c2));
6229 /* sqrt(x) < y is x >= 0 when y is very large and we
6230 don't care about Infinities. */
6231 if (! HONOR_INFINITIES (mode))
6232 return fold_build2 (GE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg), dconst0));
6235 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6236 if (lang_hooks.decls.global_bindings_p () != 0
6237 || CONTAINS_PLACEHOLDER_P (arg))
6238 return NULL_TREE;
6240 arg = save_expr (arg);
6241 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6242 fold_build2 (GE_EXPR, type, arg,
6243 build_real (TREE_TYPE (arg),
6244 dconst0)),
6245 fold_build2 (NE_EXPR, type, arg,
6246 build_real (TREE_TYPE (arg),
6247 c2)));
6250 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6251 if (! HONOR_NANS (mode))
6252 return fold_build2 (code, type, arg,
6253 build_real (TREE_TYPE (arg), c2));
6255 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6256 if (lang_hooks.decls.global_bindings_p () == 0
6257 && ! CONTAINS_PLACEHOLDER_P (arg))
6259 arg = save_expr (arg);
6260 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6261 fold_build2 (GE_EXPR, type, arg,
6262 build_real (TREE_TYPE (arg),
6263 dconst0)),
6264 fold_build2 (code, type, arg,
6265 build_real (TREE_TYPE (arg),
6266 c2)));
6271 return NULL_TREE;
6274 /* Subroutine of fold() that optimizes comparisons against Infinities,
6275 either +Inf or -Inf.
6277 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6278 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6279 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6281 The function returns the constant folded tree if a simplification
6282 can be made, and NULL_TREE otherwise. */
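/* For example, for type double, x < +Inf folds to x <= DBL_MAX and
   x >= +Inf folds to x > DBL_MAX.  */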
6284 static tree
6285 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6287 enum machine_mode mode;
6288 REAL_VALUE_TYPE max;
6289 tree temp;
6290 bool neg;
6292 mode = TYPE_MODE (TREE_TYPE (arg0));
6294 /* For negative infinity swap the sense of the comparison. */
6295 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6296 if (neg)
6297 code = swap_tree_comparison (code);
6299 switch (code)
6301 case GT_EXPR:
6302 /* x > +Inf is always false, if we ignore sNaNs. */
6303 if (HONOR_SNANS (mode))
6304 return NULL_TREE;
6305 return omit_one_operand (type, integer_zero_node, arg0);
6307 case LE_EXPR:
6308 /* x <= +Inf is always true, if we don't care about NaNs. */
6309 if (! HONOR_NANS (mode))
6310 return omit_one_operand (type, integer_one_node, arg0);
6312 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6313 if (lang_hooks.decls.global_bindings_p () == 0
6314 && ! CONTAINS_PLACEHOLDER_P (arg0))
6316 arg0 = save_expr (arg0);
6317 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6319 break;
6321 case EQ_EXPR:
6322 case GE_EXPR:
6323 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6324 real_maxval (&max, neg, mode);
6325 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6326 arg0, build_real (TREE_TYPE (arg0), max));
6328 case LT_EXPR:
6329 /* x < +Inf is always equal to x <= DBL_MAX. */
6330 real_maxval (&max, neg, mode);
6331 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6334 case NE_EXPR:
6335 /* x != +Inf is always equal to !(x > DBL_MAX). */
6336 real_maxval (&max, neg, mode);
6337 if (! HONOR_NANS (mode))
6338 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6339 arg0, build_real (TREE_TYPE (arg0), max));
6341 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6345 default:
6346 break;
6349 return NULL_TREE;
6352 /* Subroutine of fold() that optimizes comparisons of a division by
6353 a nonzero integer constant against an integer constant, i.e.
6354 X/C1 op C2.
6356 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6357 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6358 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6360 The function returns the constant folded tree if a simplification
6361 can be made, and NULL_TREE otherwise. */
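/* For example, with signed truncating division, x/4 == 2 holds for
   exactly the values 8 through 11, so the comparison is rewritten as
   the range check 8 <= x && x <= 11.  */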
6363 static tree
6364 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6366 tree prod, tmp, hi, lo;
6367 tree arg00 = TREE_OPERAND (arg0, 0);
6368 tree arg01 = TREE_OPERAND (arg0, 1);
6369 unsigned HOST_WIDE_INT lpart;
6370 HOST_WIDE_INT hpart;
6371 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6372 bool neg_overflow;
6373 int overflow;
6375 /* We have to do this the hard way to detect unsigned overflow.
6376 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6377 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6378 TREE_INT_CST_HIGH (arg01),
6379 TREE_INT_CST_LOW (arg1),
6380 TREE_INT_CST_HIGH (arg1),
6381 &lpart, &hpart, unsigned_p);
6382 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6383 -1, overflow);
6384 neg_overflow = false;
6386 if (unsigned_p)
6388 tmp = int_const_binop (MINUS_EXPR, arg01,
6389 build_int_cst (TREE_TYPE (arg01), 1), 0);
6390 lo = prod;
6392 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6393 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6394 TREE_INT_CST_HIGH (prod),
6395 TREE_INT_CST_LOW (tmp),
6396 TREE_INT_CST_HIGH (tmp),
6397 &lpart, &hpart, unsigned_p);
6398 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6399 -1, overflow | TREE_OVERFLOW (prod));
6401 else if (tree_int_cst_sgn (arg01) >= 0)
6403 tmp = int_const_binop (MINUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1), 0);
6405 switch (tree_int_cst_sgn (arg1))
6407 case -1:
6408 neg_overflow = true;
6409 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6410 hi = prod;
6411 break;
6413 case 0:
6414 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6415 hi = tmp;
6416 break;
6418 case 1:
6419 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6420 lo = prod;
6421 break;
6423 default:
6424 gcc_unreachable ();
6427 else
6429 /* A negative divisor reverses the relational operators. */
6430 code = swap_tree_comparison (code);
6432 tmp = int_const_binop (PLUS_EXPR, arg01,
6433 build_int_cst (TREE_TYPE (arg01), 1), 0);
6434 switch (tree_int_cst_sgn (arg1))
6436 case -1:
6437 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6438 lo = prod;
6439 break;
6441 case 0:
6442 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6443 lo = tmp;
6444 break;
6446 case 1:
6447 neg_overflow = true;
6448 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6449 hi = prod;
6450 break;
6452 default:
6453 gcc_unreachable ();
6457 switch (code)
6459 case EQ_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand (type, integer_zero_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2 (GE_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2 (LE_EXPR, type, arg00, hi);
6466 return build_range_check (type, arg00, 1, lo, hi);
6468 case NE_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand (type, integer_one_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2 (LT_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2 (GT_EXPR, type, arg00, hi);
6475 return build_range_check (type, arg00, 0, lo, hi);
6477 case LT_EXPR:
6478 if (TREE_OVERFLOW (lo))
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand (type, tmp, arg00);
6483 return fold_build2 (LT_EXPR, type, arg00, lo);
6485 case LE_EXPR:
6486 if (TREE_OVERFLOW (hi))
6488 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6489 return omit_one_operand (type, tmp, arg00);
6491 return fold_build2 (LE_EXPR, type, arg00, hi);
6493 case GT_EXPR:
6494 if (TREE_OVERFLOW (hi))
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand (type, tmp, arg00);
6499 return fold_build2 (GT_EXPR, type, arg00, hi);
6501 case GE_EXPR:
6502 if (TREE_OVERFLOW (lo))
6504 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6505 return omit_one_operand (type, tmp, arg00);
6507 return fold_build2 (GE_EXPR, type, arg00, lo);
6509 default:
6510 break;
6513 return NULL_TREE;
6517 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6518 equality/inequality test, then return a simplified form of the test
6519 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6520 desired result type. */
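/* For example, if A is a 32-bit unsigned value, (A & 0x80000000) != 0
   tests exactly the sign bit and becomes (int) A < 0, where the cast
   is to the corresponding signed type.  */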
6522 static tree
6523 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6524 tree result_type)
6526 /* If this is testing a single bit, we can optimize the test. */
6527 if ((code == NE_EXPR || code == EQ_EXPR)
6528 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6529 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6531 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6532 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6533 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6535 if (arg00 != NULL_TREE
6536 /* This is only a win if casting to a signed type is cheap,
6537 i.e. when arg00's type is not a partial mode. */
6538 && TYPE_PRECISION (TREE_TYPE (arg00))
6539 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6541 tree stype = signed_type_for (TREE_TYPE (arg00));
6542 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6543 result_type, fold_convert (stype, arg00),
6544 build_int_cst (stype, 0));
6548 return NULL_TREE;
6551 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6552 equality/inequality test, then return a simplified form of
6553 the test using shifts and logical operations. Otherwise return
6554 NULL. RESULT_TYPE is the desired result type. */
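/* For example, (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1), with the shift done in an unsigned
   (or, depending on LOAD_EXTEND_OP, signed) intermediate type.  */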
6556 tree
6557 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6558 tree result_type)
6560 /* If this is testing a single bit, we can optimize the test. */
6561 if ((code == NE_EXPR || code == EQ_EXPR)
6562 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6563 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6565 tree inner = TREE_OPERAND (arg0, 0);
6566 tree type = TREE_TYPE (arg0);
6567 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6568 enum machine_mode operand_mode = TYPE_MODE (type);
6569 int ops_unsigned;
6570 tree signed_type, unsigned_type, intermediate_type;
6571 tree tem, one;
6573 /* First, see if we can fold the single bit test into a sign-bit
6574 test. */
6575 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6576 result_type);
6577 if (tem)
6578 return tem;
6580 /* Otherwise we have (A & C) != 0 where C is a single bit,
6581 convert that into ((A >> C2) & 1), where C2 = log2(C).
6582 Similarly for (A & C) == 0. */
6584 /* If INNER is a right shift of a constant and it plus BITNUM does
6585 not overflow, adjust BITNUM and INNER. */
6586 if (TREE_CODE (inner) == RSHIFT_EXPR
6587 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6588 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6589 && bitnum < TYPE_PRECISION (type)
6590 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6591 bitnum - TYPE_PRECISION (type)))
6593 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6594 inner = TREE_OPERAND (inner, 0);
6597 /* If we are going to be able to omit the AND below, we must do our
6598 operations as unsigned. If we must use the AND, we have a choice.
6599 Normally unsigned is faster, but for some machines signed is. */
6600 #ifdef LOAD_EXTEND_OP
6601 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6602 && !flag_syntax_only) ? 0 : 1;
6603 #else
6604 ops_unsigned = 1;
6605 #endif
6607 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6608 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6609 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6610 inner = fold_convert (intermediate_type, inner);
6612 if (bitnum != 0)
6613 inner = build2 (RSHIFT_EXPR, intermediate_type,
6614 inner, size_int (bitnum));
6616 one = build_int_cst (intermediate_type, 1);
6618 if (code == EQ_EXPR)
6619 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6621 /* Put the AND last so it can combine with more things. */
6622 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6624 /* Make sure to return the proper type. */
6625 inner = fold_convert (result_type, inner);
6627 return inner;
6629 return NULL_TREE;
6632 /* Check whether we are allowed to reorder operands arg0 and arg1,
6633 such that the evaluation of arg1 occurs before arg0. */
6635 static bool
6636 reorder_operands_p (const_tree arg0, const_tree arg1)
6638 if (! flag_evaluation_order)
6639 return true;
6640 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6641 return true;
6642 return ! TREE_SIDE_EFFECTS (arg0)
6643 && ! TREE_SIDE_EFFECTS (arg1);
6646 /* Test whether it is preferable to swap two operands, ARG0 and
6647 ARG1, for example because ARG0 is an integer constant and ARG1
6648 isn't. If REORDER is true, only recommend swapping if we can
6649 evaluate the operands in reverse order. */
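/* For example, callers such as fold_binary use this to canonicalize
   5 + X into X + 5 and 5 < X into X > 5, so that later pattern
   matching need only consider one operand order.  */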
6651 bool
6652 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6654 STRIP_SIGN_NOPS (arg0);
6655 STRIP_SIGN_NOPS (arg1);
6657 if (TREE_CODE (arg1) == INTEGER_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == INTEGER_CST)
6660 return 1;
6662 if (TREE_CODE (arg1) == REAL_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == REAL_CST)
6665 return 1;
6667 if (TREE_CODE (arg1) == FIXED_CST)
6668 return 0;
6669 if (TREE_CODE (arg0) == FIXED_CST)
6670 return 1;
6672 if (TREE_CODE (arg1) == COMPLEX_CST)
6673 return 0;
6674 if (TREE_CODE (arg0) == COMPLEX_CST)
6675 return 1;
6677 if (TREE_CONSTANT (arg1))
6678 return 0;
6679 if (TREE_CONSTANT (arg0))
6680 return 1;
6682 if (optimize_size)
6683 return 0;
6685 if (reorder && flag_evaluation_order
6686 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6687 return 0;
6689 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6690 for commutative and comparison operators. Ensuring a canonical
6691 form allows the optimizers to find additional redundancies without
6692 having to explicitly check for both orderings. */
6693 if (TREE_CODE (arg0) == SSA_NAME
6694 && TREE_CODE (arg1) == SSA_NAME
6695 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6696 return 1;
6698 /* Put SSA_NAMEs last. */
6699 if (TREE_CODE (arg1) == SSA_NAME)
6700 return 0;
6701 if (TREE_CODE (arg0) == SSA_NAME)
6702 return 1;
6704 /* Put variables last. */
6705 if (DECL_P (arg1))
6706 return 0;
6707 if (DECL_P (arg0))
6708 return 1;
6710 return 0;
6713 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6714 ARG0 is extended to a wider type. */
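/* For example, if C has type unsigned char, (int) C == 1000 folds to
   constant false because 1000 does not fit in unsigned char, while
   (int) C == 100 can be done as the narrower comparison C == 100.  */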
6716 static tree
6717 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6719 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6720 tree arg1_unw;
6721 tree shorter_type, outer_type;
6722 tree min, max;
6723 bool above, below;
6725 if (arg0_unw == arg0)
6726 return NULL_TREE;
6727 shorter_type = TREE_TYPE (arg0_unw);
6729 #ifdef HAVE_canonicalize_funcptr_for_compare
6730 /* Disable this optimization if we're casting a function pointer
6731 type on targets that require function pointer canonicalization. */
6732 if (HAVE_canonicalize_funcptr_for_compare
6733 && TREE_CODE (shorter_type) == POINTER_TYPE
6734 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6735 return NULL_TREE;
6736 #endif
6738 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6739 return NULL_TREE;
6741 arg1_unw = get_unwidened (arg1, NULL_TREE);
6743 /* If possible, express the comparison in the shorter mode. */
6744 if ((code == EQ_EXPR || code == NE_EXPR
6745 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6746 && (TREE_TYPE (arg1_unw) == shorter_type
6747 || ((TYPE_PRECISION (shorter_type)
6748 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6749 && (TYPE_UNSIGNED (shorter_type)
6750 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6751 || (TREE_CODE (arg1_unw) == INTEGER_CST
6752 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6753 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6754 && int_fits_type_p (arg1_unw, shorter_type))))
6755 return fold_build2 (code, type, arg0_unw,
6756 fold_convert (shorter_type, arg1_unw));
6758 if (TREE_CODE (arg1_unw) != INTEGER_CST
6759 || TREE_CODE (shorter_type) != INTEGER_TYPE
6760 || !int_fits_type_p (arg1_unw, shorter_type))
6761 return NULL_TREE;
6763 /* If we are comparing with an integer that does not fit into the range
6764 of the shorter type, the result is known. */
6765 outer_type = TREE_TYPE (arg1_unw);
6766 min = lower_bound_in_type (outer_type, shorter_type);
6767 max = upper_bound_in_type (outer_type, shorter_type);
6769 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6770 max, arg1_unw));
6771 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6772 arg1_unw, min));
6774 switch (code)
6776 case EQ_EXPR:
6777 if (above || below)
6778 return omit_one_operand (type, integer_zero_node, arg0);
6779 break;
6781 case NE_EXPR:
6782 if (above || below)
6783 return omit_one_operand (type, integer_one_node, arg0);
6784 break;
6786 case LT_EXPR:
6787 case LE_EXPR:
6788 if (above)
6789 return omit_one_operand (type, integer_one_node, arg0);
6790 else if (below)
6791 return omit_one_operand (type, integer_zero_node, arg0);
6793 case GT_EXPR:
6794 case GE_EXPR:
6795 if (above)
6796 return omit_one_operand (type, integer_zero_node, arg0);
6797 else if (below)
6798 return omit_one_operand (type, integer_one_node, arg0);
6800 default:
6801 break;
6804 return NULL_TREE;
6807 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6808 ARG0 just the signedness is changed. */
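/* For example, if U has type unsigned int, (int) U == 5 becomes
   U == 5U, since equality does not depend on the sign change.  */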
6810 static tree
6811 fold_sign_changed_comparison (enum tree_code code, tree type,
6812 tree arg0, tree arg1)
6814 tree arg0_inner;
6815 tree inner_type, outer_type;
6817 if (!CONVERT_EXPR_P (arg0))
6818 return NULL_TREE;
6820 outer_type = TREE_TYPE (arg0);
6821 arg0_inner = TREE_OPERAND (arg0, 0);
6822 inner_type = TREE_TYPE (arg0_inner);
6824 #ifdef HAVE_canonicalize_funcptr_for_compare
6825 /* Disable this optimization if we're casting a function pointer
6826 type on targets that require function pointer canonicalization. */
6827 if (HAVE_canonicalize_funcptr_for_compare
6828 && TREE_CODE (inner_type) == POINTER_TYPE
6829 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6830 return NULL_TREE;
6831 #endif
6833 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6834 return NULL_TREE;
6836 /* If the conversion is from an integral subtype to its basetype
6837 leave it alone. */
6838 if (TREE_TYPE (inner_type) == outer_type)
6839 return NULL_TREE;
6841 if (TREE_CODE (arg1) != INTEGER_CST
6842 && !(CONVERT_EXPR_P (arg1)
6843 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6844 return NULL_TREE;
6846 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6847 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6848 && code != NE_EXPR
6849 && code != EQ_EXPR)
6850 return NULL_TREE;
6852 if (TREE_CODE (arg1) == INTEGER_CST)
6853 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6854 TREE_INT_CST_HIGH (arg1), 0,
6855 TREE_OVERFLOW (arg1));
6856 else
6857 arg1 = fold_convert (inner_type, arg1);
6859 return fold_build2 (code, type, arg0_inner, arg1);
6862 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6863 the step of the array. Reconstructs s and delta in the case of s * delta
6864 being an integer constant (and thus already folded).
6865 ADDR is the address. OP1 is the multiplicative expression.
6866 If the function succeeds, the new address expression is returned. Otherwise
6867 NULL_TREE is returned. */
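/* For example, given int a[10] (a 4-byte step, assuming a 4-byte
   int), &a[2] p+ i * 4 is rewritten as &a[2 + i].  */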
6869 static tree
6870 try_move_mult_to_index (tree addr, tree op1)
6872 tree s, delta, step;
6873 tree ref = TREE_OPERAND (addr, 0), pref;
6874 tree ret, pos;
6875 tree itype;
6876 bool mdim = false;
6878 /* Strip the nops that might be added when converting op1 to sizetype. */
6879 STRIP_NOPS (op1);
6881 /* Canonicalize op1 into a possibly non-constant delta
6882 and an INTEGER_CST s. */
6883 if (TREE_CODE (op1) == MULT_EXPR)
6885 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6887 STRIP_NOPS (arg0);
6888 STRIP_NOPS (arg1);
6890 if (TREE_CODE (arg0) == INTEGER_CST)
6892 s = arg0;
6893 delta = arg1;
6895 else if (TREE_CODE (arg1) == INTEGER_CST)
6897 s = arg1;
6898 delta = arg0;
6900 else
6901 return NULL_TREE;
6903 else if (TREE_CODE (op1) == INTEGER_CST)
6905 delta = op1;
6906 s = NULL_TREE;
6908 else
6910 /* Pretend op1 is delta * 1. */
6911 delta = op1;
6912 s = integer_one_node;
6915 for (;; ref = TREE_OPERAND (ref, 0))
6917 if (TREE_CODE (ref) == ARRAY_REF)
6919 /* Remember if this was a multi-dimensional array. */
6920 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6921 mdim = true;
6923 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6924 if (! itype)
6925 continue;
6927 step = array_ref_element_size (ref);
6928 if (TREE_CODE (step) != INTEGER_CST)
6929 continue;
6931 if (s)
6933 if (! tree_int_cst_equal (step, s))
6934 continue;
6936 else
6938 /* Check whether delta is a multiple of the step. */
6939 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6940 if (! tmp)
6941 continue;
6942 delta = tmp;
6945 /* Only fold here if we can verify we do not overflow one
6946 dimension of a multi-dimensional array. */
6947 if (mdim)
6949 tree tmp;
6951 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6952 || !INTEGRAL_TYPE_P (itype)
6953 || !TYPE_MAX_VALUE (itype)
6954 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6955 continue;
6957 tmp = fold_binary (PLUS_EXPR, itype,
6958 fold_convert (itype,
6959 TREE_OPERAND (ref, 1)),
6960 fold_convert (itype, delta));
6961 if (!tmp
6962 || TREE_CODE (tmp) != INTEGER_CST
6963 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6964 continue;
6967 break;
6969 else
6970 mdim = false;
6972 if (!handled_component_p (ref))
6973 return NULL_TREE;
6976 /* We found a suitable array reference. So copy everything up to it,
6977 and replace the index. */
6979 pref = TREE_OPERAND (addr, 0);
6980 ret = copy_node (pref);
6981 pos = ret;
6983 while (pref != ref)
6985 pref = TREE_OPERAND (pref, 0);
6986 TREE_OPERAND (pos, 0) = copy_node (pref);
6987 pos = TREE_OPERAND (pos, 0);
6990 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6991 fold_convert (itype,
6992 TREE_OPERAND (pos, 1)),
6993 fold_convert (itype, delta));
6995 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6999 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7000 means A >= Y && A != MAX, but in this case we know that
7001 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
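/* For example, i < n && i + 1 > j folds to i < n && i >= j; the
   conjoined bound i < n guarantees that i + 1 does not wrap.  */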
7003 static tree
7004 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7006 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7008 if (TREE_CODE (bound) == LT_EXPR)
7009 a = TREE_OPERAND (bound, 0);
7010 else if (TREE_CODE (bound) == GT_EXPR)
7011 a = TREE_OPERAND (bound, 1);
7012 else
7013 return NULL_TREE;
7015 typea = TREE_TYPE (a);
7016 if (!INTEGRAL_TYPE_P (typea)
7017 && !POINTER_TYPE_P (typea))
7018 return NULL_TREE;
7020 if (TREE_CODE (ineq) == LT_EXPR)
7022 a1 = TREE_OPERAND (ineq, 1);
7023 y = TREE_OPERAND (ineq, 0);
7025 else if (TREE_CODE (ineq) == GT_EXPR)
7027 a1 = TREE_OPERAND (ineq, 0);
7028 y = TREE_OPERAND (ineq, 1);
7030 else
7031 return NULL_TREE;
7033 if (TREE_TYPE (a1) != typea)
7034 return NULL_TREE;
7036 if (POINTER_TYPE_P (typea))
7038 /* Convert the pointer types into integers before taking the difference. */
7039 tree ta = fold_convert (ssizetype, a);
7040 tree ta1 = fold_convert (ssizetype, a1);
7041 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7043 else
7044 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7046 if (!diff || !integer_onep (diff))
7047 return NULL_TREE;
7049 return fold_build2 (GE_EXPR, type, a, y);
7052 /* Fold a sum or difference of at least one multiplication.
7053 Returns the folded tree or NULL if no simplification could be made. */
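/* For example, X*3 + Y*3 becomes (X + Y) * 3 and X*7 + X becomes
   X * 8, while the power-of-two path below turns A*12 + B*4 into
   (A*3 + B) * 4.  */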
7055 static tree
7056 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7058 tree arg00, arg01, arg10, arg11;
7059 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7061 /* (A * C) +- (B * C) -> (A+-B) * C.
7062 (A * C) +- A -> A * (C+-1).
7063 We are most concerned about the case where C is a constant,
7064 but other combinations show up during loop reduction. Since
7065 it is not difficult, try all four possibilities. */
7067 if (TREE_CODE (arg0) == MULT_EXPR)
7069 arg00 = TREE_OPERAND (arg0, 0);
7070 arg01 = TREE_OPERAND (arg0, 1);
7072 else if (TREE_CODE (arg0) == INTEGER_CST)
7074 arg00 = build_one_cst (type);
7075 arg01 = arg0;
7077 else
7079 /* We cannot generate constant 1 for fract. */
7080 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7081 return NULL_TREE;
7082 arg00 = arg0;
7083 arg01 = build_one_cst (type);
7085 if (TREE_CODE (arg1) == MULT_EXPR)
7087 arg10 = TREE_OPERAND (arg1, 0);
7088 arg11 = TREE_OPERAND (arg1, 1);
7090 else if (TREE_CODE (arg1) == INTEGER_CST)
7092 arg10 = build_one_cst (type);
7093 arg11 = arg1;
7095 else
7097 /* We cannot generate constant 1 for fract. */
7098 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7099 return NULL_TREE;
7100 arg10 = arg1;
7101 arg11 = build_one_cst (type);
7103 same = NULL_TREE;
7105 if (operand_equal_p (arg01, arg11, 0))
7106 same = arg01, alt0 = arg00, alt1 = arg10;
7107 else if (operand_equal_p (arg00, arg10, 0))
7108 same = arg00, alt0 = arg01, alt1 = arg11;
7109 else if (operand_equal_p (arg00, arg11, 0))
7110 same = arg00, alt0 = arg01, alt1 = arg10;
7111 else if (operand_equal_p (arg01, arg10, 0))
7112 same = arg01, alt0 = arg00, alt1 = arg11;
7114 /* No identical multiplicands; see if we can find a common
7115 power-of-two factor in non-power-of-two multiplies. This
7116 can help in multi-dimensional array access. */
7117 else if (host_integerp (arg01, 0)
7118 && host_integerp (arg11, 0))
7120 HOST_WIDE_INT int01, int11, tmp;
7121 bool swap = false;
7122 tree maybe_same;
7123 int01 = TREE_INT_CST_LOW (arg01);
7124 int11 = TREE_INT_CST_LOW (arg11);
7126 /* Move min of absolute values to int11. */
7127 if ((int01 >= 0 ? int01 : -int01)
7128 < (int11 >= 0 ? int11 : -int11))
7130 tmp = int01, int01 = int11, int11 = tmp;
7131 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7132 maybe_same = arg01;
7133 swap = true;
7135 else
7136 maybe_same = arg11;
7138 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7140 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7141 build_int_cst (TREE_TYPE (arg00),
7142 int01 / int11));
7143 alt1 = arg10;
7144 same = maybe_same;
7145 if (swap)
7146 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7150 if (same)
7151 return fold_build2 (MULT_EXPR, type,
7152 fold_build2 (code, type,
7153 fold_convert (type, alt0),
7154 fold_convert (type, alt1)),
7155 fold_convert (type, same));
7157 return NULL_TREE;
7160 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7161 specified by EXPR into the buffer PTR of length LEN bytes.
7162 Return the number of bytes placed in the buffer, or zero
7163 upon failure. */
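/* For example, the 32-bit constant 0x11223344 is encoded as the
   bytes 44 33 22 11 on a little-endian target and 11 22 33 44 on a
   big-endian one.  */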
7165 static int
7166 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7168 tree type = TREE_TYPE (expr);
7169 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7170 int byte, offset, word, words;
7171 unsigned char value;
7173 if (total_bytes > len)
7174 return 0;
7175 words = total_bytes / UNITS_PER_WORD;
7177 for (byte = 0; byte < total_bytes; byte++)
7179 int bitpos = byte * BITS_PER_UNIT;
7180 if (bitpos < HOST_BITS_PER_WIDE_INT)
7181 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7182 else
7183 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7184 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7186 if (total_bytes > UNITS_PER_WORD)
7188 word = byte / UNITS_PER_WORD;
7189 if (WORDS_BIG_ENDIAN)
7190 word = (words - 1) - word;
7191 offset = word * UNITS_PER_WORD;
7192 if (BYTES_BIG_ENDIAN)
7193 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7194 else
7195 offset += byte % UNITS_PER_WORD;
7197 else
7198 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7199 ptr[offset] = value;
7201 return total_bytes;
7205 /* Subroutine of native_encode_expr. Encode the REAL_CST
7206 specified by EXPR into the buffer PTR of length LEN bytes.
7207 Return the number of bytes placed in the buffer, or zero
7208 upon failure. */
7210 static int
7211 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7213 tree type = TREE_TYPE (expr);
7214 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7215 int byte, offset, word, words, bitpos;
7216 unsigned char value;
7218 /* There are always 32 bits in each long, no matter the size of
7219 the host's long. We handle floating point representations with
7220 up to 192 bits. */
7221 long tmp[6];
7223 if (total_bytes > len)
7224 return 0;
7225 words = 32 / UNITS_PER_WORD;
7227 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7229 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7230 bitpos += BITS_PER_UNIT)
7232 byte = (bitpos / BITS_PER_UNIT) & 3;
7233 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7235 if (UNITS_PER_WORD < 4)
7237 word = byte / UNITS_PER_WORD;
7238 if (WORDS_BIG_ENDIAN)
7239 word = (words - 1) - word;
7240 offset = word * UNITS_PER_WORD;
7241 if (BYTES_BIG_ENDIAN)
7242 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7243 else
7244 offset += byte % UNITS_PER_WORD;
7246 else
7247 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7248 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7250 return total_bytes;
7253 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7254 specified by EXPR into the buffer PTR of length LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero
7256 upon failure. */
7258 static int
7259 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7261 int rsize, isize;
7262 tree part;
7264 part = TREE_REALPART (expr);
7265 rsize = native_encode_expr (part, ptr, len);
7266 if (rsize == 0)
7267 return 0;
7268 part = TREE_IMAGPART (expr);
7269 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7270 if (isize != rsize)
7271 return 0;
7272 return rsize + isize;
7276 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7277 specified by EXPR into the buffer PTR of length LEN bytes.
7278 Return the number of bytes placed in the buffer, or zero
7279 upon failure. */
7281 static int
7282 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7284 int i, size, offset, count;
7285 tree itype, elem, elements;
7287 offset = 0;
7288 elements = TREE_VECTOR_CST_ELTS (expr);
7289 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7290 itype = TREE_TYPE (TREE_TYPE (expr));
7291 size = GET_MODE_SIZE (TYPE_MODE (itype));
7292 for (i = 0; i < count; i++)
7294 if (elements)
7296 elem = TREE_VALUE (elements);
7297 elements = TREE_CHAIN (elements);
7299 else
7300 elem = NULL_TREE;
7302 if (elem)
7304 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7305 return 0;
7307 else
7309 if (offset + size > len)
7310 return 0;
7311 memset (ptr+offset, 0, size);
7313 offset += size;
7315 return offset;
7319 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7320 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7321 buffer PTR of length LEN bytes. Return the number of bytes
7322 placed in the buffer, or zero upon failure. */
7324 int
7325 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7327 switch (TREE_CODE (expr))
7329 case INTEGER_CST:
7330 return native_encode_int (expr, ptr, len);
7332 case REAL_CST:
7333 return native_encode_real (expr, ptr, len);
7335 case COMPLEX_CST:
7336 return native_encode_complex (expr, ptr, len);
7338 case VECTOR_CST:
7339 return native_encode_vector (expr, ptr, len);
7341 default:
7342 return 0;
7347 /* Subroutine of native_interpret_expr. Interpret the contents of
7348 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7349 If the buffer cannot be interpreted, return NULL_TREE. */
7351 static tree
7352 native_interpret_int (tree type, const unsigned char *ptr, int len)
7354 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7355 int byte, offset, word, words;
7356 unsigned char value;
7357 unsigned HOST_WIDE_INT lo = 0;
7358 HOST_WIDE_INT hi = 0;
7360 if (total_bytes > len)
7361 return NULL_TREE;
7362 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7363 return NULL_TREE;
7364 words = total_bytes / UNITS_PER_WORD;
7366 for (byte = 0; byte < total_bytes; byte++)
7368 int bitpos = byte * BITS_PER_UNIT;
7369 if (total_bytes > UNITS_PER_WORD)
7371 word = byte / UNITS_PER_WORD;
7372 if (WORDS_BIG_ENDIAN)
7373 word = (words - 1) - word;
7374 offset = word * UNITS_PER_WORD;
7375 if (BYTES_BIG_ENDIAN)
7376 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7377 else
7378 offset += byte % UNITS_PER_WORD;
7380 else
7381 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7382 value = ptr[offset];
7384 if (bitpos < HOST_BITS_PER_WIDE_INT)
7385 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7386 else
7387 hi |= (unsigned HOST_WIDE_INT) value
7388 << (bitpos - HOST_BITS_PER_WIDE_INT);
7391 return build_int_cst_wide_type (type, lo, hi);
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7399 static tree
7400 native_interpret_real (tree type, const unsigned char *ptr, int len)
7402 enum machine_mode mode = TYPE_MODE (type);
7403 int total_bytes = GET_MODE_SIZE (mode);
7404 int byte, offset, word, words, bitpos;
7405 unsigned char value;
7406 /* There are always 32 bits in each long, no matter the size of
7407 the host's long. We handle floating point representations with
7408 up to 192 bits. */
7409 REAL_VALUE_TYPE r;
7410 long tmp[6];
7412 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7413 if (total_bytes > len || total_bytes > 24)
7414 return NULL_TREE;
7415 words = 32 / UNITS_PER_WORD;
7417 memset (tmp, 0, sizeof (tmp));
7418 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7419 bitpos += BITS_PER_UNIT)
7421 byte = (bitpos / BITS_PER_UNIT) & 3;
7422 if (UNITS_PER_WORD < 4)
7424 word = byte / UNITS_PER_WORD;
7425 if (WORDS_BIG_ENDIAN)
7426 word = (words - 1) - word;
7427 offset = word * UNITS_PER_WORD;
7428 if (BYTES_BIG_ENDIAN)
7429 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7430 else
7431 offset += byte % UNITS_PER_WORD;
7433 else
7434 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7435 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7437 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7440 real_from_target (&r, tmp, mode);
7441 return build_real (type, r);
7445 /* Subroutine of native_interpret_expr. Interpret the contents of
7446 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7447 If the buffer cannot be interpreted, return NULL_TREE. */
7449 static tree
7450 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7452 tree etype, rpart, ipart;
7453 int size;
7455 etype = TREE_TYPE (type);
7456 size = GET_MODE_SIZE (TYPE_MODE (etype));
7457 if (size * 2 > len)
7458 return NULL_TREE;
7459 rpart = native_interpret_expr (etype, ptr, size);
7460 if (!rpart)
7461 return NULL_TREE;
7462 ipart = native_interpret_expr (etype, ptr+size, size);
7463 if (!ipart)
7464 return NULL_TREE;
7465 return build_complex (type, rpart, ipart);
7469 /* Subroutine of native_interpret_expr. Interpret the contents of
7470 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7471 If the buffer cannot be interpreted, return NULL_TREE. */
7473 static tree
7474 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7476 tree etype, elem, elements;
7477 int i, size, count;
7479 etype = TREE_TYPE (type);
7480 size = GET_MODE_SIZE (TYPE_MODE (etype));
7481 count = TYPE_VECTOR_SUBPARTS (type);
7482 if (size * count > len)
7483 return NULL_TREE;
7485 elements = NULL_TREE;
7486 for (i = count - 1; i >= 0; i--)
7488 elem = native_interpret_expr (etype, ptr+(i*size), size);
7489 if (!elem)
7490 return NULL_TREE;
7491 elements = tree_cons (NULL_TREE, elem, elements);
7493 return build_vector (type, elements);
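/* For illustration only -- vector elements are laid out back to back
   and interpreted one by one; walking from the last element down and
   consing each onto the front leaves the list in ascending element
   order, which is what build_vector expects.  */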
7497 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7498 the buffer PTR of length LEN as a constant of type TYPE. For
7499 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7500 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7501 return NULL_TREE. */
7503 tree
7504 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7506 switch (TREE_CODE (type))
7508 case INTEGER_TYPE:
7509 case ENUMERAL_TYPE:
7510 case BOOLEAN_TYPE:
7511 return native_interpret_int (type, ptr, len);
7513 case REAL_TYPE:
7514 return native_interpret_real (type, ptr, len);
7516 case COMPLEX_TYPE:
7517 return native_interpret_complex (type, ptr, len);
7519 case VECTOR_TYPE:
7520 return native_interpret_vector (type, ptr, len);
7522 default:
7523 return NULL_TREE;
7528 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7529 TYPE at compile-time. If we're unable to perform the conversion
7530 return NULL_TREE. */
7532 static tree
7533 fold_view_convert_expr (tree type, tree expr)
7535 /* We support up to 512-bit values (for V8DFmode). */
7536 unsigned char buffer[64];
7537 int len;
7539 /* Check that the host and target are sane. */
7540 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7541 return NULL_TREE;
7543 len = native_encode_expr (expr, buffer, sizeof (buffer));
7544 if (len == 0)
7545 return NULL_TREE;
7547 return native_interpret_expr (type, buffer, len);
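/* For illustration only -- fold_view_convert_expr amounts to a
   compile-time "bit cast": encode the constant's target byte image,
   then reinterpret those bytes in the destination type.  The
   host-side analogue for a same-size float -> int reinterpretation:  */
#if 0
#include <string.h>

static unsigned int
example_bit_cast (float f)
{
  unsigned int u;

  memcpy (&u, &f, sizeof u);  /* copy the bytes, do not convert */
  return u;
}
#endif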
7550 /* Build an expression for the address of T. Folds away INDIRECT_REF
7551 to avoid confusing the gimplify process. When IN_FOLD is true
7552 avoid modifications of T. */
7554 static tree
7555 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7557 /* The size of the object is not relevant when talking about its address. */
7558 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7559 t = TREE_OPERAND (t, 0);
7561 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7562 if (TREE_CODE (t) == INDIRECT_REF
7563 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7565 t = TREE_OPERAND (t, 0);
7567 if (TREE_TYPE (t) != ptrtype)
7568 t = build1 (NOP_EXPR, ptrtype, t);
7570 else if (!in_fold)
7572 tree base = t;
7574 while (handled_component_p (base))
7575 base = TREE_OPERAND (base, 0);
7577 if (DECL_P (base))
7578 TREE_ADDRESSABLE (base) = 1;
7580 t = build1 (ADDR_EXPR, ptrtype, t);
7582 else
7583 t = build1 (ADDR_EXPR, ptrtype, t);
7585 return t;
7588 /* Build an expression for the address of T with type PTRTYPE. This
7589 function modifies the input parameter 'T' by sometimes setting the
7590 TREE_ADDRESSABLE flag. */
7592 tree
7593 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7595 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7598 /* Build an expression for the address of T. This function modifies
7599 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7600 flag. When called from fold functions, use fold_addr_expr instead. */
7602 tree
7603 build_fold_addr_expr (tree t)
7605 return build_fold_addr_expr_with_type_1 (t,
7606 build_pointer_type (TREE_TYPE (t)),
7607 false);
7610 /* Same as build_fold_addr_expr, builds an expression for the address
7611 of T, but avoids touching the input node 't'. Fold functions
7612 should use this version. */
7614 static tree
7615 fold_addr_expr (tree t)
7617 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7619 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
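/* For illustration only -- at the source level these helpers fold
   &*p back to p (inserting a cast when the pointer types differ)
   rather than stacking an ADDR_EXPR on top of an INDIRECT_REF.  */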
7622 /* Fold a unary expression of code CODE and type TYPE with operand
7623 OP0. Return the folded expression if folding is successful.
7624 Otherwise, return NULL_TREE. */
7626 tree
7627 fold_unary (enum tree_code code, tree type, tree op0)
7629 tree tem;
7630 tree arg0;
7631 enum tree_code_class kind = TREE_CODE_CLASS (code);
7633 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7634 && TREE_CODE_LENGTH (code) == 1);
7636 arg0 = op0;
7637 if (arg0)
7639 if (CONVERT_EXPR_CODE_P (code)
7640 || code == FLOAT_EXPR || code == ABS_EXPR)
7642 /* Don't use STRIP_NOPS, because signedness of argument type
7643 matters. */
7644 STRIP_SIGN_NOPS (arg0);
7646 else
7648 /* Strip any conversions that don't change the mode. This
7649 is safe for every expression, except for a comparison
7650 expression because its signedness is derived from its
7651 operands.
7653 Note that this is done as an internal manipulation within
7654 the constant folder, in order to find the simplest
7655 representation of the arguments so that their form can be
7656 studied. In any cases, the appropriate type conversions
7657 should be put back in the tree that will get out of the
7658 constant folder. */
7659 STRIP_NOPS (arg0);
7663 if (TREE_CODE_CLASS (code) == tcc_unary)
7665 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7666 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7667 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7668 else if (TREE_CODE (arg0) == COND_EXPR)
7670 tree arg01 = TREE_OPERAND (arg0, 1);
7671 tree arg02 = TREE_OPERAND (arg0, 2);
7672 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7673 arg01 = fold_build1 (code, type, arg01);
7674 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7675 arg02 = fold_build1 (code, type, arg02);
7676 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7677 arg01, arg02);
7679 /* If this was a conversion, and all we did was to move it
7680 inside the COND_EXPR, bring it back out. But leave it if
7681 it is a conversion from integer to integer and the
7682 result precision is no wider than a word since such a
7683 conversion is cheap and may be optimized away by combine,
7684 while it couldn't if it were outside the COND_EXPR. Then return
7685 so we don't get into an infinite recursion loop taking the
7686 conversion out and then back in. */
7688 if ((CONVERT_EXPR_CODE_P (code)
7689 || code == NON_LVALUE_EXPR)
7690 && TREE_CODE (tem) == COND_EXPR
7691 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7692 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7693 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7694 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7695 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7696 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7697 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7698 && (INTEGRAL_TYPE_P
7699 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7700 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7701 || flag_syntax_only))
7702 tem = build1 (code, type,
7703 build3 (COND_EXPR,
7704 TREE_TYPE (TREE_OPERAND
7705 (TREE_OPERAND (tem, 1), 0)),
7706 TREE_OPERAND (tem, 0),
7707 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7708 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7709 return tem;
7711 else if (COMPARISON_CLASS_P (arg0))
7713 if (TREE_CODE (type) == BOOLEAN_TYPE)
7715 arg0 = copy_node (arg0);
7716 TREE_TYPE (arg0) = type;
7717 return arg0;
7719 else if (TREE_CODE (type) != INTEGER_TYPE)
7720 return fold_build3 (COND_EXPR, type, arg0,
7721 fold_build1 (code, type,
7722 integer_one_node),
7723 fold_build1 (code, type,
7724 integer_zero_node));
7728 switch (code)
7730 case PAREN_EXPR:
7731 /* Re-association barriers around constants and other re-association
7732 barriers can be removed. */
7733 if (CONSTANT_CLASS_P (op0)
7734 || TREE_CODE (op0) == PAREN_EXPR)
7735 return fold_convert (type, op0);
7736 return NULL_TREE;
7738 CASE_CONVERT:
7739 case FLOAT_EXPR:
7740 case FIX_TRUNC_EXPR:
7741 if (TREE_TYPE (op0) == type)
7742 return op0;
7744 /* If we have (type) (a CMP b) and type is an integral type, return
7745 new expression involving the new type. */
7746 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7747 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7748 TREE_OPERAND (op0, 1));
7750 /* Handle cases of two conversions in a row. */
7751 if (CONVERT_EXPR_P (op0))
7753 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7754 tree inter_type = TREE_TYPE (op0);
7755 int inside_int = INTEGRAL_TYPE_P (inside_type);
7756 int inside_ptr = POINTER_TYPE_P (inside_type);
7757 int inside_float = FLOAT_TYPE_P (inside_type);
7758 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7759 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7760 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7761 int inter_int = INTEGRAL_TYPE_P (inter_type);
7762 int inter_ptr = POINTER_TYPE_P (inter_type);
7763 int inter_float = FLOAT_TYPE_P (inter_type);
7764 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7765 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7766 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7767 int final_int = INTEGRAL_TYPE_P (type);
7768 int final_ptr = POINTER_TYPE_P (type);
7769 int final_float = FLOAT_TYPE_P (type);
7770 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7771 unsigned int final_prec = TYPE_PRECISION (type);
7772 int final_unsignedp = TYPE_UNSIGNED (type);
7774 /* In addition to the cases of two conversions in a row
7775 handled below, if we are converting something to its own
7776 type via an object of identical or wider precision, neither
7777 conversion is needed. */
7778 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7779 && (((inter_int || inter_ptr) && final_int)
7780 || (inter_float && final_float))
7781 && inter_prec >= final_prec)
7782 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7784 /* Likewise, if the intermediate and final types are either both
7785 float or both integer, we don't need the middle conversion if
7786 it is wider than the final type and doesn't change the signedness
7787 (for integers). Avoid this if the final type is a pointer
7788 since then we sometimes need the inner conversion. Likewise if
7789 the outer has a precision not equal to the size of its mode. */
7790 if (((inter_int && inside_int)
7791 || (inter_float && inside_float)
7792 || (inter_vec && inside_vec))
7793 && inter_prec >= inside_prec
7794 && (inter_float || inter_vec
7795 || inter_unsignedp == inside_unsignedp)
7796 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7797 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7798 && ! final_ptr
7799 && (! final_vec || inter_prec == inside_prec))
7800 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7802 /* If we have a sign-extension of a zero-extended value, we can
7803 replace that by a single zero-extension. */
7804 if (inside_int && inter_int && final_int
7805 && inside_prec < inter_prec && inter_prec < final_prec
7806 && inside_unsignedp && !inter_unsignedp)
7807 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7809 /* Two conversions in a row are not needed unless:
7810 - some conversion is floating-point (overstrict for now), or
7811 - some conversion is a vector (overstrict for now), or
7812 - the intermediate type is narrower than both initial and
7813 final, or
7814 - the intermediate type and innermost type differ in signedness,
7815 and the outermost type is wider than the intermediate, or
7816 - the initial type is a pointer type and the precisions of the
7817 intermediate and final types differ, or
7818 - the final type is a pointer type and the precisions of the
7819 initial and intermediate types differ. */
7820 if (! inside_float && ! inter_float && ! final_float
7821 && ! inside_vec && ! inter_vec && ! final_vec
7822 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7823 && ! (inside_int && inter_int
7824 && inter_unsignedp != inside_unsignedp
7825 && inter_prec < final_prec)
7826 && ((inter_unsignedp && inter_prec > inside_prec)
7827 == (final_unsignedp && final_prec > inter_prec))
7828 && ! (inside_ptr && inter_prec != final_prec)
7829 && ! (final_ptr && inside_prec != inter_prec)
7830 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7831 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7832 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
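/* For illustration only -- concrete instances of the rules above on
   a target with 32-bit int and 64-bit long long:
     (int) (long long) i       -> (int) i
     (long long) (int) us      -> (long long) us
   where us is an unsigned short: the sign-extension of the already
   zero-extended value needs only the one zero-extension.  */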
7835 /* Handle (T *)&A.B.C for A being of type T and B and C
7836 living at offset zero. This occurs frequently in
7837 C++ upcasting and then accessing the base. */
7838 if (TREE_CODE (op0) == ADDR_EXPR
7839 && POINTER_TYPE_P (type)
7840 && handled_component_p (TREE_OPERAND (op0, 0)))
7842 HOST_WIDE_INT bitsize, bitpos;
7843 tree offset;
7844 enum machine_mode mode;
7845 int unsignedp, volatilep;
7846 tree base = TREE_OPERAND (op0, 0);
7847 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7848 &mode, &unsignedp, &volatilep, false);
7849 /* If the reference was to a (constant) zero offset, we can use
7850 the address of the base if it has the same base type
7851 as the result type. */
7852 if (! offset && bitpos == 0
7853 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7854 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7855 return fold_convert (type, fold_addr_expr (base));
7858 if (TREE_CODE (op0) == MODIFY_EXPR
7859 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7860 /* Detect assigning a bitfield. */
7861 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7862 && DECL_BIT_FIELD
7863 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7865 /* Don't leave an assignment inside a conversion
7866 unless assigning a bitfield. */
7867 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7868 /* First do the assignment, then return converted constant. */
7869 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7870 TREE_NO_WARNING (tem) = 1;
7871 TREE_USED (tem) = 1;
7872 return tem;
7875 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7876 constant (if x has signed type, the sign bit cannot be set
7877 in c). This folds extension into the BIT_AND_EXPR.
7878 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7879 very likely don't have maximal range for their precision and this
7880 transformation effectively doesn't preserve non-maximal ranges. */
7881 if (TREE_CODE (type) == INTEGER_TYPE
7882 && TREE_CODE (op0) == BIT_AND_EXPR
7883 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7885 tree and = op0;
7886 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7887 int change = 0;
7889 if (TYPE_UNSIGNED (TREE_TYPE (and))
7890 || (TYPE_PRECISION (type)
7891 <= TYPE_PRECISION (TREE_TYPE (and))))
7892 change = 1;
7893 else if (TYPE_PRECISION (TREE_TYPE (and1))
7894 <= HOST_BITS_PER_WIDE_INT
7895 && host_integerp (and1, 1))
7897 unsigned HOST_WIDE_INT cst;
7899 cst = tree_low_cst (and1, 1);
7900 cst &= (HOST_WIDE_INT) -1
7901 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7902 change = (cst == 0);
7903 #ifdef LOAD_EXTEND_OP
7904 if (change
7905 && !flag_syntax_only
7906 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7907 == ZERO_EXTEND))
7909 tree uns = unsigned_type_for (TREE_TYPE (and0));
7910 and0 = fold_convert (uns, and0);
7911 and1 = fold_convert (uns, and1);
7913 #endif
7915 if (change)
7917 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7918 TREE_INT_CST_HIGH (and1), 0,
7919 TREE_OVERFLOW (and1));
7920 return fold_build2 (BIT_AND_EXPR, type,
7921 fold_convert (type, and0), tem);
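/* For illustration only -- with int i, the rule above rewrites
   (unsigned) (i & 0xff) as (unsigned) i & 0xffu: the mask has no
   sign bit set, so converting before masking preserves the value
   and the conversion folds into the BIT_AND_EXPR.  */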
7925 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7926 when one of the new casts will fold away. Conservatively we assume
7927 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7928 if (POINTER_TYPE_P (type)
7929 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7930 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7931 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7932 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7934 tree arg00 = TREE_OPERAND (arg0, 0);
7935 tree arg01 = TREE_OPERAND (arg0, 1);
7937 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7938 fold_convert (sizetype, arg01));
7941 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7942 of the same precision, and X is an integer type not narrower than
7943 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7944 if (INTEGRAL_TYPE_P (type)
7945 && TREE_CODE (op0) == BIT_NOT_EXPR
7946 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7947 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7948 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7950 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7951 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7952 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7953 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7956 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7957 type of X and Y (integer types only). */
7958 if (INTEGRAL_TYPE_P (type)
7959 && TREE_CODE (op0) == MULT_EXPR
7960 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7961 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7963 /* Be careful not to introduce new overflows. */
7964 tree mult_type;
7965 if (TYPE_OVERFLOW_WRAPS (type))
7966 mult_type = type;
7967 else
7968 mult_type = unsigned_type_for (type);
7970 tem = fold_build2 (MULT_EXPR, mult_type,
7971 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7972 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7973 return fold_convert (type, tem);
7976 tem = fold_convert_const (code, type, op0);
7977 return tem ? tem : NULL_TREE;
7979 case FIXED_CONVERT_EXPR:
7980 tem = fold_convert_const (code, type, arg0);
7981 return tem ? tem : NULL_TREE;
7983 case VIEW_CONVERT_EXPR:
7984 if (TREE_TYPE (op0) == type)
7985 return op0;
7986 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7987 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7989 /* For integral conversions with the same precision or pointer
7990 conversions use a NOP_EXPR instead. */
7991 if ((INTEGRAL_TYPE_P (type)
7992 || POINTER_TYPE_P (type))
7993 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7994 || POINTER_TYPE_P (TREE_TYPE (op0)))
7995 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7996 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7997 a sub-type to its base type as generated by the Ada FE. */
7998 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7999 && TREE_TYPE (TREE_TYPE (op0))))
8000 return fold_convert (type, op0);
8002 /* Strip inner integral conversions that do not change the precision. */
8003 if (CONVERT_EXPR_P (op0)
8004 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8005 || POINTER_TYPE_P (TREE_TYPE (op0)))
8006 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8007 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8008 && (TYPE_PRECISION (TREE_TYPE (op0))
8009 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8010 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8012 return fold_view_convert_expr (type, op0);
8014 case NEGATE_EXPR:
8015 tem = fold_negate_expr (arg0);
8016 if (tem)
8017 return fold_convert (type, tem);
8018 return NULL_TREE;
8020 case ABS_EXPR:
8021 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8022 return fold_abs_const (arg0, type);
8023 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8024 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8025 /* Convert fabs((double)float) into (double)fabsf(float). */
8026 else if (TREE_CODE (arg0) == NOP_EXPR
8027 && TREE_CODE (type) == REAL_TYPE)
8029 tree targ0 = strip_float_extensions (arg0);
8030 if (targ0 != arg0)
8031 return fold_convert (type, fold_build1 (ABS_EXPR,
8032 TREE_TYPE (targ0),
8033 targ0));
8035 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8036 else if (TREE_CODE (arg0) == ABS_EXPR)
8037 return arg0;
8038 else if (tree_expr_nonnegative_p (arg0))
8039 return arg0;
8041 /* Strip sign ops from argument. */
8042 if (TREE_CODE (type) == REAL_TYPE)
8044 tem = fold_strip_sign_ops (arg0);
8045 if (tem)
8046 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8048 return NULL_TREE;
8050 case CONJ_EXPR:
8051 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8052 return fold_convert (type, arg0);
8053 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8055 tree itype = TREE_TYPE (type);
8056 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8057 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8058 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8060 if (TREE_CODE (arg0) == COMPLEX_CST)
8062 tree itype = TREE_TYPE (type);
8063 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8064 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8065 return build_complex (type, rpart, negate_expr (ipart));
8067 if (TREE_CODE (arg0) == CONJ_EXPR)
8068 return fold_convert (type, TREE_OPERAND (arg0, 0));
8069 return NULL_TREE;
8071 case BIT_NOT_EXPR:
8072 if (TREE_CODE (arg0) == INTEGER_CST)
8073 return fold_not_const (arg0, type);
8074 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8075 return fold_convert (type, TREE_OPERAND (arg0, 0));
8076 /* Convert ~ (-A) to A - 1. */
8077 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8078 return fold_build2 (MINUS_EXPR, type,
8079 fold_convert (type, TREE_OPERAND (arg0, 0)),
8080 build_int_cst (type, 1));
8081 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8082 else if (INTEGRAL_TYPE_P (type)
8083 && ((TREE_CODE (arg0) == MINUS_EXPR
8084 && integer_onep (TREE_OPERAND (arg0, 1)))
8085 || (TREE_CODE (arg0) == PLUS_EXPR
8086 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8087 return fold_build1 (NEGATE_EXPR, type,
8088 fold_convert (type, TREE_OPERAND (arg0, 0)));
8089 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8090 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8091 && (tem = fold_unary (BIT_NOT_EXPR, type,
8092 fold_convert (type,
8093 TREE_OPERAND (arg0, 0)))))
8094 return fold_build2 (BIT_XOR_EXPR, type, tem,
8095 fold_convert (type, TREE_OPERAND (arg0, 1)));
8096 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8097 && (tem = fold_unary (BIT_NOT_EXPR, type,
8098 fold_convert (type,
8099 TREE_OPERAND (arg0, 1)))))
8100 return fold_build2 (BIT_XOR_EXPR, type,
8101 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8102 /* Perform BIT_NOT_EXPR on each element individually. */
8103 else if (TREE_CODE (arg0) == VECTOR_CST)
8105 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8106 int count = TYPE_VECTOR_SUBPARTS (type), i;
8108 for (i = 0; i < count; i++)
8110 if (elements)
8112 elem = TREE_VALUE (elements);
8113 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8114 if (elem == NULL_TREE)
8115 break;
8116 elements = TREE_CHAIN (elements);
8118 else
8119 elem = build_int_cst (TREE_TYPE (type), -1);
8120 list = tree_cons (NULL_TREE, elem, list);
8122 if (i == count)
8123 return build_vector (type, nreverse (list));
8126 return NULL_TREE;
8128 case TRUTH_NOT_EXPR:
8129 /* The argument to invert_truthvalue must have Boolean type. */
8130 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8131 arg0 = fold_convert (boolean_type_node, arg0);
8133 /* Note that the operand of this must be an int
8134 and its values must be 0 or 1.
8135 ("true" is a fixed value perhaps depending on the language,
8136 but we don't handle values other than 1 correctly yet.) */
8137 tem = fold_truth_not_expr (arg0);
8138 if (!tem)
8139 return NULL_TREE;
8140 return fold_convert (type, tem);
8142 case REALPART_EXPR:
8143 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8144 return fold_convert (type, arg0);
8145 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8146 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8147 TREE_OPERAND (arg0, 1));
8148 if (TREE_CODE (arg0) == COMPLEX_CST)
8149 return fold_convert (type, TREE_REALPART (arg0));
8150 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8152 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8153 tem = fold_build2 (TREE_CODE (arg0), itype,
8154 fold_build1 (REALPART_EXPR, itype,
8155 TREE_OPERAND (arg0, 0)),
8156 fold_build1 (REALPART_EXPR, itype,
8157 TREE_OPERAND (arg0, 1)));
8158 return fold_convert (type, tem);
8160 if (TREE_CODE (arg0) == CONJ_EXPR)
8162 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8163 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8164 return fold_convert (type, tem);
8166 if (TREE_CODE (arg0) == CALL_EXPR)
8168 tree fn = get_callee_fndecl (arg0);
8169 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8170 switch (DECL_FUNCTION_CODE (fn))
8172 CASE_FLT_FN (BUILT_IN_CEXPI):
8173 fn = mathfn_built_in (type, BUILT_IN_COS);
8174 if (fn)
8175 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8176 break;
8178 default:
8179 break;
8182 return NULL_TREE;
8184 case IMAGPART_EXPR:
8185 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8186 return fold_convert (type, integer_zero_node);
8187 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8188 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8189 TREE_OPERAND (arg0, 0));
8190 if (TREE_CODE (arg0) == COMPLEX_CST)
8191 return fold_convert (type, TREE_IMAGPART (arg0));
8192 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8194 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8195 tem = fold_build2 (TREE_CODE (arg0), itype,
8196 fold_build1 (IMAGPART_EXPR, itype,
8197 TREE_OPERAND (arg0, 0)),
8198 fold_build1 (IMAGPART_EXPR, itype,
8199 TREE_OPERAND (arg0, 1)));
8200 return fold_convert (type, tem);
8202 if (TREE_CODE (arg0) == CONJ_EXPR)
8204 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8205 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8206 return fold_convert (type, negate_expr (tem));
8208 if (TREE_CODE (arg0) == CALL_EXPR)
8210 tree fn = get_callee_fndecl (arg0);
8211 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8212 switch (DECL_FUNCTION_CODE (fn))
8214 CASE_FLT_FN (BUILT_IN_CEXPI):
8215 fn = mathfn_built_in (type, BUILT_IN_SIN);
8216 if (fn)
8217 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8218 break;
8220 default:
8221 break;
8224 return NULL_TREE;
8226 default:
8227 return NULL_TREE;
8228 } /* switch (code) */
8231 /* Fold a binary expression of code CODE and type TYPE with operands
8232 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8233 Return the folded expression if folding is successful. Otherwise,
8234 return NULL_TREE. */
8236 static tree
8237 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8239 enum tree_code compl_code;
8241 if (code == MIN_EXPR)
8242 compl_code = MAX_EXPR;
8243 else if (code == MAX_EXPR)
8244 compl_code = MIN_EXPR;
8245 else
8246 gcc_unreachable ();
8248 /* MIN (MAX (a, b), b) == b. */
8249 if (TREE_CODE (op0) == compl_code
8250 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8251 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8253 /* MIN (MAX (b, a), b) == b. */
8254 if (TREE_CODE (op0) == compl_code
8255 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8256 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8257 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8259 /* MIN (a, MAX (a, b)) == a. */
8260 if (TREE_CODE (op1) == compl_code
8261 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8262 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8263 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8265 /* MIN (a, MAX (b, a)) == a. */
8266 if (TREE_CODE (op1) == compl_code
8267 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8268 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8269 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8271 return NULL_TREE;
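/* For illustration only -- the four cases above, written with plain
   integer helpers:  */
#if 0
static int imin (int a, int b) { return a < b ? a : b; }
static int imax (int a, int b) { return a > b ? a : b; }
/* imin (imax (a, b), b) == b      imin (imax (b, a), b) == b
   imin (a, imax (a, b)) == a      imin (a, imax (b, a)) == a
   and symmetrically with min and max exchanged.  */
#endif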
8274 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8275 by changing CODE to reduce the magnitude of constants involved in
8276 ARG0 of the comparison.
8277 Returns a canonicalized comparison tree if a simplification was
8278 possible, otherwise returns NULL_TREE.
8279 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8280 valid if signed overflow is undefined. */
8282 static tree
8283 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8284 tree arg0, tree arg1,
8285 bool *strict_overflow_p)
8287 enum tree_code code0 = TREE_CODE (arg0);
8288 tree t, cst0 = NULL_TREE;
8289 int sgn0;
8290 bool swap = false;
8292 /* Match A +- CST code arg1 and CST code arg1. */
8293 if (!(((code0 == MINUS_EXPR
8294 || code0 == PLUS_EXPR)
8295 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8296 || code0 == INTEGER_CST))
8297 return NULL_TREE;
8299 /* Identify the constant in arg0 and its sign. */
8300 if (code0 == INTEGER_CST)
8301 cst0 = arg0;
8302 else
8303 cst0 = TREE_OPERAND (arg0, 1);
8304 sgn0 = tree_int_cst_sgn (cst0);
8306 /* Overflowed constants and zero will cause problems. */
8307 if (integer_zerop (cst0)
8308 || TREE_OVERFLOW (cst0))
8309 return NULL_TREE;
8311 /* See if we can reduce the magnitude of the constant in
8312 arg0 by changing the comparison code. */
8313 if (code0 == INTEGER_CST)
8315 /* CST <= arg1 -> CST-1 < arg1. */
8316 if (code == LE_EXPR && sgn0 == 1)
8317 code = LT_EXPR;
8318 /* -CST < arg1 -> -CST-1 <= arg1. */
8319 else if (code == LT_EXPR && sgn0 == -1)
8320 code = LE_EXPR;
8321 /* CST > arg1 -> CST-1 >= arg1. */
8322 else if (code == GT_EXPR && sgn0 == 1)
8323 code = GE_EXPR;
8324 /* -CST >= arg1 -> -CST-1 > arg1. */
8325 else if (code == GE_EXPR && sgn0 == -1)
8326 code = GT_EXPR;
8327 else
8328 return NULL_TREE;
8329 /* arg1 code' CST' might be more canonical. */
8330 swap = true;
8332 else
8334 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8335 if (code == LT_EXPR
8336 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8337 code = LE_EXPR;
8338 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8339 else if (code == GT_EXPR
8340 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8341 code = GE_EXPR;
8342 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8343 else if (code == LE_EXPR
8344 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8345 code = LT_EXPR;
8346 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8347 else if (code == GE_EXPR
8348 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8349 code = GT_EXPR;
8350 else
8351 return NULL_TREE;
8352 *strict_overflow_p = true;
8355 /* Now build the constant reduced in magnitude. */
8356 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8357 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8358 if (code0 != INTEGER_CST)
8359 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8361 /* If swapping might yield a more canonical form, do so. */
8362 if (swap)
8363 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8364 else
8365 return fold_build2 (code, type, t, arg1);
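/* For illustration only -- for signed x under -fstrict-overflow
   semantics:
     x - 5 < y   becomes  x - 4 <= y   (constant magnitude reduced);
     10 <= y     becomes  y > 9        (sole constant moved to the
                                        second operand).  */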
8368 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8369 overflow further. Try to decrease the magnitude of constants involved
8370 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8371 and put sole constants at the second argument position.
8372 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8374 static tree
8375 maybe_canonicalize_comparison (enum tree_code code, tree type,
8376 tree arg0, tree arg1)
8378 tree t;
8379 bool strict_overflow_p;
8380 const char * const warnmsg = G_("assuming signed overflow does not occur "
8381 "when reducing constant in comparison");
8383 /* In principle pointers also have undefined overflow behavior,
8384 but that causes problems elsewhere. */
8385 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8386 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8387 return NULL_TREE;
8389 /* Try canonicalization by simplifying arg0. */
8390 strict_overflow_p = false;
8391 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8392 &strict_overflow_p);
8393 if (t)
8395 if (strict_overflow_p)
8396 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8397 return t;
8400 /* Try canonicalization by simplifying arg1 using the swapped
8401 comparison. */
8402 code = swap_tree_comparison (code);
8403 strict_overflow_p = false;
8404 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8405 &strict_overflow_p);
8406 if (t && strict_overflow_p)
8407 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8408 return t;
8411 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8412 space. This is used to avoid issuing overflow warnings for
8413 expressions like &p->x which cannot wrap. */
8415 static bool
8416 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8418 unsigned HOST_WIDE_INT offset_low, total_low;
8419 HOST_WIDE_INT size, offset_high, total_high;
8421 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8422 return true;
8424 if (bitpos < 0)
8425 return true;
8427 if (offset == NULL_TREE)
8429 offset_low = 0;
8430 offset_high = 0;
8432 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8433 return true;
8434 else
8436 offset_low = TREE_INT_CST_LOW (offset);
8437 offset_high = TREE_INT_CST_HIGH (offset);
8440 if (add_double_with_sign (offset_low, offset_high,
8441 bitpos / BITS_PER_UNIT, 0,
8442 &total_low, &total_high,
8443 true))
8444 return true;
8446 if (total_high != 0)
8447 return true;
8449 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8450 if (size <= 0)
8451 return true;
8453 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8454 array. */
8455 if (TREE_CODE (base) == ADDR_EXPR)
8457 HOST_WIDE_INT base_size;
8459 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8460 if (base_size > 0 && size < base_size)
8461 size = base_size;
8464 return total_low > (unsigned HOST_WIDE_INT) size;
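/* For illustration only -- on a target with 4-byte int, for
   struct S { int a, b; } *p the address &p->b is 4 bytes into an
   8-byte object and cannot wrap, so no warning is issued; a constant
   offset larger than the pointed-to object's size is conservatively
   treated as possibly wrapping.  */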
8467 /* Subroutine of fold_binary. This routine performs all of the
8468 transformations that are common to the equality/inequality
8469 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8470 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8471 fold_binary should call fold_binary. Fold a comparison with
8472 tree code CODE and type TYPE with operands OP0 and OP1. Return
8473 the folded comparison or NULL_TREE. */
8475 static tree
8476 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8478 tree arg0, arg1, tem;
8480 arg0 = op0;
8481 arg1 = op1;
8483 STRIP_SIGN_NOPS (arg0);
8484 STRIP_SIGN_NOPS (arg1);
8486 tem = fold_relational_const (code, type, arg0, arg1);
8487 if (tem != NULL_TREE)
8488 return tem;
8490 /* If one arg is a real or integer constant, put it last. */
8491 if (tree_swap_operands_p (arg0, arg1, true))
8492 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8494 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8495 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8496 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8497 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8498 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8499 && (TREE_CODE (arg1) == INTEGER_CST
8500 && !TREE_OVERFLOW (arg1)))
8502 tree const1 = TREE_OPERAND (arg0, 1);
8503 tree const2 = arg1;
8504 tree variable = TREE_OPERAND (arg0, 0);
8505 tree lhs;
8506 int lhs_add;
8507 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8509 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8510 TREE_TYPE (arg1), const2, const1);
8512 /* If the constant operation overflowed this can be
8513 simplified as a comparison against INT_MAX/INT_MIN. */
8514 if (TREE_CODE (lhs) == INTEGER_CST
8515 && TREE_OVERFLOW (lhs))
8517 int const1_sgn = tree_int_cst_sgn (const1);
8518 enum tree_code code2 = code;
8520 /* Get the sign of the constant on the lhs if the
8521 operation were VARIABLE + CONST1. */
8522 if (TREE_CODE (arg0) == MINUS_EXPR)
8523 const1_sgn = -const1_sgn;
8525 /* The sign of the constant determines if we overflowed
8526 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8527 Canonicalize to the INT_MIN overflow by swapping the comparison
8528 if necessary. */
8529 if (const1_sgn == -1)
8530 code2 = swap_tree_comparison (code);
8532 /* We can now look at the canonicalized case
8533 VARIABLE + 1 CODE2 INT_MIN
8534 and decide on the result. */
8535 if (code2 == LT_EXPR
8536 || code2 == LE_EXPR
8537 || code2 == EQ_EXPR)
8538 return omit_one_operand (type, boolean_false_node, variable);
8539 else if (code2 == NE_EXPR
8540 || code2 == GE_EXPR
8541 || code2 == GT_EXPR)
8542 return omit_one_operand (type, boolean_true_node, variable);
8545 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8546 && (TREE_CODE (lhs) != INTEGER_CST
8547 || !TREE_OVERFLOW (lhs)))
8549 fold_overflow_warning (("assuming signed overflow does not occur "
8550 "when changing X +- C1 cmp C2 to "
8551 "X cmp C1 +- C2"),
8552 WARN_STRICT_OVERFLOW_COMPARISON);
8553 return fold_build2 (code, type, variable, lhs);
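/* For illustration only -- for signed int x with undefined overflow,
   x + 10 < 20 becomes x < 10; when computing C2 -+ C1 overflows, the
   INT_MIN/INT_MAX analysis above decides the comparison outright
   (always true or always false).  */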
8557 /* For comparisons of pointers we can decompose it to a compile time
8558 comparison of the base objects and the offsets into the object.
8559 This requires at least one operand being an ADDR_EXPR or a
8560 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8561 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8562 && (TREE_CODE (arg0) == ADDR_EXPR
8563 || TREE_CODE (arg1) == ADDR_EXPR
8564 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8565 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8567 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8568 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8569 enum machine_mode mode;
8570 int volatilep, unsignedp;
8571 bool indirect_base0 = false, indirect_base1 = false;
8573 /* Get base and offset for the access. Strip ADDR_EXPR for
8574 get_inner_reference, but put it back by stripping INDIRECT_REF
8575 off the base object if possible. indirect_baseN will be true
8576 if baseN is not an address but refers to the object itself. */
8577 base0 = arg0;
8578 if (TREE_CODE (arg0) == ADDR_EXPR)
8580 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8581 &bitsize, &bitpos0, &offset0, &mode,
8582 &unsignedp, &volatilep, false);
8583 if (TREE_CODE (base0) == INDIRECT_REF)
8584 base0 = TREE_OPERAND (base0, 0);
8585 else
8586 indirect_base0 = true;
8588 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8590 base0 = TREE_OPERAND (arg0, 0);
8591 offset0 = TREE_OPERAND (arg0, 1);
8594 base1 = arg1;
8595 if (TREE_CODE (arg1) == ADDR_EXPR)
8597 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8598 &bitsize, &bitpos1, &offset1, &mode,
8599 &unsignedp, &volatilep, false);
8600 if (TREE_CODE (base1) == INDIRECT_REF)
8601 base1 = TREE_OPERAND (base1, 0);
8602 else
8603 indirect_base1 = true;
8605 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8607 base1 = TREE_OPERAND (arg1, 0);
8608 offset1 = TREE_OPERAND (arg1, 1);
8611 /* If we have equivalent bases we might be able to simplify. */
8612 if (indirect_base0 == indirect_base1
8613 && operand_equal_p (base0, base1, 0))
8615 /* We can fold this expression to a constant if the non-constant
8616 offset parts are equal. */
8617 if ((offset0 == offset1
8618 || (offset0 && offset1
8619 && operand_equal_p (offset0, offset1, 0)))
8620 && (code == EQ_EXPR
8621 || code == NE_EXPR
8622 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8625 if (code != EQ_EXPR
8626 && code != NE_EXPR
8627 && bitpos0 != bitpos1
8628 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8629 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8630 fold_overflow_warning (("assuming pointer wraparound does not "
8631 "occur when comparing P +- C1 with "
8632 "P +- C2"),
8633 WARN_STRICT_OVERFLOW_CONDITIONAL);
8635 switch (code)
8637 case EQ_EXPR:
8638 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8639 case NE_EXPR:
8640 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8641 case LT_EXPR:
8642 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8643 case LE_EXPR:
8644 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8645 case GE_EXPR:
8646 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8647 case GT_EXPR:
8648 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8649 default:;
8652 /* We can simplify the comparison to a comparison of the variable
8653 offset parts if the constant offset parts are equal.
8654 Be careful to use signed size type here because otherwise we
8655 mess with array offsets in the wrong way. This is possible
8656 because pointer arithmetic is restricted to remain within an
8657 object and overflow on pointer differences is undefined as of
8658 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8659 else if (bitpos0 == bitpos1
8660 && ((code == EQ_EXPR || code == NE_EXPR)
8661 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8663 tree signed_size_type_node;
8664 signed_size_type_node = signed_type_for (size_type_node);
8666 /* By converting to the signed size type we cover middle-end pointer
8667 arithmetic, which operates on unsigned pointer types of size-type
8668 width, and ARRAY_REF offsets, which are properly sign- or
8669 zero-extended from their type in case it is narrower than the
8670 size type. */
8671 if (offset0 == NULL_TREE)
8672 offset0 = build_int_cst (signed_size_type_node, 0);
8673 else
8674 offset0 = fold_convert (signed_size_type_node, offset0);
8675 if (offset1 == NULL_TREE)
8676 offset1 = build_int_cst (signed_size_type_node, 0);
8677 else
8678 offset1 = fold_convert (signed_size_type_node, offset1);
8680 if (code != EQ_EXPR
8681 && code != NE_EXPR
8682 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8683 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8684 fold_overflow_warning (("assuming pointer wraparound does not "
8685 "occur when comparing P +- C1 with "
8686 "P +- C2"),
8687 WARN_STRICT_OVERFLOW_COMPARISON);
8689 return fold_build2 (code, type, offset0, offset1);
8692 /* For non-equal bases we can simplify if they are addresses
8693 of local binding decls or constants. */
8694 else if (indirect_base0 && indirect_base1
8695 /* We know that !operand_equal_p (base0, base1, 0)
8696 because the if condition was false. But make
8697 sure the two decls are not the same. */
8698 && base0 != base1
8699 && TREE_CODE (arg0) == ADDR_EXPR
8700 && TREE_CODE (arg1) == ADDR_EXPR
8701 && (((TREE_CODE (base0) == VAR_DECL
8702 || TREE_CODE (base0) == PARM_DECL)
8703 && (targetm.binds_local_p (base0)
8704 || CONSTANT_CLASS_P (base1)))
8705 || CONSTANT_CLASS_P (base0))
8706 && (((TREE_CODE (base1) == VAR_DECL
8707 || TREE_CODE (base1) == PARM_DECL)
8708 && (targetm.binds_local_p (base1)
8709 || CONSTANT_CLASS_P (base0)))
8710 || CONSTANT_CLASS_P (base1)))
8712 if (code == EQ_EXPR)
8713 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8714 else if (code == NE_EXPR)
8715 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8717 /* For equal offsets we can simplify to a comparison of the
8718 base addresses. */
8719 else if (bitpos0 == bitpos1
8720 && (indirect_base0
8721 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8722 && (indirect_base1
8723 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8724 && ((offset0 == offset1)
8725 || (offset0 && offset1
8726 && operand_equal_p (offset0, offset1, 0))))
8728 if (indirect_base0)
8729 base0 = fold_addr_expr (base0);
8730 if (indirect_base1)
8731 base1 = fold_addr_expr (base1);
8732 return fold_build2 (code, type, base0, base1);
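/* For illustration only -- with char a[8], the decomposition above
   folds &a[1] < &a[3] to 1 (equal bases, constant bit positions
   compared directly, assuming pointer overflow is undefined) and
   &a[i] == &a[j] to a comparison of the variable offsets i and j.  */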
8736 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8737 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8738 the resulting offset is smaller in absolute value than the
8739 original one. */
8740 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8741 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8742 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8743 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8744 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8745 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8746 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8748 tree const1 = TREE_OPERAND (arg0, 1);
8749 tree const2 = TREE_OPERAND (arg1, 1);
8750 tree variable1 = TREE_OPERAND (arg0, 0);
8751 tree variable2 = TREE_OPERAND (arg1, 0);
8752 tree cst;
8753 const char * const warnmsg = G_("assuming signed overflow does not "
8754 "occur when combining constants around "
8755 "a comparison");
8757 /* Put the constant on the side where it doesn't overflow and is
8758 of lower absolute value than before. */
8759 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8760 ? MINUS_EXPR : PLUS_EXPR,
8761 const2, const1, 0);
8762 if (!TREE_OVERFLOW (cst)
8763 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8765 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8766 return fold_build2 (code, type,
8767 variable1,
8768 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8769 variable2, cst));
8772 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8773 ? MINUS_EXPR : PLUS_EXPR,
8774 const1, const2, 0);
8775 if (!TREE_OVERFLOW (cst)
8776 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8778 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8779 return fold_build2 (code, type,
8780 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8781 variable1, cst),
8782 variable2);
8786 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8787 signed arithmetic case. That form is created by the compiler
8788 often enough for folding it to be of value. One example is in
8789 computing loop trip counts after Operator Strength Reduction. */
8790 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8791 && TREE_CODE (arg0) == MULT_EXPR
8792 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8793 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8794 && integer_zerop (arg1))
8796 tree const1 = TREE_OPERAND (arg0, 1);
8797 tree const2 = arg1; /* zero */
8798 tree variable1 = TREE_OPERAND (arg0, 0);
8799 enum tree_code cmp_code = code;
8801 gcc_assert (!integer_zerop (const1));
8803 fold_overflow_warning (("assuming signed overflow does not occur when "
8804 "eliminating multiplication in comparison "
8805 "with zero"),
8806 WARN_STRICT_OVERFLOW_COMPARISON);
8808 /* If const1 is negative we swap the sense of the comparison. */
8809 if (tree_int_cst_sgn (const1) < 0)
8810 cmp_code = swap_tree_comparison (cmp_code);
8812 return fold_build2 (cmp_code, type, variable1, const2);
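/* For illustration only -- for signed int x with undefined overflow,
   x * 4 > 0 becomes x > 0, while x * -4 > 0 becomes x < 0: a
   negative multiplier swaps the comparison sense.  */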
8815 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8816 if (tem)
8817 return tem;
8819 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8821 tree targ0 = strip_float_extensions (arg0);
8822 tree targ1 = strip_float_extensions (arg1);
8823 tree newtype = TREE_TYPE (targ0);
8825 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8826 newtype = TREE_TYPE (targ1);
8828 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8829 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8830 return fold_build2 (code, type, fold_convert (newtype, targ0),
8831 fold_convert (newtype, targ1));
8833 /* (-a) CMP (-b) -> b CMP a */
8834 if (TREE_CODE (arg0) == NEGATE_EXPR
8835 && TREE_CODE (arg1) == NEGATE_EXPR)
8836 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8837 TREE_OPERAND (arg0, 0));
8839 if (TREE_CODE (arg1) == REAL_CST)
8841 REAL_VALUE_TYPE cst;
8842 cst = TREE_REAL_CST (arg1);
8844 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8845 if (TREE_CODE (arg0) == NEGATE_EXPR)
8846 return fold_build2 (swap_tree_comparison (code), type,
8847 TREE_OPERAND (arg0, 0),
8848 build_real (TREE_TYPE (arg1),
8849 REAL_VALUE_NEGATE (cst)));
8851 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8852 /* a CMP (-0) -> a CMP 0 */
8853 if (REAL_VALUE_MINUS_ZERO (cst))
8854 return fold_build2 (code, type, arg0,
8855 build_real (TREE_TYPE (arg1), dconst0));
8857 /* x != NaN is always true, other ops are always false. */
8858 if (REAL_VALUE_ISNAN (cst)
8859 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8861 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8862 return omit_one_operand (type, tem, arg0);
8865 /* Fold comparisons against infinity. */
8866 if (REAL_VALUE_ISINF (cst))
8868 tem = fold_inf_compare (code, type, arg0, arg1);
8869 if (tem != NULL_TREE)
8870 return tem;
8874 /* If this is a comparison of a real constant with a PLUS_EXPR
8875 or a MINUS_EXPR of a real constant, we can convert it into a
8876 comparison with a revised real constant as long as no overflow
8877 occurs when unsafe_math_optimizations are enabled. */
8878 if (flag_unsafe_math_optimizations
8879 && TREE_CODE (arg1) == REAL_CST
8880 && (TREE_CODE (arg0) == PLUS_EXPR
8881 || TREE_CODE (arg0) == MINUS_EXPR)
8882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8883 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8884 ? MINUS_EXPR : PLUS_EXPR,
8885 arg1, TREE_OPERAND (arg0, 1), 0))
8886 && !TREE_OVERFLOW (tem))
8887 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8889 /* Likewise, we can simplify a comparison of a real constant with
8890 a MINUS_EXPR whose first operand is also a real constant, i.e.
8891 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8892 floating-point types only if -fassociative-math is set. */
8893 if (flag_associative_math
8894 && TREE_CODE (arg1) == REAL_CST
8895 && TREE_CODE (arg0) == MINUS_EXPR
8896 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8897 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8898 arg1, 0))
8899 && !TREE_OVERFLOW (tem))
8900 return fold_build2 (swap_tree_comparison (code), type,
8901 TREE_OPERAND (arg0, 1), tem);
8903 /* Fold comparisons against built-in math functions. */
8904 if (TREE_CODE (arg1) == REAL_CST
8905 && flag_unsafe_math_optimizations
8906 && ! flag_errno_math)
8908 enum built_in_function fcode = builtin_mathfn_code (arg0);
8910 if (fcode != END_BUILTINS)
8912 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8913 if (tem != NULL_TREE)
8914 return tem;
8919 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8920 && CONVERT_EXPR_P (arg0))
8922 /* If we are widening one operand of an integer comparison,
8923 see if the other operand is similarly being widened. Perhaps we
8924 can do the comparison in the narrower type. */
8925 tem = fold_widened_comparison (code, type, arg0, arg1);
8926 if (tem)
8927 return tem;
8929 /* Or if we are changing signedness. */
8930 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8931 if (tem)
8932 return tem;
8935 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8936 constant, we can simplify it. */
8937 if (TREE_CODE (arg1) == INTEGER_CST
8938 && (TREE_CODE (arg0) == MIN_EXPR
8939 || TREE_CODE (arg0) == MAX_EXPR)
8940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8942 tem = optimize_minmax_comparison (code, type, op0, op1);
8943 if (tem)
8944 return tem;
8947 /* Simplify comparison of something with itself. (For IEEE
8948 floating-point, we can only do some of these simplifications.) */
8949 if (operand_equal_p (arg0, arg1, 0))
8951 switch (code)
8953 case EQ_EXPR:
8954 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8955 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8956 return constant_boolean_node (1, type);
8957 break;
8959 case GE_EXPR:
8960 case LE_EXPR:
8961 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8962 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8963 return constant_boolean_node (1, type);
8964 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8966 case NE_EXPR:
8967 /* For NE, we can only do this simplification if integer
8968 or we don't honor IEEE floating point NaNs. */
8969 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8970 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8971 break;
8972 /* ... fall through ... */
8973 case GT_EXPR:
8974 case LT_EXPR:
8975 return constant_boolean_node (0, type);
8976 default:
8977 gcc_unreachable ();
8981 /* If we are comparing an expression that just has comparisons
8982 of two integer values, arithmetic expressions of those comparisons,
8983 and constants, we can simplify it. There are only three cases
8984 to check: the two values can either be equal, the first can be
8985 greater, or the second can be greater. Fold the expression for
8986 those three values. Since each value must be 0 or 1, we have
8987 eight possibilities, each of which corresponds to the constant 0
8988 or 1 or one of the six possible comparisons.
8990 This handles common cases like (a > b) == 0 but also handles
8991 expressions like ((x > y) - (y > x)) > 0, which supposedly
8992 occur in macroized code. */
8994 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8996 tree cval1 = 0, cval2 = 0;
8997 int save_p = 0;
8999 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9000 /* Don't handle degenerate cases here; they should already
9001 have been handled anyway. */
9002 && cval1 != 0 && cval2 != 0
9003 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9004 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9005 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9006 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9007 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9008 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9009 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9011 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9012 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9014 /* We can't just pass T to eval_subst in case cval1 or cval2
9015 was the same as ARG1. */
9017 tree high_result
9018 = fold_build2 (code, type,
9019 eval_subst (arg0, cval1, maxval,
9020 cval2, minval),
9021 arg1);
9022 tree equal_result
9023 = fold_build2 (code, type,
9024 eval_subst (arg0, cval1, maxval,
9025 cval2, maxval),
9026 arg1);
9027 tree low_result
9028 = fold_build2 (code, type,
9029 eval_subst (arg0, cval1, minval,
9030 cval2, maxval),
9031 arg1);
9033 /* All three of these results should be 0 or 1. Confirm they are.
9034 Then use those values to select the proper code to use. */
9036 if (TREE_CODE (high_result) == INTEGER_CST
9037 && TREE_CODE (equal_result) == INTEGER_CST
9038 && TREE_CODE (low_result) == INTEGER_CST)
9040 /* Make a 3-bit mask with the high-order bit being the
9041 value for `>', the next for '=', and the low for '<'. */
9042 switch ((integer_onep (high_result) * 4)
9043 + (integer_onep (equal_result) * 2)
9044 + integer_onep (low_result))
9046 case 0:
9047 /* Always false. */
9048 return omit_one_operand (type, integer_zero_node, arg0);
9049 case 1:
9050 code = LT_EXPR;
9051 break;
9052 case 2:
9053 code = EQ_EXPR;
9054 break;
9055 case 3:
9056 code = LE_EXPR;
9057 break;
9058 case 4:
9059 code = GT_EXPR;
9060 break;
9061 case 5:
9062 code = NE_EXPR;
9063 break;
9064 case 6:
9065 code = GE_EXPR;
9066 break;
9067 case 7:
9068 /* Always true. */
9069 return omit_one_operand (type, integer_one_node, arg0);
9072 if (save_p)
9073 return save_expr (build2 (code, type, cval1, cval2));
9074 return fold_build2 (code, type, cval1, cval2);
9079 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9080 into a single range test. */
9081 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9082 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9083 && TREE_CODE (arg1) == INTEGER_CST
9084 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9085 && !integer_zerop (TREE_OPERAND (arg0, 1))
9086 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9087 && !TREE_OVERFLOW (arg1))
9089 tem = fold_div_compare (code, type, arg0, arg1);
9090 if (tem != NULL_TREE)
9091 return tem;
9094 /* Fold ~X op ~Y as Y op X. */
9095 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9096 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9098 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9099 return fold_build2 (code, type,
9100 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9101 TREE_OPERAND (arg0, 0));
9104 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9105 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9106 && TREE_CODE (arg1) == INTEGER_CST)
9108 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9109 return fold_build2 (swap_tree_comparison (code), type,
9110 TREE_OPERAND (arg0, 0),
9111 fold_build1 (BIT_NOT_EXPR, cmp_type,
9112 fold_convert (cmp_type, arg1)));
9115 return NULL_TREE;
9119 /* Subroutine of fold_binary. Optimize complex multiplications of the
9120 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9121 argument EXPR represents the expression "z" of type TYPE. */
9123 static tree
9124 fold_mult_zconjz (tree type, tree expr)
9126 tree itype = TREE_TYPE (type);
9127 tree rpart, ipart, tem;
9129 if (TREE_CODE (expr) == COMPLEX_EXPR)
9131 rpart = TREE_OPERAND (expr, 0);
9132 ipart = TREE_OPERAND (expr, 1);
9134 else if (TREE_CODE (expr) == COMPLEX_CST)
9136 rpart = TREE_REALPART (expr);
9137 ipart = TREE_IMAGPART (expr);
9139 else
9141 expr = save_expr (expr);
9142 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9143 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9146 rpart = save_expr (rpart);
9147 ipart = save_expr (ipart);
9148 tem = fold_build2 (PLUS_EXPR, itype,
9149 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9150 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9151 return fold_build2 (COMPLEX_EXPR, type, tem,
9152 fold_convert (itype, integer_zero_node));
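/* For illustration only -- for z = a + b*i the identity used above
   is z * conj(z) = (a*a + b*b) + 0*i, so the folded product is a
   nonnegative real:  */
#if 0
static double
example_zconjz (double a, double b)
{
  return a * a + b * b;  /* real part; the imaginary part is 0 */
}
#endif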
9156 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9157 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9158 guarantees that P and N have the same least significant log2(M) bits.
9159 N is not otherwise constrained. In particular, N is not normalized to
9160 0 <= N < M as is common. In general, the precise value of P is unknown.
9161 M is chosen as large as possible such that constant N can be determined.
9163 Returns M and sets *RESIDUE to N. */
9165 static unsigned HOST_WIDE_INT
9166 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9168 enum tree_code code;
9170 *residue = 0;
9172 code = TREE_CODE (expr);
9173 if (code == ADDR_EXPR)
9175 expr = TREE_OPERAND (expr, 0);
9176 if (handled_component_p (expr))
9178 HOST_WIDE_INT bitsize, bitpos;
9179 tree offset;
9180 enum machine_mode mode;
9181 int unsignedp, volatilep;
9183 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9184 &mode, &unsignedp, &volatilep, false);
9185 *residue = bitpos / BITS_PER_UNIT;
9186 if (offset)
9188 if (TREE_CODE (offset) == INTEGER_CST)
9189 *residue += TREE_INT_CST_LOW (offset);
9190 else
9191 /* We don't handle more complicated offset expressions. */
9192 return 1;
9196 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9197 return DECL_ALIGN_UNIT (expr);
9199 else if (code == POINTER_PLUS_EXPR)
9201 tree op0, op1;
9202 unsigned HOST_WIDE_INT modulus;
9203 enum tree_code inner_code;
9205 op0 = TREE_OPERAND (expr, 0);
9206 STRIP_NOPS (op0);
9207 modulus = get_pointer_modulus_and_residue (op0, residue);
9209 op1 = TREE_OPERAND (expr, 1);
9210 STRIP_NOPS (op1);
9211 inner_code = TREE_CODE (op1);
9212 if (inner_code == INTEGER_CST)
9214 *residue += TREE_INT_CST_LOW (op1);
9215 return modulus;
9217 else if (inner_code == MULT_EXPR)
9219 op1 = TREE_OPERAND (op1, 1);
9220 if (TREE_CODE (op1) == INTEGER_CST)
9222 unsigned HOST_WIDE_INT align;
9224 /* Compute the greatest power-of-2 divisor of op1. */
9225 align = TREE_INT_CST_LOW (op1);
9226 align &= -align;
9228 /* If align is non-zero and less than *modulus, replace
9229 *modulus with align. If align is 0, then either op1 is 0
9230 or the greatest power-of-2 divisor of op1 doesn't fit in an
9231 unsigned HOST_WIDE_INT. In either case, no additional
9232 constraint is imposed. */
9233 if (align)
9234 modulus = MIN (modulus, align);
9236 return modulus;
9241 /* If we get here, we were unable to determine anything useful about the
9242 expression. */
9243 return 1;
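
/* An illustration (not part of GCC) of the modulus/residue contract:
   the address of byte 5 of an 8-byte-aligned object is congruent to
   5 mod 8, so modulus 8 and residue 5 let `(uintptr_t) p & 7' fold
   to the constant 5.  */
#include <stdint.h>
#include <assert.h>

static __attribute__ ((aligned (8))) char aligned_buf[16];

static void
modulus_residue_example (void)
{
  uintptr_t p = (uintptr_t) &aligned_buf[5];  /* modulus 8, residue 5 */
  assert ((p & 7) == 5);
}
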
9247 /* Fold a binary expression of code CODE and type TYPE with operands
9248 OP0 and OP1. Return the folded expression if folding is
9249 successful. Otherwise, return NULL_TREE. */
9251 tree
9252 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9254 enum tree_code_class kind = TREE_CODE_CLASS (code);
9255 tree arg0, arg1, tem;
9256 tree t1 = NULL_TREE;
9257 bool strict_overflow_p;
9259 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9260 && TREE_CODE_LENGTH (code) == 2
9261 && op0 != NULL_TREE
9262 && op1 != NULL_TREE);
9264 arg0 = op0;
9265 arg1 = op1;
9267 /* Strip any conversions that don't change the mode. This is
9268 safe for every expression, except for a comparison expression
9269 because its signedness is derived from its operands. So, in
9270 the latter case, only strip conversions that don't change the
9271 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9272 preserved.
9274 Note that this is done as an internal manipulation within the
9275 constant folder, in order to find the simplest representation
9276 of the arguments so that their form can be studied. In any
9277 cases, the appropriate type conversions should be put back in
9278 the tree that will get out of the constant folder. */
9280 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9282 STRIP_SIGN_NOPS (arg0);
9283 STRIP_SIGN_NOPS (arg1);
9285 else
9287 STRIP_NOPS (arg0);
9288 STRIP_NOPS (arg1);
9291 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9292 constant but we can't do arithmetic on them. */
9293 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9294 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9295 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9296 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9297 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9298 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9300 if (kind == tcc_binary)
9302 /* Make sure type and arg0 have the same saturating flag. */
9303 gcc_assert (TYPE_SATURATING (type)
9304 == TYPE_SATURATING (TREE_TYPE (arg0)));
9305 tem = const_binop (code, arg0, arg1, 0);
9307 else if (kind == tcc_comparison)
9308 tem = fold_relational_const (code, type, arg0, arg1);
9309 else
9310 tem = NULL_TREE;
9312 if (tem != NULL_TREE)
9314 if (TREE_TYPE (tem) != type)
9315 tem = fold_convert (type, tem);
9316 return tem;
9320 /* If this is a commutative operation, and ARG0 is a constant, move it
9321 to ARG1 to reduce the number of tests below. */
9322 if (commutative_tree_code (code)
9323 && tree_swap_operands_p (arg0, arg1, true))
9324 return fold_build2 (code, type, op1, op0);
9326 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9328 First check for cases where an arithmetic operation is applied to a
9329 compound, conditional, or comparison operation. Push the arithmetic
9330 operation inside the compound or conditional to see if any folding
9331 can then be done. Convert comparison to conditional for this purpose.
9332 This also optimizes non-constant cases that used to be done in
9333 expand_expr.
9335 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9336 where one of the operands is a comparison and the other is a comparison, a
9337 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9338 code below would make the expression more complex. Change it to a
9339 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9340 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9342 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9343 || code == EQ_EXPR || code == NE_EXPR)
9344 && ((truth_value_p (TREE_CODE (arg0))
9345 && (truth_value_p (TREE_CODE (arg1))
9346 || (TREE_CODE (arg1) == BIT_AND_EXPR
9347 && integer_onep (TREE_OPERAND (arg1, 1)))))
9348 || (truth_value_p (TREE_CODE (arg1))
9349 && (truth_value_p (TREE_CODE (arg0))
9350 || (TREE_CODE (arg0) == BIT_AND_EXPR
9351 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9353 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9354 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9355 : TRUTH_XOR_EXPR,
9356 boolean_type_node,
9357 fold_convert (boolean_type_node, arg0),
9358 fold_convert (boolean_type_node, arg1));
9360 if (code == EQ_EXPR)
9361 tem = invert_truthvalue (tem);
9363 return fold_convert (type, tem);
9366 if (TREE_CODE_CLASS (code) == tcc_binary
9367 || TREE_CODE_CLASS (code) == tcc_comparison)
9369 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9370 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9371 fold_build2 (code, type,
9372 fold_convert (TREE_TYPE (op0),
9373 TREE_OPERAND (arg0, 1)),
9374 op1));
9375 if (TREE_CODE (arg1) == COMPOUND_EXPR
9376 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9377 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9378 fold_build2 (code, type, op0,
9379 fold_convert (TREE_TYPE (op1),
9380 TREE_OPERAND (arg1, 1))));
9382 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9384 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9385 arg0, arg1,
9386 /*cond_first_p=*/1);
9387 if (tem != NULL_TREE)
9388 return tem;
9391 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9393 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9394 arg1, arg0,
9395 /*cond_first_p=*/0);
9396 if (tem != NULL_TREE)
9397 return tem;
9401 switch (code)
9403 case POINTER_PLUS_EXPR:
9404 /* 0 +p index -> (type)index */
9405 if (integer_zerop (arg0))
9406 return non_lvalue (fold_convert (type, arg1));
9408 /* PTR +p 0 -> PTR */
9409 if (integer_zerop (arg1))
9410 return non_lvalue (fold_convert (type, arg0));
9412 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9413 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9414 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9415 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9416 fold_convert (sizetype, arg1),
9417 fold_convert (sizetype, arg0)));
9419 /* index +p PTR -> PTR +p index */
9420 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9421 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9422 return fold_build2 (POINTER_PLUS_EXPR, type,
9423 fold_convert (type, arg1),
9424 fold_convert (sizetype, arg0));
9426 /* (PTR +p B) +p A -> PTR +p (B + A) */
9427 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9429 tree inner;
9430 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9431 tree arg00 = TREE_OPERAND (arg0, 0);
9432 inner = fold_build2 (PLUS_EXPR, sizetype,
9433 arg01, fold_convert (sizetype, arg1));
9434 return fold_convert (type,
9435 fold_build2 (POINTER_PLUS_EXPR,
9436 TREE_TYPE (arg00), arg00, inner));
9439 /* PTR_CST +p CST -> CST1 */
9440 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9441 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9443 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9444 of the array. The loop optimizer sometimes produces this type of
9445 expression. */
9446 if (TREE_CODE (arg0) == ADDR_EXPR)
9448 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9449 if (tem)
9450 return fold_convert (type, tem);
9453 return NULL_TREE;
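
/* The offset reassociation above, in source form (an illustrative
   sketch): (p + 4) + 8 and p + 12 denote the same address, so the
   two sizetype offsets are combined before being re-attached to the
   pointer.  */
static char *
pplus_unfolded (char *p) { return (p + 4) + 8; }

static char *
pplus_folded (char *p) { return p + 12; }
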
9455 case PLUS_EXPR:
9456 /* PTR + INT -> (INT)(PTR p+ INT) */
9457 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9458 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9459 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9460 TREE_TYPE (arg0),
9461 arg0,
9462 fold_convert (sizetype, arg1)));
9463 /* INT + PTR -> (INT)(PTR p+ INT) */
9464 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9465 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9466 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9467 TREE_TYPE (arg1),
9468 arg1,
9469 fold_convert (sizetype, arg0)));
9470 /* A + (-B) -> A - B */
9471 if (TREE_CODE (arg1) == NEGATE_EXPR)
9472 return fold_build2 (MINUS_EXPR, type,
9473 fold_convert (type, arg0),
9474 fold_convert (type, TREE_OPERAND (arg1, 0)));
9475 /* (-A) + B -> B - A */
9476 if (TREE_CODE (arg0) == NEGATE_EXPR
9477 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9478 return fold_build2 (MINUS_EXPR, type,
9479 fold_convert (type, arg1),
9480 fold_convert (type, TREE_OPERAND (arg0, 0)));
9482 if (INTEGRAL_TYPE_P (type))
9484 /* Convert ~A + 1 to -A. */
9485 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9486 && integer_onep (arg1))
9487 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9489 /* ~X + X is -1. */
9490 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9491 && !TYPE_OVERFLOW_TRAPS (type))
9493 tree tem = TREE_OPERAND (arg0, 0);
9495 STRIP_NOPS (tem);
9496 if (operand_equal_p (tem, arg1, 0))
9498 t1 = build_int_cst_type (type, -1);
9499 return omit_one_operand (type, t1, arg1);
9503 /* X + ~X is -1. */
9504 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9505 && !TYPE_OVERFLOW_TRAPS (type))
9507 tree tem = TREE_OPERAND (arg1, 0);
9509 STRIP_NOPS (tem);
9510 if (operand_equal_p (arg0, tem, 0))
9512 t1 = build_int_cst_type (type, -1);
9513 return omit_one_operand (type, t1, arg0);
9517 /* X + (X / CST) * -CST is X % CST. */
9518 if (TREE_CODE (arg1) == MULT_EXPR
9519 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9520 && operand_equal_p (arg0,
9521 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9523 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9524 tree cst1 = TREE_OPERAND (arg1, 1);
9525 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9526 if (sum && integer_zerop (sum))
9527 return fold_convert (type,
9528 fold_build2 (TRUNC_MOD_EXPR,
9529 TREE_TYPE (arg0), arg0, cst0));
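
/* A worked instance (sketch) of the fold above: C division truncates
   toward zero and x == (x / 8) * 8 + x % 8 by definition, so
   x + (x / 8) * -8 is exactly x % 8.  */
static int
mod_unfolded (int x) { return x + (x / 8) * -8; }

static int
mod_folded (int x) { return x % 8; }
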
9533 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9534 same or one. Make sure type is not saturating.
9535 fold_plusminus_mult_expr will re-associate. */
9536 if ((TREE_CODE (arg0) == MULT_EXPR
9537 || TREE_CODE (arg1) == MULT_EXPR)
9538 && !TYPE_SATURATING (type)
9539 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9541 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9542 if (tem)
9543 return tem;
9546 if (! FLOAT_TYPE_P (type))
9548 if (integer_zerop (arg1))
9549 return non_lvalue (fold_convert (type, arg0));
9551 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9552 with a constant, and the two constants have no bits in common,
9553 we should treat this as a BIT_IOR_EXPR since this may produce more
9554 simplifications. */
9555 if (TREE_CODE (arg0) == BIT_AND_EXPR
9556 && TREE_CODE (arg1) == BIT_AND_EXPR
9557 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9558 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9559 && integer_zerop (const_binop (BIT_AND_EXPR,
9560 TREE_OPERAND (arg0, 1),
9561 TREE_OPERAND (arg1, 1), 0)))
9563 code = BIT_IOR_EXPR;
9564 goto bit_ior;
9567 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9568 (plus (plus (mult) (mult)) (foo)) so that we can
9569 take advantage of the factoring cases below. */
9570 if (((TREE_CODE (arg0) == PLUS_EXPR
9571 || TREE_CODE (arg0) == MINUS_EXPR)
9572 && TREE_CODE (arg1) == MULT_EXPR)
9573 || ((TREE_CODE (arg1) == PLUS_EXPR
9574 || TREE_CODE (arg1) == MINUS_EXPR)
9575 && TREE_CODE (arg0) == MULT_EXPR))
9577 tree parg0, parg1, parg, marg;
9578 enum tree_code pcode;
9580 if (TREE_CODE (arg1) == MULT_EXPR)
9581 parg = arg0, marg = arg1;
9582 else
9583 parg = arg1, marg = arg0;
9584 pcode = TREE_CODE (parg);
9585 parg0 = TREE_OPERAND (parg, 0);
9586 parg1 = TREE_OPERAND (parg, 1);
9587 STRIP_NOPS (parg0);
9588 STRIP_NOPS (parg1);
9590 if (TREE_CODE (parg0) == MULT_EXPR
9591 && TREE_CODE (parg1) != MULT_EXPR)
9592 return fold_build2 (pcode, type,
9593 fold_build2 (PLUS_EXPR, type,
9594 fold_convert (type, parg0),
9595 fold_convert (type, marg)),
9596 fold_convert (type, parg1));
9597 if (TREE_CODE (parg0) != MULT_EXPR
9598 && TREE_CODE (parg1) == MULT_EXPR)
9599 return fold_build2 (PLUS_EXPR, type,
9600 fold_convert (type, parg0),
9601 fold_build2 (pcode, type,
9602 fold_convert (type, marg),
9603 fold_convert (type,
9604 parg1)));
9607 else
9609 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9610 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9611 return non_lvalue (fold_convert (type, arg0));
9613 /* Likewise if the operands are reversed. */
9614 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9615 return non_lvalue (fold_convert (type, arg1));
9617 /* Convert X + -C into X - C. */
9618 if (TREE_CODE (arg1) == REAL_CST
9619 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9621 tem = fold_negate_const (arg1, type);
9622 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9623 return fold_build2 (MINUS_EXPR, type,
9624 fold_convert (type, arg0),
9625 fold_convert (type, tem));
9628 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9629 to __complex__ ( x, y ). This is not the same for SNaNs or
9630 if signed zeros are involved. */
9631 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9632 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9633 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9635 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9636 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9637 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9638 bool arg0rz = false, arg0iz = false;
9639 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9640 || (arg0i && (arg0iz = real_zerop (arg0i))))
9642 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9643 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9644 if (arg0rz && arg1i && real_zerop (arg1i))
9646 tree rp = arg1r ? arg1r
9647 : build1 (REALPART_EXPR, rtype, arg1);
9648 tree ip = arg0i ? arg0i
9649 : build1 (IMAGPART_EXPR, rtype, arg0);
9650 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9652 else if (arg0iz && arg1r && real_zerop (arg1r))
9654 tree rp = arg0r ? arg0r
9655 : build1 (REALPART_EXPR, rtype, arg0);
9656 tree ip = arg1i ? arg1i
9657 : build1 (IMAGPART_EXPR, rtype, arg1);
9658 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9663 if (flag_unsafe_math_optimizations
9664 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9665 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9666 && (tem = distribute_real_division (code, type, arg0, arg1)))
9667 return tem;
9669 /* Convert x+x into x*2.0. */
9670 if (operand_equal_p (arg0, arg1, 0)
9671 && SCALAR_FLOAT_TYPE_P (type))
9672 return fold_build2 (MULT_EXPR, type, arg0,
9673 build_real (type, dconst2));
9675 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9676 We associate floats only if the user has specified
9677 -fassociative-math. */
9678 if (flag_associative_math
9679 && TREE_CODE (arg1) == PLUS_EXPR
9680 && TREE_CODE (arg0) != MULT_EXPR)
9682 tree tree10 = TREE_OPERAND (arg1, 0);
9683 tree tree11 = TREE_OPERAND (arg1, 1);
9684 if (TREE_CODE (tree11) == MULT_EXPR
9685 && TREE_CODE (tree10) == MULT_EXPR)
9687 tree tree0;
9688 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9689 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9692 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9693 We associate floats only if the user has specified
9694 -fassociative-math. */
9695 if (flag_associative_math
9696 && TREE_CODE (arg0) == PLUS_EXPR
9697 && TREE_CODE (arg1) != MULT_EXPR)
9699 tree tree00 = TREE_OPERAND (arg0, 0);
9700 tree tree01 = TREE_OPERAND (arg0, 1);
9701 if (TREE_CODE (tree01) == MULT_EXPR
9702 && TREE_CODE (tree00) == MULT_EXPR)
9704 tree tree0;
9705 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9706 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9711 bit_rotate:
9712 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9713 is a rotate of A by C1 bits. */
9714 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9715 is a rotate of A by B bits. */
9717 enum tree_code code0, code1;
9718 tree rtype;
9719 code0 = TREE_CODE (arg0);
9720 code1 = TREE_CODE (arg1);
9721 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9722 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9723 && operand_equal_p (TREE_OPERAND (arg0, 0),
9724 TREE_OPERAND (arg1, 0), 0)
9725 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9726 TYPE_UNSIGNED (rtype))
9727 /* Only create rotates in complete modes. Other cases are not
9728 expanded properly. */
9729 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9731 tree tree01, tree11;
9732 enum tree_code code01, code11;
9734 tree01 = TREE_OPERAND (arg0, 1);
9735 tree11 = TREE_OPERAND (arg1, 1);
9736 STRIP_NOPS (tree01);
9737 STRIP_NOPS (tree11);
9738 code01 = TREE_CODE (tree01);
9739 code11 = TREE_CODE (tree11);
9740 if (code01 == INTEGER_CST
9741 && code11 == INTEGER_CST
9742 && TREE_INT_CST_HIGH (tree01) == 0
9743 && TREE_INT_CST_HIGH (tree11) == 0
9744 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9745 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9746 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9747 code0 == LSHIFT_EXPR ? tree01 : tree11);
9748 else if (code11 == MINUS_EXPR)
9750 tree tree110, tree111;
9751 tree110 = TREE_OPERAND (tree11, 0);
9752 tree111 = TREE_OPERAND (tree11, 1);
9753 STRIP_NOPS (tree110);
9754 STRIP_NOPS (tree111);
9755 if (TREE_CODE (tree110) == INTEGER_CST
9756 && 0 == compare_tree_int (tree110,
9757 TYPE_PRECISION
9758 (TREE_TYPE (TREE_OPERAND
9759 (arg0, 0))))
9760 && operand_equal_p (tree01, tree111, 0))
9761 return build2 ((code0 == LSHIFT_EXPR
9762 ? LROTATE_EXPR
9763 : RROTATE_EXPR),
9764 type, TREE_OPERAND (arg0, 0), tree01);
9766 else if (code01 == MINUS_EXPR)
9768 tree tree010, tree011;
9769 tree010 = TREE_OPERAND (tree01, 0);
9770 tree011 = TREE_OPERAND (tree01, 1);
9771 STRIP_NOPS (tree010);
9772 STRIP_NOPS (tree011);
9773 if (TREE_CODE (tree010) == INTEGER_CST
9774 && 0 == compare_tree_int (tree010,
9775 TYPE_PRECISION
9776 (TREE_TYPE (TREE_OPERAND
9777 (arg0, 0))))
9778 && operand_equal_p (tree11, tree011, 0))
9779 return build2 ((code0 != LSHIFT_EXPR
9780 ? LROTATE_EXPR
9781 : RROTATE_EXPR),
9782 type, TREE_OPERAND (arg0, 0), tree11);
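
/* The rotate idiom recognized above, in source form (illustrative;
   the same shape is matched for `+', `|' and `^' via the bit_rotate
   label): for unsigned 32-bit x and 0 < n < 32, both shifts combine
   into a single LROTATE_EXPR.  */
#include <stdint.h>

static uint32_t
rotl32 (uint32_t x, unsigned n)  /* caller guarantees 0 < n < 32 */
{
  return (x << n) | (x >> (32 - n));
}
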
9787 associate:
9788 /* In most languages, we can't associate operations on floats through
9789 parentheses. Rather than remember where the parentheses were, we
9790 don't associate floats at all, unless the user has specified
9791 -fassociative-math.
9792 And, we need to make sure type is not saturating. */
9794 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9795 && !TYPE_SATURATING (type))
9797 tree var0, con0, lit0, minus_lit0;
9798 tree var1, con1, lit1, minus_lit1;
9799 bool ok = true;
9801 /* Split both trees into variables, constants, and literals. Then
9802 associate each group together, the constants with literals,
9803 then the result with variables. This increases the chances of
9804 literals being recombined later and of generating relocatable
9805 expressions for the sum of a constant and literal. */
9806 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9807 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9808 code == MINUS_EXPR);
9810 /* With undefined overflow we can only associate constants
9811 with one variable. */
9812 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9813 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9814 && var0 && var1)
9816 tree tmp0 = var0;
9817 tree tmp1 = var1;
9819 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9820 tmp0 = TREE_OPERAND (tmp0, 0);
9821 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9822 tmp1 = TREE_OPERAND (tmp1, 0);
9823 /* The only case we can still associate with two variables
9824 is if they are the same, modulo negation. */
9825 if (!operand_equal_p (tmp0, tmp1, 0))
9826 ok = false;
9829 /* Only do something if we found more than two objects. Otherwise,
9830 nothing has changed and we risk infinite recursion. */
9831 if (ok
9832 && (2 < ((var0 != 0) + (var1 != 0)
9833 + (con0 != 0) + (con1 != 0)
9834 + (lit0 != 0) + (lit1 != 0)
9835 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9837 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9838 if (code == MINUS_EXPR)
9839 code = PLUS_EXPR;
9841 var0 = associate_trees (var0, var1, code, type);
9842 con0 = associate_trees (con0, con1, code, type);
9843 lit0 = associate_trees (lit0, lit1, code, type);
9844 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9846 /* Preserve the MINUS_EXPR if the negative part of the literal is
9847 greater than the positive part. Otherwise, the multiplicative
9848 folding code (i.e. extract_muldiv) may be fooled when
9849 unsigned constants are subtracted, as in the following
9850 example: ((X*2 + 4) - 8U)/2. */
9851 if (minus_lit0 && lit0)
9853 if (TREE_CODE (lit0) == INTEGER_CST
9854 && TREE_CODE (minus_lit0) == INTEGER_CST
9855 && tree_int_cst_lt (lit0, minus_lit0))
9857 minus_lit0 = associate_trees (minus_lit0, lit0,
9858 MINUS_EXPR, type);
9859 lit0 = 0;
9861 else
9863 lit0 = associate_trees (lit0, minus_lit0,
9864 MINUS_EXPR, type);
9865 minus_lit0 = 0;
9868 if (minus_lit0)
9870 if (con0 == 0)
9871 return fold_convert (type,
9872 associate_trees (var0, minus_lit0,
9873 MINUS_EXPR, type));
9874 else
9876 con0 = associate_trees (con0, minus_lit0,
9877 MINUS_EXPR, type);
9878 return fold_convert (type,
9879 associate_trees (var0, con0,
9880 PLUS_EXPR, type));
9884 con0 = associate_trees (con0, lit0, code, type);
9885 return fold_convert (type, associate_trees (var0, con0,
9886 code, type));
9890 return NULL_TREE;
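
/* A worked instance (sketch) of the association above: split_tree
   separates (x + 4) + 5 into the variable x and the literals 4 and 5;
   the literals are combined first, so only one addition remains.  */
static int
assoc_unfolded (int x) { return (x + 4) + 5; }

static int
assoc_folded (int x) { return x + 9; }
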
9892 case MINUS_EXPR:
9893 /* Pointer simplifications for subtraction, simple reassociations. */
9894 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9896 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9897 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9898 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9900 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9901 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9902 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9903 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9904 return fold_build2 (PLUS_EXPR, type,
9905 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9906 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9908 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9909 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9911 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9912 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9913 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9914 if (tmp)
9915 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9918 /* A - (-B) -> A + B */
9919 if (TREE_CODE (arg1) == NEGATE_EXPR)
9920 return fold_build2 (PLUS_EXPR, type, op0,
9921 fold_convert (type, TREE_OPERAND (arg1, 0)));
9922 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9923 if (TREE_CODE (arg0) == NEGATE_EXPR
9924 && (FLOAT_TYPE_P (type)
9925 || INTEGRAL_TYPE_P (type))
9926 && negate_expr_p (arg1)
9927 && reorder_operands_p (arg0, arg1))
9928 return fold_build2 (MINUS_EXPR, type,
9929 fold_convert (type, negate_expr (arg1)),
9930 fold_convert (type, TREE_OPERAND (arg0, 0)));
9931 /* Convert -A - 1 to ~A. */
9932 if (INTEGRAL_TYPE_P (type)
9933 && TREE_CODE (arg0) == NEGATE_EXPR
9934 && integer_onep (arg1)
9935 && !TYPE_OVERFLOW_TRAPS (type))
9936 return fold_build1 (BIT_NOT_EXPR, type,
9937 fold_convert (type, TREE_OPERAND (arg0, 0)));
9939 /* Convert -1 - A to ~A. */
9940 if (INTEGRAL_TYPE_P (type)
9941 && integer_all_onesp (arg0))
9942 return fold_build1 (BIT_NOT_EXPR, type, op1);
9945 /* X - (X / CST) * CST is X % CST. */
9946 if (INTEGRAL_TYPE_P (type)
9947 && TREE_CODE (arg1) == MULT_EXPR
9948 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9949 && operand_equal_p (arg0,
9950 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9951 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9952 TREE_OPERAND (arg1, 1), 0))
9953 return fold_convert (type,
9954 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9955 arg0, TREE_OPERAND (arg1, 1)));
9957 if (! FLOAT_TYPE_P (type))
9959 if (integer_zerop (arg0))
9960 return negate_expr (fold_convert (type, arg1));
9961 if (integer_zerop (arg1))
9962 return non_lvalue (fold_convert (type, arg0));
9964 /* Fold A - (A & B) into ~B & A. */
9965 if (!TREE_SIDE_EFFECTS (arg0)
9966 && TREE_CODE (arg1) == BIT_AND_EXPR)
9968 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9970 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9971 return fold_build2 (BIT_AND_EXPR, type,
9972 fold_build1 (BIT_NOT_EXPR, type, arg10),
9973 fold_convert (type, arg0));
9975 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9977 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9978 return fold_build2 (BIT_AND_EXPR, type,
9979 fold_build1 (BIT_NOT_EXPR, type, arg11),
9980 fold_convert (type, arg0));
9984 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9985 any power of 2 minus 1. */
9986 if (TREE_CODE (arg0) == BIT_AND_EXPR
9987 && TREE_CODE (arg1) == BIT_AND_EXPR
9988 && operand_equal_p (TREE_OPERAND (arg0, 0),
9989 TREE_OPERAND (arg1, 0), 0))
9991 tree mask0 = TREE_OPERAND (arg0, 1);
9992 tree mask1 = TREE_OPERAND (arg1, 1);
9993 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9995 if (operand_equal_p (tem, mask1, 0))
9997 tem = fold_build2 (BIT_XOR_EXPR, type,
9998 TREE_OPERAND (arg0, 0), mask1);
9999 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10004 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10005 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10006 return non_lvalue (fold_convert (type, arg0));
10008 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10009 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10010 (-ARG1 + ARG0) reduces to -ARG1. */
10011 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10012 return negate_expr (fold_convert (type, arg1));
10014 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10015 __complex__ ( x, -y ). This is not the same for SNaNs or if
10016 signed zeros are involved. */
10017 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10018 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10019 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10021 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10022 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10023 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10024 bool arg0rz = false, arg0iz = false;
10025 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10026 || (arg0i && (arg0iz = real_zerop (arg0i))))
10028 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10029 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10030 if (arg0rz && arg1i && real_zerop (arg1i))
10032 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10033 arg1r ? arg1r
10034 : build1 (REALPART_EXPR, rtype, arg1));
10035 tree ip = arg0i ? arg0i
10036 : build1 (IMAGPART_EXPR, rtype, arg0);
10037 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10039 else if (arg0iz && arg1r && real_zerop (arg1r))
10041 tree rp = arg0r ? arg0r
10042 : build1 (REALPART_EXPR, rtype, arg0);
10043 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10044 arg1i ? arg1i
10045 : build1 (IMAGPART_EXPR, rtype, arg1));
10046 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10051 /* Fold &x - &x. This can happen from &x.foo - &x.
10052 This is unsafe for certain floats even in non-IEEE formats.
10053 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10054 Also note that operand_equal_p is always false if an operand
10055 is volatile. */
10057 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10058 && operand_equal_p (arg0, arg1, 0))
10059 return fold_convert (type, integer_zero_node);
10061 /* A - B -> A + (-B) if B is easily negatable. */
10062 if (negate_expr_p (arg1)
10063 && ((FLOAT_TYPE_P (type)
10064 /* Avoid this transformation if B is a positive REAL_CST. */
10065 && (TREE_CODE (arg1) != REAL_CST
10066 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10067 || INTEGRAL_TYPE_P (type)))
10068 return fold_build2 (PLUS_EXPR, type,
10069 fold_convert (type, arg0),
10070 fold_convert (type, negate_expr (arg1)));
10072 /* Try folding difference of addresses. */
10074 HOST_WIDE_INT diff;
10076 if ((TREE_CODE (arg0) == ADDR_EXPR
10077 || TREE_CODE (arg1) == ADDR_EXPR)
10078 && ptr_difference_const (arg0, arg1, &diff))
10079 return build_int_cst_type (type, diff);
10082 /* Fold &a[i] - &a[j] to i-j. */
10083 if (TREE_CODE (arg0) == ADDR_EXPR
10084 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10085 && TREE_CODE (arg1) == ADDR_EXPR
10086 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10088 tree aref0 = TREE_OPERAND (arg0, 0);
10089 tree aref1 = TREE_OPERAND (arg1, 0);
10090 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10091 TREE_OPERAND (aref1, 0), 0))
10093 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10094 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10095 tree esz = array_ref_element_size (aref0);
10096 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10097 return fold_build2 (MULT_EXPR, type, diff,
10098 fold_convert (type, esz));
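
/* A worked instance (sketch) of the fold above: the byte difference
   of two element addresses reduces to the index difference scaled by
   the element size, with no address arithmetic.  */
#include <stddef.h>

static ptrdiff_t
aref_diff_unfolded (double a[], int i, int j)
{ return (char *) &a[i] - (char *) &a[j]; }

static ptrdiff_t
aref_diff_folded (double a[], int i, int j)
{ (void) a; return (i - j) * (ptrdiff_t) sizeof (double); }
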
10103 if (flag_unsafe_math_optimizations
10104 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10105 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10106 && (tem = distribute_real_division (code, type, arg0, arg1)))
10107 return tem;
10109 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10110 same or one. Make sure type is not saturating.
10111 fold_plusminus_mult_expr will re-associate. */
10112 if ((TREE_CODE (arg0) == MULT_EXPR
10113 || TREE_CODE (arg1) == MULT_EXPR)
10114 && !TYPE_SATURATING (type)
10115 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10117 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10118 if (tem)
10119 return tem;
10122 goto associate;
10124 case MULT_EXPR:
10125 /* (-A) * (-B) -> A * B */
10126 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10127 return fold_build2 (MULT_EXPR, type,
10128 fold_convert (type, TREE_OPERAND (arg0, 0)),
10129 fold_convert (type, negate_expr (arg1)));
10130 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10131 return fold_build2 (MULT_EXPR, type,
10132 fold_convert (type, negate_expr (arg0)),
10133 fold_convert (type, TREE_OPERAND (arg1, 0)));
10135 if (! FLOAT_TYPE_P (type))
10137 if (integer_zerop (arg1))
10138 return omit_one_operand (type, arg1, arg0);
10139 if (integer_onep (arg1))
10140 return non_lvalue (fold_convert (type, arg0));
10141 /* Transform x * -1 into -x. Make sure to do the negation
10142 on the original operand with conversions not stripped
10143 because we can only strip non-sign-changing conversions. */
10144 if (integer_all_onesp (arg1))
10145 return fold_convert (type, negate_expr (op0));
10146 /* Transform x * -C into -x * C if x is easily negatable. */
10147 if (TREE_CODE (arg1) == INTEGER_CST
10148 && tree_int_cst_sgn (arg1) == -1
10149 && negate_expr_p (arg0)
10150 && (tem = negate_expr (arg1)) != arg1
10151 && !TREE_OVERFLOW (tem))
10152 return fold_build2 (MULT_EXPR, type,
10153 fold_convert (type, negate_expr (arg0)), tem);
10155 /* (a * (1 << b)) is (a << b) */
10156 if (TREE_CODE (arg1) == LSHIFT_EXPR
10157 && integer_onep (TREE_OPERAND (arg1, 0)))
10158 return fold_build2 (LSHIFT_EXPR, type, op0,
10159 TREE_OPERAND (arg1, 1));
10160 if (TREE_CODE (arg0) == LSHIFT_EXPR
10161 && integer_onep (TREE_OPERAND (arg0, 0)))
10162 return fold_build2 (LSHIFT_EXPR, type, op1,
10163 TREE_OPERAND (arg0, 1));
10165 /* (A + A) * C -> A * 2 * C */
10166 if (TREE_CODE (arg0) == PLUS_EXPR
10167 && TREE_CODE (arg1) == INTEGER_CST
10168 && operand_equal_p (TREE_OPERAND (arg0, 0),
10169 TREE_OPERAND (arg0, 1), 0))
10170 return fold_build2 (MULT_EXPR, type,
10171 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10172 TREE_OPERAND (arg0, 1)),
10173 fold_build2 (MULT_EXPR, type,
10174 build_int_cst (type, 2), arg1));
10176 strict_overflow_p = false;
10177 if (TREE_CODE (arg1) == INTEGER_CST
10178 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10179 &strict_overflow_p)))
10181 if (strict_overflow_p)
10182 fold_overflow_warning (("assuming signed overflow does not "
10183 "occur when simplifying "
10184 "multiplication"),
10185 WARN_STRICT_OVERFLOW_MISC);
10186 return fold_convert (type, tem);
10189 /* Optimize z * conj(z) for integer complex numbers. */
10190 if (TREE_CODE (arg0) == CONJ_EXPR
10191 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10192 return fold_mult_zconjz (type, arg1);
10193 if (TREE_CODE (arg1) == CONJ_EXPR
10194 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10195 return fold_mult_zconjz (type, arg0);
10197 else
10199 /* Maybe fold x * 0 to 0. The expressions aren't the same
10200 when x is NaN, since x * 0 is also NaN. Nor are they the
10201 same in modes with signed zeros, since multiplying a
10202 negative value by 0 gives -0, not +0. */
10203 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10204 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10205 && real_zerop (arg1))
10206 return omit_one_operand (type, arg1, arg0);
10207 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs. */
10208 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10209 && real_onep (arg1))
10210 return non_lvalue (fold_convert (type, arg0));
10212 /* Transform x * -1.0 into -x. */
10213 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10214 && real_minus_onep (arg1))
10215 return fold_convert (type, negate_expr (arg0));
10217 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10218 the result for floating point types due to rounding, so it is applied
10219 only if -fassociative-math is specified. */
10220 if (flag_associative_math
10221 && TREE_CODE (arg0) == RDIV_EXPR
10222 && TREE_CODE (arg1) == REAL_CST
10223 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10225 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10226 arg1, 0);
10227 if (tem)
10228 return fold_build2 (RDIV_EXPR, type, tem,
10229 TREE_OPERAND (arg0, 1));
10232 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10233 if (operand_equal_p (arg0, arg1, 0))
10235 tree tem = fold_strip_sign_ops (arg0);
10236 if (tem != NULL_TREE)
10238 tem = fold_convert (type, tem);
10239 return fold_build2 (MULT_EXPR, type, tem, tem);
10243 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10244 This is not the same for NaNs or if signed zeros are
10245 involved. */
10246 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10247 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10248 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10249 && TREE_CODE (arg1) == COMPLEX_CST
10250 && real_zerop (TREE_REALPART (arg1)))
10252 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10253 if (real_onep (TREE_IMAGPART (arg1)))
10254 return fold_build2 (COMPLEX_EXPR, type,
10255 negate_expr (fold_build1 (IMAGPART_EXPR,
10256 rtype, arg0)),
10257 fold_build1 (REALPART_EXPR, rtype, arg0));
10258 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10259 return fold_build2 (COMPLEX_EXPR, type,
10260 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10261 negate_expr (fold_build1 (REALPART_EXPR,
10262 rtype, arg0)));
10265 /* Optimize z * conj(z) for floating point complex numbers.
10266 Guarded by flag_unsafe_math_optimizations as non-finite
10267 imaginary components don't produce scalar results. */
10268 if (flag_unsafe_math_optimizations
10269 && TREE_CODE (arg0) == CONJ_EXPR
10270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10271 return fold_mult_zconjz (type, arg1);
10272 if (flag_unsafe_math_optimizations
10273 && TREE_CODE (arg1) == CONJ_EXPR
10274 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10275 return fold_mult_zconjz (type, arg0);
10277 if (flag_unsafe_math_optimizations)
10279 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10280 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10282 /* Optimizations of root(...)*root(...). */
10283 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10285 tree rootfn, arg;
10286 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10287 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10289 /* Optimize sqrt(x)*sqrt(x) as x. */
10290 if (BUILTIN_SQRT_P (fcode0)
10291 && operand_equal_p (arg00, arg10, 0)
10292 && ! HONOR_SNANS (TYPE_MODE (type)))
10293 return arg00;
10295 /* Optimize root(x)*root(y) as root(x*y). */
10296 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10297 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10298 return build_call_expr (rootfn, 1, arg);
10301 /* Optimize expN(x)*expN(y) as expN(x+y). */
10302 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10304 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10305 tree arg = fold_build2 (PLUS_EXPR, type,
10306 CALL_EXPR_ARG (arg0, 0),
10307 CALL_EXPR_ARG (arg1, 0));
10308 return build_call_expr (expfn, 1, arg);
10311 /* Optimizations of pow(...)*pow(...). */
10312 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10313 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10314 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10316 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10317 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10318 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10319 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10321 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10322 if (operand_equal_p (arg01, arg11, 0))
10324 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10325 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10326 return build_call_expr (powfn, 2, arg, arg01);
10329 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10330 if (operand_equal_p (arg00, arg10, 0))
10332 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10333 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10334 return build_call_expr (powfn, 2, arg00, arg);
10338 /* Optimize tan(x)*cos(x) as sin(x). */
10339 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10340 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10341 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10342 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10343 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10344 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10345 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10346 CALL_EXPR_ARG (arg1, 0), 0))
10348 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10350 if (sinfn != NULL_TREE)
10351 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10354 /* Optimize x*pow(x,c) as pow(x,c+1). */
10355 if (fcode1 == BUILT_IN_POW
10356 || fcode1 == BUILT_IN_POWF
10357 || fcode1 == BUILT_IN_POWL)
10359 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10360 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10361 if (TREE_CODE (arg11) == REAL_CST
10362 && !TREE_OVERFLOW (arg11)
10363 && operand_equal_p (arg0, arg10, 0))
10365 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10366 REAL_VALUE_TYPE c;
10367 tree arg;
10369 c = TREE_REAL_CST (arg11);
10370 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10371 arg = build_real (type, c);
10372 return build_call_expr (powfn, 2, arg0, arg);
10376 /* Optimize pow(x,c)*x as pow(x,c+1). */
10377 if (fcode0 == BUILT_IN_POW
10378 || fcode0 == BUILT_IN_POWF
10379 || fcode0 == BUILT_IN_POWL)
10381 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10382 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10383 if (TREE_CODE (arg01) == REAL_CST
10384 && !TREE_OVERFLOW (arg01)
10385 && operand_equal_p (arg1, arg00, 0))
10387 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10388 REAL_VALUE_TYPE c;
10389 tree arg;
10391 c = TREE_REAL_CST (arg01);
10392 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10393 arg = build_real (type, c);
10394 return build_call_expr (powfn, 2, arg1, arg);
10398 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10399 if (! optimize_size
10400 && operand_equal_p (arg0, arg1, 0))
10402 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10404 if (powfn)
10406 tree arg = build_real (type, dconst2);
10407 return build_call_expr (powfn, 2, arg0, arg);
10412 goto associate;
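
/* The -funsafe-math-optimizations identities used above, in source
   form (illustrative; the results need only agree approximately in
   floating point, which is why the flag is required):
     sqrt (x) * sqrt (x) -> x            (no SNaNs)
     exp (x) * exp (y)   -> exp (x + y)
     pow (x, c) * x      -> pow (x, c + 1)
     tan (x) * cos (x)   -> sin (x)  */
#include <math.h>

static double
expmul_unfolded (double x, double y) { return exp (x) * exp (y); }

static double
expmul_folded (double x, double y) { return exp (x + y); }
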
10414 case BIT_IOR_EXPR:
10415 bit_ior:
10416 if (integer_all_onesp (arg1))
10417 return omit_one_operand (type, arg1, arg0);
10418 if (integer_zerop (arg1))
10419 return non_lvalue (fold_convert (type, arg0));
10420 if (operand_equal_p (arg0, arg1, 0))
10421 return non_lvalue (fold_convert (type, arg0));
10423 /* ~X | X is -1. */
10424 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10425 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10427 t1 = fold_convert (type, integer_zero_node);
10428 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10429 return omit_one_operand (type, t1, arg1);
10432 /* X | ~X is -1. */
10433 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10434 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10436 t1 = fold_convert (type, integer_zero_node);
10437 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10438 return omit_one_operand (type, t1, arg0);
10441 /* Canonicalize (X & C1) | C2. */
10442 if (TREE_CODE (arg0) == BIT_AND_EXPR
10443 && TREE_CODE (arg1) == INTEGER_CST
10444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10446 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10447 int width = TYPE_PRECISION (type), w;
10448 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10449 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10450 hi2 = TREE_INT_CST_HIGH (arg1);
10451 lo2 = TREE_INT_CST_LOW (arg1);
10453 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10454 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10455 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10457 if (width > HOST_BITS_PER_WIDE_INT)
10459 mhi = (unsigned HOST_WIDE_INT) -1
10460 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10461 mlo = -1;
10463 else
10465 mhi = 0;
10466 mlo = (unsigned HOST_WIDE_INT) -1
10467 >> (HOST_BITS_PER_WIDE_INT - width);
10470 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10471 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10472 return fold_build2 (BIT_IOR_EXPR, type,
10473 TREE_OPERAND (arg0, 0), arg1);
10475 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10476 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10477 mode which allows further optimizations. */
10478 hi1 &= mhi;
10479 lo1 &= mlo;
10480 hi2 &= mhi;
10481 lo2 &= mlo;
10482 hi3 = hi1 & ~hi2;
10483 lo3 = lo1 & ~lo2;
10484 for (w = BITS_PER_UNIT;
10485 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10486 w <<= 1)
10488 unsigned HOST_WIDE_INT mask
10489 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10490 if (((lo1 | lo2) & mask) == mask
10491 && (lo1 & ~mask) == 0 && hi1 == 0)
10493 hi3 = 0;
10494 lo3 = mask;
10495 break;
10498 if (hi3 != hi1 || lo3 != lo1)
10499 return fold_build2 (BIT_IOR_EXPR, type,
10500 fold_build2 (BIT_AND_EXPR, type,
10501 TREE_OPERAND (arg0, 0),
10502 build_int_cst_wide (type,
10503 lo3, hi3)),
10504 arg1);
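
/* A worked instance (sketch) of the canonicalization above: bits of
   C1 that C2 already forces to 1 are dead, so C1 is minimized via
   C1 & ~C2, e.g. (x & 0xff) | 0x0f becomes (x & 0xf0) | 0x0f; and
   when C1 | C2 covers the whole type, the AND disappears.  */
static unsigned
ior_unfolded (unsigned x) { return (x & 0xffu) | 0x0fu; }

static unsigned
ior_folded (unsigned x) { return (x & 0xf0u) | 0x0fu; }
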
10507 /* (X & Y) | Y is (X, Y). */
10508 if (TREE_CODE (arg0) == BIT_AND_EXPR
10509 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10510 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10511 /* (X & Y) | X is (Y, X). */
10512 if (TREE_CODE (arg0) == BIT_AND_EXPR
10513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10514 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10515 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10516 /* X | (X & Y) is (Y, X). */
10517 if (TREE_CODE (arg1) == BIT_AND_EXPR
10518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10519 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10520 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10521 /* X | (Y & X) is (Y, X). */
10522 if (TREE_CODE (arg1) == BIT_AND_EXPR
10523 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10524 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10525 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10527 t1 = distribute_bit_expr (code, type, arg0, arg1);
10528 if (t1 != NULL_TREE)
10529 return t1;
10531 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10533 This results in more efficient code for machines without a NAND
10534 instruction. Combine will canonicalize to the first form
10535 which will allow use of NAND instructions provided by the
10536 backend if they exist. */
10537 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10538 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10540 return fold_build1 (BIT_NOT_EXPR, type,
10541 build2 (BIT_AND_EXPR, type,
10542 fold_convert (type,
10543 TREE_OPERAND (arg0, 0)),
10544 fold_convert (type,
10545 TREE_OPERAND (arg1, 0))));
10548 /* See if this can be simplified into a rotate first. If that
10549 is unsuccessful continue in the association code. */
10550 goto bit_rotate;
10552 case BIT_XOR_EXPR:
10553 if (integer_zerop (arg1))
10554 return non_lvalue (fold_convert (type, arg0));
10555 if (integer_all_onesp (arg1))
10556 return fold_build1 (BIT_NOT_EXPR, type, op0);
10557 if (operand_equal_p (arg0, arg1, 0))
10558 return omit_one_operand (type, integer_zero_node, arg0);
10560 /* ~X ^ X is -1. */
10561 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10562 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10564 t1 = fold_convert (type, integer_zero_node);
10565 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10566 return omit_one_operand (type, t1, arg1);
10569 /* X ^ ~X is -1. */
10570 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10571 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10573 t1 = fold_convert (type, integer_zero_node);
10574 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10575 return omit_one_operand (type, t1, arg0);
10578 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10579 with a constant, and the two constants have no bits in common,
10580 we should treat this as a BIT_IOR_EXPR since this may produce more
10581 simplifications. */
10582 if (TREE_CODE (arg0) == BIT_AND_EXPR
10583 && TREE_CODE (arg1) == BIT_AND_EXPR
10584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10585 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10586 && integer_zerop (const_binop (BIT_AND_EXPR,
10587 TREE_OPERAND (arg0, 1),
10588 TREE_OPERAND (arg1, 1), 0)))
10590 code = BIT_IOR_EXPR;
10591 goto bit_ior;
10594 /* (X | Y) ^ X -> Y & ~X. */
10595 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10596 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10598 tree t2 = TREE_OPERAND (arg0, 1);
10599 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10600 arg1);
10601 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10602 fold_convert (type, t1));
10603 return t1;
10606 /* (Y | X) ^ X -> Y & ~X. */
10607 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10608 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10610 tree t2 = TREE_OPERAND (arg0, 0);
10611 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10612 arg1);
10613 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10614 fold_convert (type, t1));
10615 return t1;
10618 /* X ^ (X | Y) -> Y & ~X. */
10619 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10620 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10622 tree t2 = TREE_OPERAND (arg1, 1);
10623 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10624 arg0);
10625 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10626 fold_convert (type, t1));
10627 return t1;
10630 /* X ^ (Y | X) -> Y & ~X. */
10631 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10632 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10634 tree t2 = TREE_OPERAND (arg1, 0);
10635 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10636 arg0);
10637 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10638 fold_convert (type, t1));
10639 return t1;
10642 /* Convert ~X ^ ~Y to X ^ Y. */
10643 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10644 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10645 return fold_build2 (code, type,
10646 fold_convert (type, TREE_OPERAND (arg0, 0)),
10647 fold_convert (type, TREE_OPERAND (arg1, 0)));
10649 /* Convert ~X ^ C to X ^ ~C. */
10650 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10651 && TREE_CODE (arg1) == INTEGER_CST)
10652 return fold_build2 (code, type,
10653 fold_convert (type, TREE_OPERAND (arg0, 0)),
10654 fold_build1 (BIT_NOT_EXPR, type, arg1));
10656 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10657 if (TREE_CODE (arg0) == BIT_AND_EXPR
10658 && integer_onep (TREE_OPERAND (arg0, 1))
10659 && integer_onep (arg1))
10660 return fold_build2 (EQ_EXPR, type, arg0,
10661 build_int_cst (TREE_TYPE (arg0), 0));
10663 /* Fold (X & Y) ^ Y as ~X & Y. */
10664 if (TREE_CODE (arg0) == BIT_AND_EXPR
10665 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10667 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10668 return fold_build2 (BIT_AND_EXPR, type,
10669 fold_build1 (BIT_NOT_EXPR, type, tem),
10670 fold_convert (type, arg1));
10672 /* Fold (X & Y) ^ X as ~Y & X. */
10673 if (TREE_CODE (arg0) == BIT_AND_EXPR
10674 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10675 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10677 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10678 return fold_build2 (BIT_AND_EXPR, type,
10679 fold_build1 (BIT_NOT_EXPR, type, tem),
10680 fold_convert (type, arg1));
10682 /* Fold X ^ (X & Y) as X & ~Y. */
10683 if (TREE_CODE (arg1) == BIT_AND_EXPR
10684 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10686 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10687 return fold_build2 (BIT_AND_EXPR, type,
10688 fold_convert (type, arg0),
10689 fold_build1 (BIT_NOT_EXPR, type, tem));
10691 /* Fold X ^ (Y & X) as ~Y & X. */
10692 if (TREE_CODE (arg1) == BIT_AND_EXPR
10693 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10694 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10696 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10697 return fold_build2 (BIT_AND_EXPR, type,
10698 fold_build1 (BIT_NOT_EXPR, type, tem),
10699 fold_convert (type, arg0));
10702 /* See if this can be simplified into a rotate first. If that
10703 is unsuccessful continue in the association code. */
10704 goto bit_rotate;
10706 case BIT_AND_EXPR:
10707 if (integer_all_onesp (arg1))
10708 return non_lvalue (fold_convert (type, arg0));
10709 if (integer_zerop (arg1))
10710 return omit_one_operand (type, arg1, arg0);
10711 if (operand_equal_p (arg0, arg1, 0))
10712 return non_lvalue (fold_convert (type, arg0));
10714 /* ~X & X is always zero. */
10715 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10716 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10717 return omit_one_operand (type, integer_zero_node, arg1);
10719 /* X & ~X is always zero. */
10720 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10721 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10722 return omit_one_operand (type, integer_zero_node, arg0);
10724 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10725 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10726 && TREE_CODE (arg1) == INTEGER_CST
10727 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10729 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10730 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10731 TREE_OPERAND (arg0, 0), tmp1);
10732 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10733 TREE_OPERAND (arg0, 1), tmp1);
10734 return fold_convert (type,
10735 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10736 tmp2, tmp3));
10739 /* (X | Y) & Y is (X, Y). */
10740 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10741 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10742 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10743 /* (X | Y) & X is (Y, X). */
10744 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10745 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10746 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10747 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10748 /* X & (X | Y) is (Y, X). */
10749 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10750 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10751 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10752 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10753 /* X & (Y | X) is (Y, X). */
10754 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10756 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10757 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10759 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10760 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10761 && integer_onep (TREE_OPERAND (arg0, 1))
10762 && integer_onep (arg1))
10764 tem = TREE_OPERAND (arg0, 0);
10765 return fold_build2 (EQ_EXPR, type,
10766 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10767 build_int_cst (TREE_TYPE (tem), 1)),
10768 build_int_cst (TREE_TYPE (tem), 0));
10770 /* Fold ~X & 1 as (X & 1) == 0. */
10771 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10772 && integer_onep (arg1))
10774 tem = TREE_OPERAND (arg0, 0);
10775 return fold_build2 (EQ_EXPR, type,
10776 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10777 build_int_cst (TREE_TYPE (tem), 1)),
10778 build_int_cst (TREE_TYPE (tem), 0));
10781 /* Fold (X ^ Y) & Y as ~X & Y. */
10782 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10783 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10785 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10786 return fold_build2 (BIT_AND_EXPR, type,
10787 fold_build1 (BIT_NOT_EXPR, type, tem),
10788 fold_convert (type, arg1));
10790 /* Fold (X ^ Y) & X as ~Y & X. */
10791 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10792 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10793 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10795 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10796 return fold_build2 (BIT_AND_EXPR, type,
10797 fold_build1 (BIT_NOT_EXPR, type, tem),
10798 fold_convert (type, arg1));
10800 /* Fold X & (X ^ Y) as X & ~Y. */
10801 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10802 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10804 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10805 return fold_build2 (BIT_AND_EXPR, type,
10806 fold_convert (type, arg0),
10807 fold_build1 (BIT_NOT_EXPR, type, tem));
10809 /* Fold X & (Y ^ X) as ~Y & X. */
10810 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10812 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10814 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10815 return fold_build2 (BIT_AND_EXPR, type,
10816 fold_build1 (BIT_NOT_EXPR, type, tem),
10817 fold_convert (type, arg0));
10820 t1 = distribute_bit_expr (code, type, arg0, arg1);
10821 if (t1 != NULL_TREE)
10822 return t1;
10823 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10824 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10825 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10827 unsigned int prec
10828 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10830 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10831 && (~TREE_INT_CST_LOW (arg1)
10832 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10833 return fold_convert (type, TREE_OPERAND (arg0, 0));
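/* An illustrative example of the simplification above (added for
   exposition, assuming an 8-bit unsigned char):

     unsigned char c;
     (int) c & 0377   =>   (int) c

   The zero-extended value of C can only have bits inside the mask
   0377 set, so the BIT_AND_EXPR is redundant. */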
10836 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10838 This results in more efficient code for machines without a NOR
10839 instruction. Combine will canonicalize to the first form
10840 which will allow use of NOR instructions provided by the
10841 backend if they exist. */
10842 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10843 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10845 return fold_build1 (BIT_NOT_EXPR, type,
10846 build2 (BIT_IOR_EXPR, type,
10847 fold_convert (type,
10848 TREE_OPERAND (arg0, 0)),
10849 fold_convert (type,
10850 TREE_OPERAND (arg1, 0))));
10853 /* If arg0 is derived from the address of an object or function, we may
10854 be able to fold this expression using the object or function's
10855 alignment. */
10856 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10858 unsigned HOST_WIDE_INT modulus, residue;
10859 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10861 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10863 /* This works because modulus is a power of 2. If this weren't the
10864 case, we'd have to replace it by its greatest power-of-2
10865 divisor: modulus & -modulus. */
10866 if (low < modulus)
10867 return build_int_cst (type, residue & low);
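/* An illustrative example (added for exposition, not from the original
   source): if BUF is known to be 8-byte aligned, a source-level test
   such as

     ((uintptr_t) &buf) & 7   =>   0

   folds to the constant residue, here 0, because the alignment-derived
   modulus 8 exceeds the mask. */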
10870 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10871 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10872 if the new mask might be further optimized. */
10873 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10874 || TREE_CODE (arg0) == RSHIFT_EXPR)
10875 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10876 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10877 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10878 < TYPE_PRECISION (TREE_TYPE (arg0))
10879 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10880 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10882 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10883 unsigned HOST_WIDE_INT mask
10884 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10885 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10886 tree shift_type = TREE_TYPE (arg0);
10888 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10889 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10890 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10891 && TYPE_PRECISION (TREE_TYPE (arg0))
10892 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10894 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10895 tree arg00 = TREE_OPERAND (arg0, 0);
10896 /* See if more bits can be proven as zero because of
10897 zero extension. */
10898 if (TREE_CODE (arg00) == NOP_EXPR
10899 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10901 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10902 if (TYPE_PRECISION (inner_type)
10903 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10904 && TYPE_PRECISION (inner_type) < prec)
10906 prec = TYPE_PRECISION (inner_type);
10907 /* See if we can shorten the right shift. */
10908 if (shiftc < prec)
10909 shift_type = inner_type;
10912 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10913 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10914 zerobits <<= prec - shiftc;
10915 /* For an arithmetic shift, if the sign bit could be set, zerobits
10916 can actually contain sign bits, so no transformation is
10917 possible, unless MASK masks them all away. In that
10918 case the shift needs to be converted into a logical shift. */
10919 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10920 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10922 if ((mask & zerobits) == 0)
10923 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10924 else
10925 zerobits = 0;
10929 /* ((X << 16) & 0xff00) is (X, 0). */
10930 if ((mask & zerobits) == mask)
10931 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10933 newmask = mask | zerobits;
10934 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10936 unsigned int prec;
10938 /* Only do the transformation if NEWMASK is some integer
10939 mode's mask. */
10940 for (prec = BITS_PER_UNIT;
10941 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10942 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10943 break;
10944 if (prec < HOST_BITS_PER_WIDE_INT
10945 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10947 if (shift_type != TREE_TYPE (arg0))
10949 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10950 fold_convert (shift_type,
10951 TREE_OPERAND (arg0, 0)),
10952 TREE_OPERAND (arg0, 1));
10953 tem = fold_convert (type, tem);
10955 else
10956 tem = op0;
10957 return fold_build2 (BIT_AND_EXPR, type, tem,
10958 build_int_cst_type (TREE_TYPE (op1),
10959 newmask));
10964 goto associate;
10966 case RDIV_EXPR:
10967 /* Don't touch a floating-point divide by zero unless the mode
10968 of the constant can represent infinity. */
10969 if (TREE_CODE (arg1) == REAL_CST
10970 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10971 && real_zerop (arg1))
10972 return NULL_TREE;
10974 /* Optimize A / A to 1.0 if we don't care about
10975 NaNs or Infinities. Skip the transformation
10976 for non-real operands. */
10977 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10978 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10979 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10980 && operand_equal_p (arg0, arg1, 0))
10982 tree r = build_real (TREE_TYPE (arg0), dconst1);
10984 return omit_two_operands (type, r, arg0, arg1);
10987 /* The complex version of the above A / A optimization. */
10988 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10989 && operand_equal_p (arg0, arg1, 0))
10991 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10992 if (! HONOR_NANS (TYPE_MODE (elem_type))
10993 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10995 tree r = build_real (elem_type, dconst1);
10996 /* omit_two_operands will call fold_convert for us. */
10997 return omit_two_operands (type, r, arg0, arg1);
11001 /* (-A) / (-B) -> A / B */
11002 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11003 return fold_build2 (RDIV_EXPR, type,
11004 TREE_OPERAND (arg0, 0),
11005 negate_expr (arg1));
11006 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11007 return fold_build2 (RDIV_EXPR, type,
11008 negate_expr (arg0),
11009 TREE_OPERAND (arg1, 0));
11011 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11012 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11013 && real_onep (arg1))
11014 return non_lvalue (fold_convert (type, arg0));
11016 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11017 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11018 && real_minus_onep (arg1))
11019 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11021 /* If ARG1 is a constant, we can convert this to a multiply by the
11022 reciprocal. This does not have the same rounding properties,
11023 so only do this if -freciprocal-math. We can actually
11024 always safely do it if ARG1 is a power of two, but it's hard to
11025 tell if it is or not in a portable manner. */
11026 if (TREE_CODE (arg1) == REAL_CST)
11028 if (flag_reciprocal_math
11029 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11030 arg1, 0)))
11031 return fold_build2 (MULT_EXPR, type, arg0, tem);
11032 /* Find the reciprocal if optimizing and the result is exact. */
11033 if (optimize)
11035 REAL_VALUE_TYPE r;
11036 r = TREE_REAL_CST (arg1);
11037 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11039 tem = build_real (type, r);
11040 return fold_build2 (MULT_EXPR, type,
11041 fold_convert (type, arg0), tem);
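/* Illustrative examples of the two reciprocal folds above (added for
   exposition):

     x / 2.0   =>   x * 0.5      the inverse of 2.0 is exact, so this
                                 is done whenever optimizing
     x / 3.0   =>   x * C        with C the rounded value of 1/3.0;
                                 only done under -freciprocal-math */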
11045 /* Convert A/B/C to A/(B*C). */
11046 if (flag_reciprocal_math
11047 && TREE_CODE (arg0) == RDIV_EXPR)
11048 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11049 fold_build2 (MULT_EXPR, type,
11050 TREE_OPERAND (arg0, 1), arg1));
11052 /* Convert A/(B/C) to (A/B)*C. */
11053 if (flag_reciprocal_math
11054 && TREE_CODE (arg1) == RDIV_EXPR)
11055 return fold_build2 (MULT_EXPR, type,
11056 fold_build2 (RDIV_EXPR, type, arg0,
11057 TREE_OPERAND (arg1, 0)),
11058 TREE_OPERAND (arg1, 1));
11060 /* Convert C1/(X*C2) into (C1/C2)/X. */
11061 if (flag_reciprocal_math
11062 && TREE_CODE (arg1) == MULT_EXPR
11063 && TREE_CODE (arg0) == REAL_CST
11064 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11066 tree tem = const_binop (RDIV_EXPR, arg0,
11067 TREE_OPERAND (arg1, 1), 0);
11068 if (tem)
11069 return fold_build2 (RDIV_EXPR, type, tem,
11070 TREE_OPERAND (arg1, 0));
11073 if (flag_unsafe_math_optimizations)
11075 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11076 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11078 /* Optimize sin(x)/cos(x) as tan(x). */
11079 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11080 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11081 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11082 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11083 CALL_EXPR_ARG (arg1, 0), 0))
11085 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11087 if (tanfn != NULL_TREE)
11088 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
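/* Illustrative example (added for exposition): under
   -funsafe-math-optimizations,

     sin (x) / cos (x)   =>   tan (x)

   and likewise for the sinf/cosf and sinl/cosl variants. */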
11091 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11092 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11093 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11094 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11095 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11096 CALL_EXPR_ARG (arg1, 0), 0))
11098 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11100 if (tanfn != NULL_TREE)
11102 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11103 return fold_build2 (RDIV_EXPR, type,
11104 build_real (type, dconst1), tmp);
11108 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11109 NaNs or Infinities. */
11110 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11111 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11112 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11114 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11115 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11117 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11118 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11119 && operand_equal_p (arg00, arg01, 0))
11121 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11123 if (cosfn != NULL_TREE)
11124 return build_call_expr (cosfn, 1, arg00);
11128 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11129 NaNs or Infinities. */
11130 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11131 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11132 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11134 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11135 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11137 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11138 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11139 && operand_equal_p (arg00, arg01, 0))
11141 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11143 if (cosfn != NULL_TREE)
11145 tree tmp = build_call_expr (cosfn, 1, arg00);
11146 return fold_build2 (RDIV_EXPR, type,
11147 build_real (type, dconst1),
11148 tmp);
11153 /* Optimize pow(x,c)/x as pow(x,c-1). */
11154 if (fcode0 == BUILT_IN_POW
11155 || fcode0 == BUILT_IN_POWF
11156 || fcode0 == BUILT_IN_POWL)
11158 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11159 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11160 if (TREE_CODE (arg01) == REAL_CST
11161 && !TREE_OVERFLOW (arg01)
11162 && operand_equal_p (arg1, arg00, 0))
11164 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11165 REAL_VALUE_TYPE c;
11166 tree arg;
11168 c = TREE_REAL_CST (arg01);
11169 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11170 arg = build_real (type, c);
11171 return build_call_expr (powfn, 2, arg1, arg);
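/* Illustrative example (added for exposition): under
   -funsafe-math-optimizations,

     pow (x, 4.0) / x   =>   pow (x, 3.0)

   i.e. the constant exponent is decremented by one. */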
11175 /* Optimize a/root(b/c) into a*root(c/b). */
11176 if (BUILTIN_ROOT_P (fcode1))
11178 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11180 if (TREE_CODE (rootarg) == RDIV_EXPR)
11182 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11183 tree b = TREE_OPERAND (rootarg, 0);
11184 tree c = TREE_OPERAND (rootarg, 1);
11186 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11188 tmp = build_call_expr (rootfn, 1, tmp);
11189 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11193 /* Optimize x/expN(y) into x*expN(-y). */
11194 if (BUILTIN_EXPONENT_P (fcode1))
11196 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11197 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11198 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11199 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11202 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11203 if (fcode1 == BUILT_IN_POW
11204 || fcode1 == BUILT_IN_POWF
11205 || fcode1 == BUILT_IN_POWL)
11207 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11208 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11209 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11210 tree neg11 = fold_convert (type, negate_expr (arg11));
11211 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11212 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11215 return NULL_TREE;
11217 case TRUNC_DIV_EXPR:
11218 case FLOOR_DIV_EXPR:
11219 /* Simplify A / (B << N) where A and B are positive and B is
11220 a power of 2, to A >> (N + log2(B)). */
11221 strict_overflow_p = false;
11222 if (TREE_CODE (arg1) == LSHIFT_EXPR
11223 && (TYPE_UNSIGNED (type)
11224 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11226 tree sval = TREE_OPERAND (arg1, 0);
11227 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11229 tree sh_cnt = TREE_OPERAND (arg1, 1);
11230 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11232 if (strict_overflow_p)
11233 fold_overflow_warning (("assuming signed overflow does not "
11234 "occur when simplifying A / (B << N)"),
11235 WARN_STRICT_OVERFLOW_MISC);
11237 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11238 sh_cnt, build_int_cst (NULL_TREE, pow2));
11239 return fold_build2 (RSHIFT_EXPR, type,
11240 fold_convert (type, arg0), sh_cnt);
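/* Illustrative example (added for exposition, assuming unsigned A):

     a / (2 << n)   =>   a >> (n + 1)

   since 2 is a power of 2 and log2 (2) == 1. */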
11244 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11245 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11246 if (INTEGRAL_TYPE_P (type)
11247 && TYPE_UNSIGNED (type)
11248 && code == FLOOR_DIV_EXPR)
11249 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11251 /* Fall through. */
11253 case ROUND_DIV_EXPR:
11254 case CEIL_DIV_EXPR:
11255 case EXACT_DIV_EXPR:
11256 if (integer_onep (arg1))
11257 return non_lvalue (fold_convert (type, arg0));
11258 if (integer_zerop (arg1))
11259 return NULL_TREE;
11260 /* X / -1 is -X. */
11261 if (!TYPE_UNSIGNED (type)
11262 && TREE_CODE (arg1) == INTEGER_CST
11263 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11264 && TREE_INT_CST_HIGH (arg1) == -1)
11265 return fold_convert (type, negate_expr (arg0));
11267 /* Convert -A / -B to A / B when the type is signed and overflow is
11268 undefined. */
11269 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11270 && TREE_CODE (arg0) == NEGATE_EXPR
11271 && negate_expr_p (arg1))
11273 if (INTEGRAL_TYPE_P (type))
11274 fold_overflow_warning (("assuming signed overflow does not occur "
11275 "when distributing negation across "
11276 "division"),
11277 WARN_STRICT_OVERFLOW_MISC);
11278 return fold_build2 (code, type,
11279 fold_convert (type, TREE_OPERAND (arg0, 0)),
11280 negate_expr (arg1));
11282 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11283 && TREE_CODE (arg1) == NEGATE_EXPR
11284 && negate_expr_p (arg0))
11286 if (INTEGRAL_TYPE_P (type))
11287 fold_overflow_warning (("assuming signed overflow does not occur "
11288 "when distributing negation across "
11289 "division"),
11290 WARN_STRICT_OVERFLOW_MISC);
11291 return fold_build2 (code, type, negate_expr (arg0),
11292 TREE_OPERAND (arg1, 0));
11295 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11296 operation, EXACT_DIV_EXPR.
11298 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11299 At one time others generated faster code; it's not clear whether they
11300 still do after the last round of changes to the DIV code in expmed.c. */
11301 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11302 && multiple_of_p (type, arg0, arg1))
11303 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11305 strict_overflow_p = false;
11306 if (TREE_CODE (arg1) == INTEGER_CST
11307 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11308 &strict_overflow_p)))
11310 if (strict_overflow_p)
11311 fold_overflow_warning (("assuming signed overflow does not occur "
11312 "when simplifying division"),
11313 WARN_STRICT_OVERFLOW_MISC);
11314 return fold_convert (type, tem);
11317 return NULL_TREE;
11319 case CEIL_MOD_EXPR:
11320 case FLOOR_MOD_EXPR:
11321 case ROUND_MOD_EXPR:
11322 case TRUNC_MOD_EXPR:
11323 /* X % 1 is always zero, but be sure to preserve any side
11324 effects in X. */
11325 if (integer_onep (arg1))
11326 return omit_one_operand (type, integer_zero_node, arg0);
11328 /* For X % 0, return X % 0 unchanged so that we can get the
11329 proper warnings and errors. */
11330 if (integer_zerop (arg1))
11331 return NULL_TREE;
11333 /* 0 % X is always zero, but be sure to preserve any side
11334 effects in X. Place this after checking for X == 0. */
11335 if (integer_zerop (arg0))
11336 return omit_one_operand (type, integer_zero_node, arg1);
11338 /* X % -1 is zero. */
11339 if (!TYPE_UNSIGNED (type)
11340 && TREE_CODE (arg1) == INTEGER_CST
11341 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11342 && TREE_INT_CST_HIGH (arg1) == -1)
11343 return omit_one_operand (type, integer_zero_node, arg0);
11345 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11346 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11347 strict_overflow_p = false;
11348 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11349 && (TYPE_UNSIGNED (type)
11350 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11352 tree c = arg1;
11353 /* Also optimize A % (C << N) where C is a power of 2,
11354 to A & ((C << N) - 1). */
11355 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11356 c = TREE_OPERAND (arg1, 0);
11358 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11360 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11361 build_int_cst (TREE_TYPE (arg1), 1));
11362 if (strict_overflow_p)
11363 fold_overflow_warning (("assuming signed overflow does not "
11364 "occur when simplifying "
11365 "X % (power of two)"),
11366 WARN_STRICT_OVERFLOW_MISC);
11367 return fold_build2 (BIT_AND_EXPR, type,
11368 fold_convert (type, arg0),
11369 fold_convert (type, mask));
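/* Illustrative examples (added for exposition, assuming unsigned X):

     x % 8          =>   x & 7
     x % (4 << n)   =>   x & ((4 << n) - 1)

   the second being the A % (C << N) case handled just above. */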
11373 /* X % -C is the same as X % C. */
11374 if (code == TRUNC_MOD_EXPR
11375 && !TYPE_UNSIGNED (type)
11376 && TREE_CODE (arg1) == INTEGER_CST
11377 && !TREE_OVERFLOW (arg1)
11378 && TREE_INT_CST_HIGH (arg1) < 0
11379 && !TYPE_OVERFLOW_TRAPS (type)
11380 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11381 && !sign_bit_p (arg1, arg1))
11382 return fold_build2 (code, type, fold_convert (type, arg0),
11383 fold_convert (type, negate_expr (arg1)));
11385 /* X % -Y is the same as X % Y. */
11386 if (code == TRUNC_MOD_EXPR
11387 && !TYPE_UNSIGNED (type)
11388 && TREE_CODE (arg1) == NEGATE_EXPR
11389 && !TYPE_OVERFLOW_TRAPS (type))
11390 return fold_build2 (code, type, fold_convert (type, arg0),
11391 fold_convert (type, TREE_OPERAND (arg1, 0)));
11393 if (TREE_CODE (arg1) == INTEGER_CST
11394 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11395 &strict_overflow_p)))
11397 if (strict_overflow_p)
11398 fold_overflow_warning (("assuming signed overflow does not occur "
11399 "when simplifying modulus"),
11400 WARN_STRICT_OVERFLOW_MISC);
11401 return fold_convert (type, tem);
11404 return NULL_TREE;
11406 case LROTATE_EXPR:
11407 case RROTATE_EXPR:
11408 if (integer_all_onesp (arg0))
11409 return omit_one_operand (type, arg0, arg1);
11410 goto shift;
11412 case RSHIFT_EXPR:
11413 /* Optimize -1 >> x for arithmetic right shifts. */
11414 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11415 return omit_one_operand (type, arg0, arg1);
11416 /* ... fall through ... */
11418 case LSHIFT_EXPR:
11419 shift:
11420 if (integer_zerop (arg1))
11421 return non_lvalue (fold_convert (type, arg0));
11422 if (integer_zerop (arg0))
11423 return omit_one_operand (type, arg0, arg1);
11425 /* Since negative shift count is not well-defined,
11426 don't try to compute it in the compiler. */
11427 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11428 return NULL_TREE;
11430 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11431 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11432 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11433 && host_integerp (TREE_OPERAND (arg0, 1), false)
11434 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11436 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11437 + TREE_INT_CST_LOW (arg1));
11439 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11440 being well defined. */
11441 if (low >= TYPE_PRECISION (type))
11443 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11444 low = low % TYPE_PRECISION (type);
11445 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11446 return build_int_cst (type, 0);
11447 else
11448 low = TYPE_PRECISION (type) - 1;
11451 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11452 build_int_cst (type, low));
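/* Illustrative example (added for exposition, assuming 32-bit int):

     (x << 3) << 4    =>   x << 7
     (x << 20) << 12  =>   0

   In the second case the combined count reaches the precision, so a
   left shift folds to the constant 0. */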
11455 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11456 into x & ((unsigned)-1 >> c) for unsigned types. */
11457 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11458 || (TYPE_UNSIGNED (type)
11459 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11460 && host_integerp (arg1, false)
11461 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11462 && host_integerp (TREE_OPERAND (arg0, 1), false)
11463 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11465 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11466 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11467 tree lshift;
11468 tree arg00;
11470 if (low0 == low1)
11472 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11474 lshift = build_int_cst (type, -1);
11475 lshift = int_const_binop (code, lshift, arg1, 0);
11477 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11481 /* Rewrite an LROTATE_EXPR by a constant into an
11482 RROTATE_EXPR by a new constant. */
11483 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11485 tree tem = build_int_cst (TREE_TYPE (arg1),
11486 TYPE_PRECISION (type));
11487 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11488 return fold_build2 (RROTATE_EXPR, type, op0, tem);
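/* Illustrative example (added for exposition): for a 32-bit type, a
   rotate left by 8 becomes a rotate right by 32 - 8 == 24:

     x lrotate 8   =>   x rrotate 24 */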
11491 /* If we have a rotate of a bit operation with the rotate count and
11492 the second operand of the bit operation both constant,
11493 permute the two operations. */
11494 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11495 && (TREE_CODE (arg0) == BIT_AND_EXPR
11496 || TREE_CODE (arg0) == BIT_IOR_EXPR
11497 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11499 return fold_build2 (TREE_CODE (arg0), type,
11500 fold_build2 (code, type,
11501 TREE_OPERAND (arg0, 0), arg1),
11502 fold_build2 (code, type,
11503 TREE_OPERAND (arg0, 1), arg1));
11505 /* Two consecutive rotates adding up to the precision of the
11506 type can be ignored. */
11507 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11508 && TREE_CODE (arg0) == RROTATE_EXPR
11509 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11510 && TREE_INT_CST_HIGH (arg1) == 0
11511 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11512 && ((TREE_INT_CST_LOW (arg1)
11513 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11514 == (unsigned int) TYPE_PRECISION (type)))
11515 return TREE_OPERAND (arg0, 0);
11517 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11518 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11519 if the latter can be further optimized. */
11520 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11521 && TREE_CODE (arg0) == BIT_AND_EXPR
11522 && TREE_CODE (arg1) == INTEGER_CST
11523 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11525 tree mask = fold_build2 (code, type,
11526 fold_convert (type, TREE_OPERAND (arg0, 1)),
11527 arg1);
11528 tree shift = fold_build2 (code, type,
11529 fold_convert (type, TREE_OPERAND (arg0, 0)),
11530 arg1);
11531 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11532 if (tem)
11533 return tem;
11536 return NULL_TREE;
11538 case MIN_EXPR:
11539 if (operand_equal_p (arg0, arg1, 0))
11540 return omit_one_operand (type, arg0, arg1);
11541 if (INTEGRAL_TYPE_P (type)
11542 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11543 return omit_one_operand (type, arg1, arg0);
11544 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11545 if (tem)
11546 return tem;
11547 goto associate;
11549 case MAX_EXPR:
11550 if (operand_equal_p (arg0, arg1, 0))
11551 return omit_one_operand (type, arg0, arg1);
11552 if (INTEGRAL_TYPE_P (type)
11553 && TYPE_MAX_VALUE (type)
11554 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11555 return omit_one_operand (type, arg1, arg0);
11556 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11557 if (tem)
11558 return tem;
11559 goto associate;
11561 case TRUTH_ANDIF_EXPR:
11562 /* Note that the operands of this must be ints
11563 and their values must be 0 or 1.
11564 ("true" is a fixed value perhaps depending on the language.) */
11565 /* If first arg is constant zero, return it. */
11566 if (integer_zerop (arg0))
11567 return fold_convert (type, arg0);
11568 case TRUTH_AND_EXPR:
11569 /* If either arg is constant true, drop it. */
11570 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11571 return non_lvalue (fold_convert (type, arg1));
11572 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11573 /* Preserve sequence points. */
11574 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11575 return non_lvalue (fold_convert (type, arg0));
11576 /* If second arg is constant zero, result is zero, but first arg
11577 must be evaluated. */
11578 if (integer_zerop (arg1))
11579 return omit_one_operand (type, arg1, arg0);
11580 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11581 case will be handled here. */
11582 if (integer_zerop (arg0))
11583 return omit_one_operand (type, arg0, arg1);
11585 /* !X && X is always false. */
11586 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11588 return omit_one_operand (type, integer_zero_node, arg1);
11589 /* X && !X is always false. */
11590 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11591 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11592 return omit_one_operand (type, integer_zero_node, arg0);
11594 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11595 means A >= Y && A != MAX, but in this case we know that
11596 A < X <= MAX. */
11598 if (!TREE_SIDE_EFFECTS (arg0)
11599 && !TREE_SIDE_EFFECTS (arg1))
11601 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11602 if (tem && !operand_equal_p (tem, arg0, 0))
11603 return fold_build2 (code, type, tem, arg1);
11605 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11606 if (tem && !operand_equal_p (tem, arg1, 0))
11607 return fold_build2 (code, type, arg0, tem);
11610 truth_andor:
11611 /* We only do these simplifications if we are optimizing. */
11612 if (!optimize)
11613 return NULL_TREE;
11615 /* Check for things like (A || B) && (A || C). We can convert this
11616 to A || (B && C). Note that either operator can be any of the four
11617 truth and/or operations and the transformation will still be
11618 valid. Also note that we only care about order for the
11619 ANDIF and ORIF operators. If B contains side effects, this
11620 might change the truth-value of A. */
11621 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11622 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11623 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11624 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11625 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11626 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11628 tree a00 = TREE_OPERAND (arg0, 0);
11629 tree a01 = TREE_OPERAND (arg0, 1);
11630 tree a10 = TREE_OPERAND (arg1, 0);
11631 tree a11 = TREE_OPERAND (arg1, 1);
11632 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11633 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11634 && (code == TRUTH_AND_EXPR
11635 || code == TRUTH_OR_EXPR));
11637 if (operand_equal_p (a00, a10, 0))
11638 return fold_build2 (TREE_CODE (arg0), type, a00,
11639 fold_build2 (code, type, a01, a11));
11640 else if (commutative && operand_equal_p (a00, a11, 0))
11641 return fold_build2 (TREE_CODE (arg0), type, a00,
11642 fold_build2 (code, type, a01, a10));
11643 else if (commutative && operand_equal_p (a01, a10, 0))
11644 return fold_build2 (TREE_CODE (arg0), type, a01,
11645 fold_build2 (code, type, a00, a11));
11647 /* This case is tricky because we must either have commutative
11648 operators or else A10 must not have side-effects. */
11650 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11651 && operand_equal_p (a01, a11, 0))
11652 return fold_build2 (TREE_CODE (arg0), type,
11653 fold_build2 (code, type, a00, a10),
11654 a01);
11657 /* See if we can build a range comparison. */
11658 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11659 return tem;
11661 /* Check for the possibility of merging component references. If our
11662 lhs is another similar operation, try to merge its rhs with our
11663 rhs. Then try to merge our lhs and rhs. */
11664 if (TREE_CODE (arg0) == code
11665 && 0 != (tem = fold_truthop (code, type,
11666 TREE_OPERAND (arg0, 1), arg1)))
11667 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11669 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11670 return tem;
11672 return NULL_TREE;
11674 case TRUTH_ORIF_EXPR:
11675 /* Note that the operands of this must be ints
11676 and their values must be 0 or true.
11677 ("true" is a fixed value perhaps depending on the language.) */
11678 /* If first arg is constant true, return it. */
11679 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11680 return fold_convert (type, arg0);
11681 case TRUTH_OR_EXPR:
11682 /* If either arg is constant zero, drop it. */
11683 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11684 return non_lvalue (fold_convert (type, arg1));
11685 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11686 /* Preserve sequence points. */
11687 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11688 return non_lvalue (fold_convert (type, arg0));
11689 /* If second arg is constant true, result is true, but we must
11690 evaluate first arg. */
11691 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11692 return omit_one_operand (type, arg1, arg0);
11693 /* Likewise for first arg, but note this only occurs here for
11694 TRUTH_OR_EXPR. */
11695 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11696 return omit_one_operand (type, arg0, arg1);
11698 /* !X || X is always true. */
11699 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11700 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11701 return omit_one_operand (type, integer_one_node, arg1);
11702 /* X || !X is always true. */
11703 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11704 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11705 return omit_one_operand (type, integer_one_node, arg0);
11707 goto truth_andor;
11709 case TRUTH_XOR_EXPR:
11710 /* If the second arg is constant zero, drop it. */
11711 if (integer_zerop (arg1))
11712 return non_lvalue (fold_convert (type, arg0));
11713 /* If the second arg is constant true, this is a logical inversion. */
11714 if (integer_onep (arg1))
11716 /* Only call invert_truthvalue if operand is a truth value. */
11717 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11718 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11719 else
11720 tem = invert_truthvalue (arg0);
11721 return non_lvalue (fold_convert (type, tem));
11723 /* Identical arguments cancel to zero. */
11724 if (operand_equal_p (arg0, arg1, 0))
11725 return omit_one_operand (type, integer_zero_node, arg0);
11727 /* !X ^ X is always true. */
11728 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11730 return omit_one_operand (type, integer_one_node, arg1);
11732 /* X ^ !X is always true. */
11733 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11735 return omit_one_operand (type, integer_one_node, arg0);
11737 return NULL_TREE;
11739 case EQ_EXPR:
11740 case NE_EXPR:
11741 tem = fold_comparison (code, type, op0, op1);
11742 if (tem != NULL_TREE)
11743 return tem;
11745 /* bool_var != 0 becomes bool_var. */
11746 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11747 && code == NE_EXPR)
11748 return non_lvalue (fold_convert (type, arg0));
11750 /* bool_var == 1 becomes bool_var. */
11751 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11752 && code == EQ_EXPR)
11753 return non_lvalue (fold_convert (type, arg0));
11755 /* bool_var != 1 becomes !bool_var. */
11756 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11757 && code == NE_EXPR)
11758 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11760 /* bool_var == 0 becomes !bool_var. */
11761 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11762 && code == EQ_EXPR)
11763 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11765 /* If this is an equality comparison of the address of two non-weak,
11766 unaliased symbols neither of which are extern (since we do not
11767 have access to attributes for externs), then we know the result. */
11768 if (TREE_CODE (arg0) == ADDR_EXPR
11769 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11770 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11771 && ! lookup_attribute ("alias",
11772 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11773 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11774 && TREE_CODE (arg1) == ADDR_EXPR
11775 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11776 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11777 && ! lookup_attribute ("alias",
11778 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11779 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11781 /* We know that we're looking at the address of two
11782 non-weak, unaliased, static _DECL nodes.
11784 It is both wasteful and incorrect to call operand_equal_p
11785 to compare the two ADDR_EXPR nodes. It is wasteful in that
11786 all we need to do is test pointer equality for the arguments
11787 to the two ADDR_EXPR nodes. It is incorrect to use
11788 operand_equal_p as that function is NOT equivalent to a
11789 C equality test. It can in fact return false for two
11790 objects which would test as equal using the C equality
11791 operator. */
11792 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11793 return constant_boolean_node (equal
11794 ? code == EQ_EXPR : code != EQ_EXPR,
11795 type);
11798 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11799 a MINUS_EXPR of a constant, we can convert it into a comparison with
11800 a revised constant as long as no overflow occurs. */
11801 if (TREE_CODE (arg1) == INTEGER_CST
11802 && (TREE_CODE (arg0) == PLUS_EXPR
11803 || TREE_CODE (arg0) == MINUS_EXPR)
11804 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11805 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11806 ? MINUS_EXPR : PLUS_EXPR,
11807 fold_convert (TREE_TYPE (arg0), arg1),
11808 TREE_OPERAND (arg0, 1), 0))
11809 && !TREE_OVERFLOW (tem))
11810 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11812 /* Similarly for a NEGATE_EXPR. */
11813 if (TREE_CODE (arg0) == NEGATE_EXPR
11814 && TREE_CODE (arg1) == INTEGER_CST
11815 && 0 != (tem = negate_expr (arg1))
11816 && TREE_CODE (tem) == INTEGER_CST
11817 && !TREE_OVERFLOW (tem))
11818 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11820 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11821 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11822 && TREE_CODE (arg1) == INTEGER_CST
11823 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11824 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11825 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11826 fold_convert (TREE_TYPE (arg0), arg1),
11827 TREE_OPERAND (arg0, 1)));
11829 /* Transform comparisons of the form X +- C CMP X. */
11830 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11831 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11832 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11833 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11834 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11836 tree cst = TREE_OPERAND (arg0, 1);
11838 if (code == EQ_EXPR
11839 && !integer_zerop (cst))
11840 return omit_two_operands (type, boolean_false_node,
11841 TREE_OPERAND (arg0, 0), arg1);
11842 else
11843 return omit_two_operands (type, boolean_true_node,
11844 TREE_OPERAND (arg0, 0), arg1);
11847 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11848 for !=. Don't do this for ordered comparisons due to overflow. */
11849 if (TREE_CODE (arg0) == MINUS_EXPR
11850 && integer_zerop (arg1))
11851 return fold_build2 (code, type,
11852 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11854 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11855 if (TREE_CODE (arg0) == ABS_EXPR
11856 && (integer_zerop (arg1) || real_zerop (arg1)))
11857 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11859 /* If this is an EQ or NE comparison with zero and ARG0 is
11860 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11861 two operations, but the latter can be done in one less insn
11862 on machines that have only two-operand insns or on which a
11863 constant cannot be the first operand. */
11864 if (TREE_CODE (arg0) == BIT_AND_EXPR
11865 && integer_zerop (arg1))
11867 tree arg00 = TREE_OPERAND (arg0, 0);
11868 tree arg01 = TREE_OPERAND (arg0, 1);
11869 if (TREE_CODE (arg00) == LSHIFT_EXPR
11870 && integer_onep (TREE_OPERAND (arg00, 0)))
11872 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11873 arg01, TREE_OPERAND (arg00, 1));
11874 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11875 build_int_cst (TREE_TYPE (arg0), 1));
11876 return fold_build2 (code, type,
11877 fold_convert (TREE_TYPE (arg1), tem), arg1);
11879 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11880 && integer_onep (TREE_OPERAND (arg01, 0)))
11882 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11883 arg00, TREE_OPERAND (arg01, 1));
11884 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11885 build_int_cst (TREE_TYPE (arg0), 1));
11886 return fold_build2 (code, type,
11887 fold_convert (TREE_TYPE (arg1), tem), arg1);
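/* Illustrative example (added for exposition, assuming int BAR):

     ((1 << foo) & bar) == 0   =>   ((bar >> foo) & 1) == 0

   Both test the same bit of BAR, but the latter avoids a shifted
   constant as the first operand. */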
11891 /* If this is an NE or EQ comparison of zero against the result of a
11892 signed MOD operation whose second operand is a power of 2, make
11893 the MOD operation unsigned since it is simpler and equivalent. */
11894 if (integer_zerop (arg1)
11895 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11896 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11897 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11898 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11899 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11900 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11902 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11903 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11904 fold_convert (newtype,
11905 TREE_OPERAND (arg0, 0)),
11906 fold_convert (newtype,
11907 TREE_OPERAND (arg0, 1)));
11909 return fold_build2 (code, type, newmod,
11910 fold_convert (newtype, arg1));
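/* Illustrative example (added for exposition, assuming int X):

     (x % 4) == 0   =>   ((unsigned) x % 4U) == 0

   The unsigned modulus by a power of 2 can in turn be lowered to a
   simple mask test. */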
11913 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11914 C1 is a valid shift constant, and C2 is a power of two, i.e.
11915 a single bit. */
11916 if (TREE_CODE (arg0) == BIT_AND_EXPR
11917 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11918 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11919 == INTEGER_CST
11920 && integer_pow2p (TREE_OPERAND (arg0, 1))
11921 && integer_zerop (arg1))
11923 tree itype = TREE_TYPE (arg0);
11924 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11925 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11927 /* Check for a valid shift count. */
11928 if (TREE_INT_CST_HIGH (arg001) == 0
11929 && TREE_INT_CST_LOW (arg001) < prec)
11931 tree arg01 = TREE_OPERAND (arg0, 1);
11932 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11933 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11934 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11935 can be rewritten as (X & (C2 << C1)) != 0. */
11936 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11938 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11939 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11940 return fold_build2 (code, type, tem, arg1);
11942 /* Otherwise, for signed (arithmetic) shifts,
11943 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11944 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11945 else if (!TYPE_UNSIGNED (itype))
11946 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11947 arg000, build_int_cst (itype, 0));
11948 /* Otherwise, for unsigned (logical) shifts,
11949 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11950 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11951 else
11952 return omit_one_operand (type,
11953 code == EQ_EXPR ? integer_one_node
11954 : integer_zero_node,
11955 arg000);
11959 /* If this is an NE comparison of zero with an AND of one, remove the
11960 comparison since the AND will give the correct value. */
11961 if (code == NE_EXPR
11962 && integer_zerop (arg1)
11963 && TREE_CODE (arg0) == BIT_AND_EXPR
11964 && integer_onep (TREE_OPERAND (arg0, 1)))
11965 return fold_convert (type, arg0);
11967 /* If we have (A & C) == C where C is a power of 2, convert this into
11968 (A & C) != 0. Similarly for NE_EXPR. */
11969 if (TREE_CODE (arg0) == BIT_AND_EXPR
11970 && integer_pow2p (TREE_OPERAND (arg0, 1))
11971 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11972 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11973 arg0, fold_convert (TREE_TYPE (arg0),
11974 integer_zero_node));
11976 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11977 bit, then fold the expression into A < 0 or A >= 0. */
11978 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11979 if (tem)
11980 return tem;
11982 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11983 Similarly for NE_EXPR. */
11984 if (TREE_CODE (arg0) == BIT_AND_EXPR
11985 && TREE_CODE (arg1) == INTEGER_CST
11986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11988 tree notc = fold_build1 (BIT_NOT_EXPR,
11989 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11990 TREE_OPERAND (arg0, 1));
11991 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11992 arg1, notc);
11993 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11994 if (integer_nonzerop (dandnotc))
11995 return omit_one_operand (type, rslt, arg0);
11998 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11999 Similarly for NE_EXPR. */
12000 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12001 && TREE_CODE (arg1) == INTEGER_CST
12002 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12004 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12005 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12006 TREE_OPERAND (arg0, 1), notd);
12007 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12008 if (integer_nonzerop (candnotd))
12009 return omit_one_operand (type, rslt, arg0);
12012 /* Optimize comparisons of strlen vs zero to a compare of the
12013 first character of the string vs zero. To wit,
12014 strlen(ptr) == 0 => *ptr == 0
12015 strlen(ptr) != 0 => *ptr != 0
12016 Other cases should reduce to one of these two (or a constant)
12017 due to the return value of strlen being unsigned. */
12018 if (TREE_CODE (arg0) == CALL_EXPR
12019 && integer_zerop (arg1))
12021 tree fndecl = get_callee_fndecl (arg0);
12023 if (fndecl
12024 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12025 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12026 && call_expr_nargs (arg0) == 1
12027 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12029 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12030 return fold_build2 (code, type, iref,
12031 build_int_cst (TREE_TYPE (iref), 0));
12035 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12036 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12037 if (TREE_CODE (arg0) == RSHIFT_EXPR
12038 && integer_zerop (arg1)
12039 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12041 tree arg00 = TREE_OPERAND (arg0, 0);
12042 tree arg01 = TREE_OPERAND (arg0, 1);
12043 tree itype = TREE_TYPE (arg00);
12044 if (TREE_INT_CST_HIGH (arg01) == 0
12045 && TREE_INT_CST_LOW (arg01)
12046 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12048 if (TYPE_UNSIGNED (itype))
12050 itype = signed_type_for (itype);
12051 arg00 = fold_convert (itype, arg00);
12053 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12054 type, arg00, build_int_cst (itype, 0));
12058 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12059 if (integer_zerop (arg1)
12060 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12061 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12062 TREE_OPERAND (arg0, 1));
12064 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12065 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12066 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12067 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12068 build_int_cst (TREE_TYPE (arg1), 0));
12069 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12070 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12071 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12072 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12073 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12074 build_int_cst (TREE_TYPE (arg1), 0));
12076 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12077 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12078 && TREE_CODE (arg1) == INTEGER_CST
12079 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12080 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12081 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12082 TREE_OPERAND (arg0, 1), arg1));
12084 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12085 (X & C) == 0 when C is a single bit. */
12086 if (TREE_CODE (arg0) == BIT_AND_EXPR
12087 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12088 && integer_zerop (arg1)
12089 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12091 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12092 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12093 TREE_OPERAND (arg0, 1));
12094 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12095 type, tem, arg1);
12098 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12099 constant C is a power of two, i.e. a single bit. */
12100 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12101 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12102 && integer_zerop (arg1)
12103 && integer_pow2p (TREE_OPERAND (arg0, 1))
12104 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12105 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12107 tree arg00 = TREE_OPERAND (arg0, 0);
12108 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12109 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12112 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12113 when C is a power of two, i.e. a single bit. */
12114 if (TREE_CODE (arg0) == BIT_AND_EXPR
12115 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12116 && integer_zerop (arg1)
12117 && integer_pow2p (TREE_OPERAND (arg0, 1))
12118 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12119 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12121 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12122 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12123 arg000, TREE_OPERAND (arg0, 1));
12124 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12125 tem, build_int_cst (TREE_TYPE (tem), 0));
12128 if (integer_zerop (arg1)
12129 && tree_expr_nonzero_p (arg0))
12131 tree res = constant_boolean_node (code == NE_EXPR, type);
12132 return omit_one_operand (type, res, arg0);
12135 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12136 if (TREE_CODE (arg0) == NEGATE_EXPR
12137 && TREE_CODE (arg1) == NEGATE_EXPR)
12138 return fold_build2 (code, type,
12139 TREE_OPERAND (arg0, 0),
12140 TREE_OPERAND (arg1, 0));
12142 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12143 if (TREE_CODE (arg0) == BIT_AND_EXPR
12144 && TREE_CODE (arg1) == BIT_AND_EXPR)
12146 tree arg00 = TREE_OPERAND (arg0, 0);
12147 tree arg01 = TREE_OPERAND (arg0, 1);
12148 tree arg10 = TREE_OPERAND (arg1, 0);
12149 tree arg11 = TREE_OPERAND (arg1, 1);
12150 tree itype = TREE_TYPE (arg0);
12152 if (operand_equal_p (arg01, arg11, 0))
12153 return fold_build2 (code, type,
12154 fold_build2 (BIT_AND_EXPR, itype,
12155 fold_build2 (BIT_XOR_EXPR, itype,
12156 arg00, arg10),
12157 arg01),
12158 build_int_cst (itype, 0));
12160 if (operand_equal_p (arg01, arg10, 0))
12161 return fold_build2 (code, type,
12162 fold_build2 (BIT_AND_EXPR, itype,
12163 fold_build2 (BIT_XOR_EXPR, itype,
12164 arg00, arg11),
12165 arg01),
12166 build_int_cst (itype, 0));
12168 if (operand_equal_p (arg00, arg11, 0))
12169 return fold_build2 (code, type,
12170 fold_build2 (BIT_AND_EXPR, itype,
12171 fold_build2 (BIT_XOR_EXPR, itype,
12172 arg01, arg10),
12173 arg00),
12174 build_int_cst (itype, 0));
12176 if (operand_equal_p (arg00, arg10, 0))
12177 return fold_build2 (code, type,
12178 fold_build2 (BIT_AND_EXPR, itype,
12179 fold_build2 (BIT_XOR_EXPR, itype,
12180 arg01, arg11),
12181 arg00),
12182 build_int_cst (itype, 0));
12185 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12186 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12188 tree arg00 = TREE_OPERAND (arg0, 0);
12189 tree arg01 = TREE_OPERAND (arg0, 1);
12190 tree arg10 = TREE_OPERAND (arg1, 0);
12191 tree arg11 = TREE_OPERAND (arg1, 1);
12192 tree itype = TREE_TYPE (arg0);
12194 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12195 operand_equal_p guarantees no side-effects so we don't need
12196 to use omit_one_operand on Z. */
12197 if (operand_equal_p (arg01, arg11, 0))
12198 return fold_build2 (code, type, arg00, arg10);
12199 if (operand_equal_p (arg01, arg10, 0))
12200 return fold_build2 (code, type, arg00, arg11);
12201 if (operand_equal_p (arg00, arg11, 0))
12202 return fold_build2 (code, type, arg01, arg10);
12203 if (operand_equal_p (arg00, arg10, 0))
12204 return fold_build2 (code, type, arg01, arg11);
12206 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12207 if (TREE_CODE (arg01) == INTEGER_CST
12208 && TREE_CODE (arg11) == INTEGER_CST)
12209 return fold_build2 (code, type,
12210 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12211 fold_build2 (BIT_XOR_EXPR, itype,
12212 arg01, arg11)),
12213 arg10);
12216 /* Attempt to simplify equality/inequality comparisons of complex
12217 values. Only lower the comparison if the result is known or
12218 can be simplified to a single scalar comparison. */
12219 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12220 || TREE_CODE (arg0) == COMPLEX_CST)
12221 && (TREE_CODE (arg1) == COMPLEX_EXPR
12222 || TREE_CODE (arg1) == COMPLEX_CST))
12224 tree real0, imag0, real1, imag1;
12225 tree rcond, icond;
12227 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12229 real0 = TREE_OPERAND (arg0, 0);
12230 imag0 = TREE_OPERAND (arg0, 1);
12232 else
12234 real0 = TREE_REALPART (arg0);
12235 imag0 = TREE_IMAGPART (arg0);
12238 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12240 real1 = TREE_OPERAND (arg1, 0);
12241 imag1 = TREE_OPERAND (arg1, 1);
12243 else
12245 real1 = TREE_REALPART (arg1);
12246 imag1 = TREE_IMAGPART (arg1);
12249 rcond = fold_binary (code, type, real0, real1);
12250 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12252 if (integer_zerop (rcond))
12254 if (code == EQ_EXPR)
12255 return omit_two_operands (type, boolean_false_node,
12256 imag0, imag1);
12257 return fold_build2 (NE_EXPR, type, imag0, imag1);
12259 else
12261 if (code == NE_EXPR)
12262 return omit_two_operands (type, boolean_true_node,
12263 imag0, imag1);
12264 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12268 icond = fold_binary (code, type, imag0, imag1);
12269 if (icond && TREE_CODE (icond) == INTEGER_CST)
12271 if (integer_zerop (icond))
12273 if (code == EQ_EXPR)
12274 return omit_two_operands (type, boolean_false_node,
12275 real0, real1);
12276 return fold_build2 (NE_EXPR, type, real0, real1);
12278 else
12280 if (code == NE_EXPR)
12281 return omit_two_operands (type, boolean_true_node,
12282 real0, real1);
12283 return fold_build2 (EQ_EXPR, type, real0, real1);
12288 return NULL_TREE;
12290 case LT_EXPR:
12291 case GT_EXPR:
12292 case LE_EXPR:
12293 case GE_EXPR:
12294 tem = fold_comparison (code, type, op0, op1);
12295 if (tem != NULL_TREE)
12296 return tem;
12298 /* Transform comparisons of the form X +- C CMP X. */
12299 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12300 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12301 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12302 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12303 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12304 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12306 tree arg01 = TREE_OPERAND (arg0, 1);
12307 enum tree_code code0 = TREE_CODE (arg0);
12308 int is_positive;
12310 if (TREE_CODE (arg01) == REAL_CST)
12311 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12312 else
12313 is_positive = tree_int_cst_sgn (arg01);
12315 /* (X - c) > X becomes false. */
12316 if (code == GT_EXPR
12317 && ((code0 == MINUS_EXPR && is_positive >= 0)
12318 || (code0 == PLUS_EXPR && is_positive <= 0)))
12320 if (TREE_CODE (arg01) == INTEGER_CST
12321 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12322 fold_overflow_warning (("assuming signed overflow does not "
12323 "occur when assuming that (X - c) > X "
12324 "is always false"),
12325 WARN_STRICT_OVERFLOW_ALL);
12326 return constant_boolean_node (0, type);
12329 /* Likewise (X + c) < X becomes false. */
12330 if (code == LT_EXPR
12331 && ((code0 == PLUS_EXPR && is_positive >= 0)
12332 || (code0 == MINUS_EXPR && is_positive <= 0)))
12334 if (TREE_CODE (arg01) == INTEGER_CST
12335 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12336 fold_overflow_warning (("assuming signed overflow does not "
12337 "occur when assuming that "
12338 "(X + c) < X is always false"),
12339 WARN_STRICT_OVERFLOW_ALL);
12340 return constant_boolean_node (0, type);
12343 /* Convert (X - c) <= X to true. */
12344 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12345 && code == LE_EXPR
12346 && ((code0 == MINUS_EXPR && is_positive >= 0)
12347 || (code0 == PLUS_EXPR && is_positive <= 0)))
12349 if (TREE_CODE (arg01) == INTEGER_CST
12350 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12351 fold_overflow_warning (("assuming signed overflow does not "
12352 "occur when assuming that "
12353 "(X - c) <= X is always true"),
12354 WARN_STRICT_OVERFLOW_ALL);
12355 return constant_boolean_node (1, type);
12358 /* Convert (X + c) >= X to true. */
12359 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12360 && code == GE_EXPR
12361 && ((code0 == PLUS_EXPR && is_positive >= 0)
12362 || (code0 == MINUS_EXPR && is_positive <= 0)))
12364 if (TREE_CODE (arg01) == INTEGER_CST
12365 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12366 fold_overflow_warning (("assuming signed overflow does not "
12367 "occur when assuming that "
12368 "(X + c) >= X is always true"),
12369 WARN_STRICT_OVERFLOW_ALL);
12370 return constant_boolean_node (1, type);
12373 if (TREE_CODE (arg01) == INTEGER_CST)
12375 /* Convert X + c > X and X - c < X to true for integers. */
12376 if (code == GT_EXPR
12377 && ((code0 == PLUS_EXPR && is_positive > 0)
12378 || (code0 == MINUS_EXPR && is_positive < 0)))
12380 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12381 fold_overflow_warning (("assuming signed overflow does "
12382 "not occur when assuming that "
12383 "(X + c) > X is always true"),
12384 WARN_STRICT_OVERFLOW_ALL);
12385 return constant_boolean_node (1, type);
12388 if (code == LT_EXPR
12389 && ((code0 == MINUS_EXPR && is_positive > 0)
12390 || (code0 == PLUS_EXPR && is_positive < 0)))
12392 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12393 fold_overflow_warning (("assuming signed overflow does "
12394 "not occur when assuming that "
12395 "(X - c) < X is always true"),
12396 WARN_STRICT_OVERFLOW_ALL);
12397 return constant_boolean_node (1, type);
12400 /* Convert X + c <= X and X - c >= X to false for integers. */
12401 if (code == LE_EXPR
12402 && ((code0 == PLUS_EXPR && is_positive > 0)
12403 || (code0 == MINUS_EXPR && is_positive < 0)))
12405 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12406 fold_overflow_warning (("assuming signed overflow does "
12407 "not occur when assuming that "
12408 "(X + c) <= X is always false"),
12409 WARN_STRICT_OVERFLOW_ALL);
12410 return constant_boolean_node (0, type);
12413 if (code == GE_EXPR
12414 && ((code0 == MINUS_EXPR && is_positive > 0)
12415 || (code0 == PLUS_EXPR && is_positive < 0)))
12417 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12418 fold_overflow_warning (("assuming signed overflow does "
12419 "not occur when assuming that "
12420 "(X - c) >= X is always false"),
12421 WARN_STRICT_OVERFLOW_ALL);
12422 return constant_boolean_node (0, type);
12427 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12428 This transformation enables the cases that are handled by later
12429 optimizations involving comparisons with non-negative constants. */
12430 if (TREE_CODE (arg1) == INTEGER_CST
12431 && TREE_CODE (arg0) != INTEGER_CST
12432 && tree_int_cst_sgn (arg1) > 0)
12434 if (code == GE_EXPR)
12436 arg1 = const_binop (MINUS_EXPR, arg1,
12437 build_int_cst (TREE_TYPE (arg1), 1), 0);
12438 return fold_build2 (GT_EXPR, type, arg0,
12439 fold_convert (TREE_TYPE (arg0), arg1));
12441 if (code == LT_EXPR)
12443 arg1 = const_binop (MINUS_EXPR, arg1,
12444 build_int_cst (TREE_TYPE (arg1), 1), 0);
12445 return fold_build2 (LE_EXPR, type, arg0,
12446 fold_convert (TREE_TYPE (arg0), arg1));
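/* For example, "X >= 1" becomes "X > 0" and "X < 1" becomes
   "X <= 0", feeding the transformations below. */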
12450 /* Comparisons with the highest or lowest possible integer of
12451 the specified precision will have known values. */
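/* For example, assuming an 8-bit unsigned char X: "X <= 255"
   folds to 1, "X > 255" folds to 0, and "X > 254" becomes
   "X == 255". */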
12453 tree arg1_type = TREE_TYPE (arg1);
12454 unsigned int width = TYPE_PRECISION (arg1_type);
12456 if (TREE_CODE (arg1) == INTEGER_CST
12457 && !TREE_OVERFLOW (arg1)
12458 && width <= 2 * HOST_BITS_PER_WIDE_INT
12459 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12461 HOST_WIDE_INT signed_max_hi;
12462 unsigned HOST_WIDE_INT signed_max_lo;
12463 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12465 if (width <= HOST_BITS_PER_WIDE_INT)
12467 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12468 - 1;
12469 signed_max_hi = 0;
12470 max_hi = 0;
12472 if (TYPE_UNSIGNED (arg1_type))
12474 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12475 min_lo = 0;
12476 min_hi = 0;
12478 else
12480 max_lo = signed_max_lo;
12481 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12482 min_hi = -1;
12485 else
12487 width -= HOST_BITS_PER_WIDE_INT;
12488 signed_max_lo = -1;
12489 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12490 - 1;
12491 max_lo = -1;
12492 min_lo = 0;
12494 if (TYPE_UNSIGNED (arg1_type))
12496 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12497 min_hi = 0;
12499 else
12501 max_hi = signed_max_hi;
12502 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12506 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12507 && TREE_INT_CST_LOW (arg1) == max_lo)
12508 switch (code)
12510 case GT_EXPR:
12511 return omit_one_operand (type, integer_zero_node, arg0);
12513 case GE_EXPR:
12514 return fold_build2 (EQ_EXPR, type, op0, op1);
12516 case LE_EXPR:
12517 return omit_one_operand (type, integer_one_node, arg0);
12519 case LT_EXPR:
12520 return fold_build2 (NE_EXPR, type, op0, op1);
12522 /* The GE_EXPR and LT_EXPR cases above are not normally
12523 reached because of previous transformations. */
12525 default:
12526 break;
12528 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12529 == max_hi
12530 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12531 switch (code)
12533 case GT_EXPR:
12534 arg1 = const_binop (PLUS_EXPR, arg1,
12535 build_int_cst (TREE_TYPE (arg1), 1), 0);
12536 return fold_build2 (EQ_EXPR, type,
12537 fold_convert (TREE_TYPE (arg1), arg0),
12538 arg1);
12539 case LE_EXPR:
12540 arg1 = const_binop (PLUS_EXPR, arg1,
12541 build_int_cst (TREE_TYPE (arg1), 1), 0);
12542 return fold_build2 (NE_EXPR, type,
12543 fold_convert (TREE_TYPE (arg1), arg0),
12544 arg1);
12545 default:
12546 break;
12548 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12549 == min_hi
12550 && TREE_INT_CST_LOW (arg1) == min_lo)
12551 switch (code)
12553 case LT_EXPR:
12554 return omit_one_operand (type, integer_zero_node, arg0);
12556 case LE_EXPR:
12557 return fold_build2 (EQ_EXPR, type, op0, op1);
12559 case GE_EXPR:
12560 return omit_one_operand (type, integer_one_node, arg0);
12562 case GT_EXPR:
12563 return fold_build2 (NE_EXPR, type, op0, op1);
12565 default:
12566 break;
12568 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12569 == min_hi
12570 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12571 switch (code)
12573 case GE_EXPR:
12574 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12575 return fold_build2 (NE_EXPR, type,
12576 fold_convert (TREE_TYPE (arg1), arg0),
12577 arg1);
12578 case LT_EXPR:
12579 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12580 return fold_build2 (EQ_EXPR, type,
12581 fold_convert (TREE_TYPE (arg1), arg0),
12582 arg1);
12583 default:
12584 break;
12587 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12588 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12589 && TYPE_UNSIGNED (arg1_type)
12590 /* We will flip the signedness of the comparison operator
12591 associated with the mode of arg1, so the sign bit is
12592 specified by this mode. Check that arg1 is the signed
12593 max associated with this sign bit. */
12594 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12595 /* signed_type does not work on pointer types. */
12596 && INTEGRAL_TYPE_P (arg1_type))
12598 /* The following case also applies to X < signed_max+1
12599 and X >= signed_max+1 because of previous transformations. */
12600 if (code == LE_EXPR || code == GT_EXPR)
12602 tree st;
12603 st = signed_type_for (TREE_TYPE (arg1));
12604 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12605 type, fold_convert (st, arg0),
12606 build_int_cst (st, 0));
12612 /* If we are comparing an ABS_EXPR with a constant, we can
12613 convert all the cases into explicit comparisons, but they may
12614 well not be faster than doing the ABS and one comparison.
12615 But ABS (X) <= C is a range comparison, which becomes a subtraction
12616 and a comparison, and is probably faster. */
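/* For example, "ABS (X) <= 7" folds to the range test
   "X >= -7 && X <= 7", built as a TRUTH_ANDIF_EXPR below. */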
12617 if (code == LE_EXPR
12618 && TREE_CODE (arg1) == INTEGER_CST
12619 && TREE_CODE (arg0) == ABS_EXPR
12620 && ! TREE_SIDE_EFFECTS (arg0)
12621 && (0 != (tem = negate_expr (arg1)))
12622 && TREE_CODE (tem) == INTEGER_CST
12623 && !TREE_OVERFLOW (tem))
12624 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12625 build2 (GE_EXPR, type,
12626 TREE_OPERAND (arg0, 0), tem),
12627 build2 (LE_EXPR, type,
12628 TREE_OPERAND (arg0, 0), arg1));
12630 /* Convert ABS_EXPR<x> >= 0 to true. */
12631 strict_overflow_p = false;
12632 if (code == GE_EXPR
12633 && (integer_zerop (arg1)
12634 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12635 && real_zerop (arg1)))
12636 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12638 if (strict_overflow_p)
12639 fold_overflow_warning (("assuming signed overflow does not occur "
12640 "when simplifying comparison of "
12641 "absolute value and zero"),
12642 WARN_STRICT_OVERFLOW_CONDITIONAL);
12643 return omit_one_operand (type, integer_one_node, arg0);
12646 /* Convert ABS_EXPR<x> < 0 to false. */
12647 strict_overflow_p = false;
12648 if (code == LT_EXPR
12649 && (integer_zerop (arg1) || real_zerop (arg1))
12650 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12652 if (strict_overflow_p)
12653 fold_overflow_warning (("assuming signed overflow does not occur "
12654 "when simplifying comparison of "
12655 "absolute value and zero"),
12656 WARN_STRICT_OVERFLOW_CONDITIONAL);
12657 return omit_one_operand (type, integer_zero_node, arg0);
12660 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12661 and similarly for >= into !=. */
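/* For example, for unsigned X, "X < (1 << Y)" becomes
   "(X >> Y) == 0" and "X >= (1 << Y)" becomes "(X >> Y) != 0". */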
12662 if ((code == LT_EXPR || code == GE_EXPR)
12663 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12664 && TREE_CODE (arg1) == LSHIFT_EXPR
12665 && integer_onep (TREE_OPERAND (arg1, 0)))
12666 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12667 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12668 TREE_OPERAND (arg1, 1)),
12669 build_int_cst (TREE_TYPE (arg0), 0));
12671 if ((code == LT_EXPR || code == GE_EXPR)
12672 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12673 && CONVERT_EXPR_P (arg1)
12674 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12675 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12676 return
12677 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12678 fold_convert (TREE_TYPE (arg0),
12679 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12680 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12681 1))),
12682 build_int_cst (TREE_TYPE (arg0), 0));
12684 return NULL_TREE;
12686 case UNORDERED_EXPR:
12687 case ORDERED_EXPR:
12688 case UNLT_EXPR:
12689 case UNLE_EXPR:
12690 case UNGT_EXPR:
12691 case UNGE_EXPR:
12692 case UNEQ_EXPR:
12693 case LTGT_EXPR:
12694 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12696 t1 = fold_relational_const (code, type, arg0, arg1);
12697 if (t1 != NULL_TREE)
12698 return t1;
12701 /* If the first operand is NaN, the result is constant. */
12702 if (TREE_CODE (arg0) == REAL_CST
12703 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12704 && (code != LTGT_EXPR || ! flag_trapping_math))
12706 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12707 ? integer_zero_node
12708 : integer_one_node;
12709 return omit_one_operand (type, t1, arg1);
12712 /* If the second operand is NaN, the result is constant. */
12713 if (TREE_CODE (arg1) == REAL_CST
12714 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12715 && (code != LTGT_EXPR || ! flag_trapping_math))
12717 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12718 ? integer_zero_node
12719 : integer_one_node;
12720 return omit_one_operand (type, t1, arg0);
12723 /* Simplify unordered comparison of something with itself. */
12724 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12725 && operand_equal_p (arg0, arg1, 0))
12726 return constant_boolean_node (1, type);
12728 if (code == LTGT_EXPR
12729 && !flag_trapping_math
12730 && operand_equal_p (arg0, arg1, 0))
12731 return constant_boolean_node (0, type);
12733 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12735 tree targ0 = strip_float_extensions (arg0);
12736 tree targ1 = strip_float_extensions (arg1);
12737 tree newtype = TREE_TYPE (targ0);
12739 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12740 newtype = TREE_TYPE (targ1);
12742 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12743 return fold_build2 (code, type, fold_convert (newtype, targ0),
12744 fold_convert (newtype, targ1));
12747 return NULL_TREE;
12749 case COMPOUND_EXPR:
12750 /* When pedantic, a compound expression can be neither an lvalue
12751 nor an integer constant expression. */
12752 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12753 return NULL_TREE;
12754 /* Don't let (0, 0) be a null pointer constant. */
12755 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12756 : fold_convert (type, arg1);
12757 return pedantic_non_lvalue (tem);
12759 case COMPLEX_EXPR:
12760 if ((TREE_CODE (arg0) == REAL_CST
12761 && TREE_CODE (arg1) == REAL_CST)
12762 || (TREE_CODE (arg0) == INTEGER_CST
12763 && TREE_CODE (arg1) == INTEGER_CST))
12764 return build_complex (type, arg0, arg1);
12765 return NULL_TREE;
12767 case ASSERT_EXPR:
12768 /* An ASSERT_EXPR should never be passed to fold_binary. */
12769 gcc_unreachable ();
12771 default:
12772 return NULL_TREE;
12773 } /* switch (code) */
12776 /* Callback for walk_tree, looking for a LABEL_EXPR. Returns *TP if
12777 it is a LABEL_EXPR; otherwise returns NULL_TREE. Does not descend
12778 into the sub-tree of a GOTO_EXPR. */
12780 static tree
12781 contains_label_1 (tree *tp,
12782 int *walk_subtrees,
12783 void *data ATTRIBUTE_UNUSED)
12785 switch (TREE_CODE (*tp))
12787 case LABEL_EXPR:
12788 return *tp;
12789 case GOTO_EXPR:
12790 *walk_subtrees = 0;
12791 /* FALLTHRU */
12792 default:
12793 return NULL_TREE;
12797 /* Checks whether the sub-tree ST contains a label (a LABEL_EXPR) that
12798 is accessible from outside the sub-tree. Returns true if such a
12799 label is found, false otherwise. */
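/* For example, in "c ? x : ({ lab: y; })" the unused arm must not be
   dropped, since a goto elsewhere may still jump to "lab". */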
12801 static bool
12802 contains_label_p (tree st)
12804 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12807 /* Fold a ternary expression of code CODE and type TYPE with operands
12808 OP0, OP1, and OP2. Return the folded expression if folding is
12809 successful. Otherwise, return NULL_TREE. */
12811 tree
12812 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12814 tree tem;
12815 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12816 enum tree_code_class kind = TREE_CODE_CLASS (code);
12818 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12819 && TREE_CODE_LENGTH (code) == 3);
12821 /* Strip any conversions that don't change the mode. This is safe
12822 for every expression, except for a comparison expression because
12823 its signedness is derived from its operands. So, in the latter
12824 case, only strip conversions that don't change the signedness.
12826 Note that this is done as an internal manipulation within the
12827 constant folder, in order to find the simplest representation of
12828 the arguments so that their form can be studied. In any cases,
12829 the appropriate type conversions should be put back in the tree
12830 that will get out of the constant folder. */
12831 if (op0)
12833 arg0 = op0;
12834 STRIP_NOPS (arg0);
12837 if (op1)
12839 arg1 = op1;
12840 STRIP_NOPS (arg1);
12843 switch (code)
12845 case COMPONENT_REF:
12846 if (TREE_CODE (arg0) == CONSTRUCTOR
12847 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12849 unsigned HOST_WIDE_INT idx;
12850 tree field, value;
12851 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12852 if (field == arg1)
12853 return value;
12855 return NULL_TREE;
12857 case COND_EXPR:
12858 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12859 so all simple results must be passed through pedantic_non_lvalue. */
12860 if (TREE_CODE (arg0) == INTEGER_CST)
12862 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12863 tem = integer_zerop (arg0) ? op2 : op1;
12864 /* Only optimize constant conditions when the selected branch
12865 has the same type as the COND_EXPR. This avoids optimizing
12866 away "c ? x : throw", where the throw has a void type.
12867 Also avoid throwing away an unused operand that contains a label. */
12868 if ((!TREE_SIDE_EFFECTS (unused_op)
12869 || !contains_label_p (unused_op))
12870 && (! VOID_TYPE_P (TREE_TYPE (tem))
12871 || VOID_TYPE_P (type)))
12872 return pedantic_non_lvalue (tem);
12873 return NULL_TREE;
12875 if (operand_equal_p (arg1, op2, 0))
12876 return pedantic_omit_one_operand (type, arg1, arg0);
12878 /* If we have A op B ? A : C, we may be able to convert this to a
12879 simpler expression, depending on the operation and the values
12880 of B and C. Signed zeros prevent all of these transformations,
12881 for reasons given above each one.
12883 Also try swapping the arguments and inverting the conditional. */
12884 if (COMPARISON_CLASS_P (arg0)
12885 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12886 arg1, TREE_OPERAND (arg0, 1))
12887 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12889 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12890 if (tem)
12891 return tem;
12894 if (COMPARISON_CLASS_P (arg0)
12895 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12896 op2,
12897 TREE_OPERAND (arg0, 1))
12898 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12900 tem = fold_truth_not_expr (arg0);
12901 if (tem && COMPARISON_CLASS_P (tem))
12903 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12904 if (tem)
12905 return tem;
12909 /* If the second operand is simpler than the third, swap them
12910 since that produces better jump optimization results. */
12911 if (truth_value_p (TREE_CODE (arg0))
12912 && tree_swap_operands_p (op1, op2, false))
12914 /* See if this can be inverted. If it can't, possibly because
12915 it was a floating-point inequality comparison, don't do
12916 anything. */
12917 tem = fold_truth_not_expr (arg0);
12918 if (tem)
12919 return fold_build3 (code, type, tem, op2, op1);
12922 /* Convert A ? 1 : 0 to simply A. */
12923 if (integer_onep (op1)
12924 && integer_zerop (op2)
12925 /* If we try to convert OP0 to our type, the
12926 call to fold will try to move the conversion inside
12927 a COND, which will recurse. In that case, the COND_EXPR
12928 is probably the best choice, so leave it alone. */
12929 && type == TREE_TYPE (arg0))
12930 return pedantic_non_lvalue (arg0);
12932 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12933 over COND_EXPR in cases such as floating point comparisons. */
12934 if (integer_zerop (op1)
12935 && integer_onep (op2)
12936 && truth_value_p (TREE_CODE (arg0)))
12937 return pedantic_non_lvalue (fold_convert (type,
12938 invert_truthvalue (arg0)));
12940 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
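/* For example, with 32-bit int A, "A < 0 ? INT_MIN : 0" folds to
   "A & INT_MIN", INT_MIN being the sign-bit constant here. */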
12941 if (TREE_CODE (arg0) == LT_EXPR
12942 && integer_zerop (TREE_OPERAND (arg0, 1))
12943 && integer_zerop (op2)
12944 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12946 /* sign_bit_p only checks ARG1 bits within A's precision.
12947 If <sign bit of A> has wider type than A, bits outside
12948 of A's precision in <sign bit of A> need to be checked.
12949 If they are all 0, this optimization needs to be done
12950 in unsigned A's type; if they are all 1, in signed A's type;
12951 otherwise this can't be done. */
12952 if (TYPE_PRECISION (TREE_TYPE (tem))
12953 < TYPE_PRECISION (TREE_TYPE (arg1))
12954 && TYPE_PRECISION (TREE_TYPE (tem))
12955 < TYPE_PRECISION (type))
12957 unsigned HOST_WIDE_INT mask_lo;
12958 HOST_WIDE_INT mask_hi;
12959 int inner_width, outer_width;
12960 tree tem_type;
12962 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12963 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12964 if (outer_width > TYPE_PRECISION (type))
12965 outer_width = TYPE_PRECISION (type);
12967 if (outer_width > HOST_BITS_PER_WIDE_INT)
12969 mask_hi = ((unsigned HOST_WIDE_INT) -1
12970 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12971 mask_lo = -1;
12973 else
12975 mask_hi = 0;
12976 mask_lo = ((unsigned HOST_WIDE_INT) -1
12977 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12979 if (inner_width > HOST_BITS_PER_WIDE_INT)
12981 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12982 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12983 mask_lo = 0;
12985 else
12986 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12987 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12989 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12990 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12992 tem_type = signed_type_for (TREE_TYPE (tem));
12993 tem = fold_convert (tem_type, tem);
12995 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12996 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12998 tem_type = unsigned_type_for (TREE_TYPE (tem));
12999 tem = fold_convert (tem_type, tem);
13001 else
13002 tem = NULL;
13005 if (tem)
13006 return fold_convert (type,
13007 fold_build2 (BIT_AND_EXPR,
13008 TREE_TYPE (tem), tem,
13009 fold_convert (TREE_TYPE (tem),
13010 arg1)));
13013 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13014 already handled above. */
13015 if (TREE_CODE (arg0) == BIT_AND_EXPR
13016 && integer_onep (TREE_OPERAND (arg0, 1))
13017 && integer_zerop (op2)
13018 && integer_pow2p (arg1))
13020 tree tem = TREE_OPERAND (arg0, 0);
13021 STRIP_NOPS (tem);
13022 if (TREE_CODE (tem) == RSHIFT_EXPR
13023 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13024 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13025 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13026 return fold_build2 (BIT_AND_EXPR, type,
13027 TREE_OPERAND (tem, 0), arg1);
13030 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13031 is probably obsolete because the first operand should be a
13032 truth value (that's why we have the two cases above), but let's
13033 leave it in until we can confirm this for all front-ends. */
13034 if (integer_zerop (op2)
13035 && TREE_CODE (arg0) == NE_EXPR
13036 && integer_zerop (TREE_OPERAND (arg0, 1))
13037 && integer_pow2p (arg1)
13038 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13039 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13040 arg1, OEP_ONLY_CONST))
13041 return pedantic_non_lvalue (fold_convert (type,
13042 TREE_OPERAND (arg0, 0)));
13044 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13045 if (integer_zerop (op2)
13046 && truth_value_p (TREE_CODE (arg0))
13047 && truth_value_p (TREE_CODE (arg1)))
13048 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13049 fold_convert (type, arg0),
13050 arg1);
13052 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13053 if (integer_onep (op2)
13054 && truth_value_p (TREE_CODE (arg0))
13055 && truth_value_p (TREE_CODE (arg1)))
13057 /* Only perform the transformation if ARG0 is easily inverted. */
13058 tem = fold_truth_not_expr (arg0);
13059 if (tem)
13060 return fold_build2 (TRUTH_ORIF_EXPR, type,
13061 fold_convert (type, tem),
13062 arg1);
13065 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13066 if (integer_zerop (arg1)
13067 && truth_value_p (TREE_CODE (arg0))
13068 && truth_value_p (TREE_CODE (op2)))
13070 /* Only perform the transformation if ARG0 is easily inverted. */
13071 tem = fold_truth_not_expr (arg0);
13072 if (tem)
13073 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13074 fold_convert (type, tem),
13075 op2);
13078 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13079 if (integer_onep (arg1)
13080 && truth_value_p (TREE_CODE (arg0))
13081 && truth_value_p (TREE_CODE (op2)))
13082 return fold_build2 (TRUTH_ORIF_EXPR, type,
13083 fold_convert (type, arg0),
13084 op2);
13086 return NULL_TREE;
13088 case CALL_EXPR:
13089 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13090 of fold_ternary on them. */
13091 gcc_unreachable ();
13093 case BIT_FIELD_REF:
13094 if ((TREE_CODE (arg0) == VECTOR_CST
13095 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13096 && type == TREE_TYPE (TREE_TYPE (arg0)))
13098 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13099 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13101 if (width != 0
13102 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13103 && (idx % width) == 0
13104 && (idx = idx / width)
13105 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13107 tree elements = NULL_TREE;
13109 if (TREE_CODE (arg0) == VECTOR_CST)
13110 elements = TREE_VECTOR_CST_ELTS (arg0);
13111 else
13113 unsigned HOST_WIDE_INT idx;
13114 tree value;
13116 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13117 elements = tree_cons (NULL_TREE, value, elements);
13119 while (idx-- > 0 && elements)
13120 elements = TREE_CHAIN (elements);
13121 if (elements)
13122 return TREE_VALUE (elements);
13123 else
13124 return fold_convert (type, integer_zero_node);
13128 /* A bit-field-ref that references the full argument can be stripped. */
13129 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13130 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13131 && integer_zerop (op2))
13132 return fold_convert (type, arg0);
13134 return NULL_TREE;
13136 default:
13137 return NULL_TREE;
13138 } /* switch (code) */
13141 /* Perform constant folding and related simplification of EXPR.
13142 The related simplifications include x*1 => x, x*0 => 0, etc.,
13143 and application of the associative law.
13144 NOP_EXPR conversions may be removed freely (as long as we
13145 are careful not to change the type of the overall expression).
13146 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13147 but we can constant-fold them if they have constant operands. */
13149 #ifdef ENABLE_FOLD_CHECKING
13150 # define fold(x) fold_1 (x)
13151 static tree fold_1 (tree);
13152 static
13153 #endif
13154 tree
13155 fold (tree expr)
13157 const tree t = expr;
13158 enum tree_code code = TREE_CODE (t);
13159 enum tree_code_class kind = TREE_CODE_CLASS (code);
13160 tree tem;
13162 /* Return right away if a constant. */
13163 if (kind == tcc_constant)
13164 return t;
13166 /* CALL_EXPR-like objects with variable numbers of operands are
13167 treated specially. */
13168 if (kind == tcc_vl_exp)
13170 if (code == CALL_EXPR)
13172 tem = fold_call_expr (expr, false);
13173 return tem ? tem : expr;
13175 return expr;
13178 if (IS_EXPR_CODE_CLASS (kind))
13180 tree type = TREE_TYPE (t);
13181 tree op0, op1, op2;
13183 switch (TREE_CODE_LENGTH (code))
13185 case 1:
13186 op0 = TREE_OPERAND (t, 0);
13187 tem = fold_unary (code, type, op0);
13188 return tem ? tem : expr;
13189 case 2:
13190 op0 = TREE_OPERAND (t, 0);
13191 op1 = TREE_OPERAND (t, 1);
13192 tem = fold_binary (code, type, op0, op1);
13193 return tem ? tem : expr;
13194 case 3:
13195 op0 = TREE_OPERAND (t, 0);
13196 op1 = TREE_OPERAND (t, 1);
13197 op2 = TREE_OPERAND (t, 2);
13198 tem = fold_ternary (code, type, op0, op1, op2);
13199 return tem ? tem : expr;
13200 default:
13201 break;
13205 switch (code)
13207 case ARRAY_REF:
13209 tree op0 = TREE_OPERAND (t, 0);
13210 tree op1 = TREE_OPERAND (t, 1);
13212 if (TREE_CODE (op1) == INTEGER_CST
13213 && TREE_CODE (op0) == CONSTRUCTOR
13214 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13216 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13217 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13218 unsigned HOST_WIDE_INT begin = 0;
13220 /* Find a matching index by means of a binary search. */
13221 while (begin != end)
13223 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13224 tree index = VEC_index (constructor_elt, elts, middle)->index;
13226 if (TREE_CODE (index) == INTEGER_CST
13227 && tree_int_cst_lt (index, op1))
13228 begin = middle + 1;
13229 else if (TREE_CODE (index) == INTEGER_CST
13230 && tree_int_cst_lt (op1, index))
13231 end = middle;
13232 else if (TREE_CODE (index) == RANGE_EXPR
13233 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13234 begin = middle + 1;
13235 else if (TREE_CODE (index) == RANGE_EXPR
13236 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13237 end = middle;
13238 else
13239 return VEC_index (constructor_elt, elts, middle)->value;
13243 return t;
13246 case CONST_DECL:
13247 return fold (DECL_INITIAL (t));
13249 default:
13250 return t;
13251 } /* switch (code) */
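/* For illustration, assuming GENERIC trees already built by a front end
   (a sketch, not code from this file):

	tree t = build2 (PLUS_EXPR, integer_type_node,
			 build_int_cst (integer_type_node, 2),
			 build_int_cst (integer_type_node, 3));
	tree five = fold (t);

   dispatches through fold_binary above and yields the INTEGER_CST 5,
   while a tree that cannot be simplified is returned unchanged. */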
13254 #ifdef ENABLE_FOLD_CHECKING
13255 #undef fold
13257 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13258 static void fold_check_failed (const_tree, const_tree);
13259 void print_fold_checksum (const_tree);
13261 /* When --enable-checking=fold, compute a digest of EXPR before
13262 and after the actual fold call, to verify that fold did not
13263 accidentally change the original expr. */
13265 tree
13266 fold (tree expr)
13268 tree ret;
13269 struct md5_ctx ctx;
13270 unsigned char checksum_before[16], checksum_after[16];
13271 htab_t ht;
13273 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13274 md5_init_ctx (&ctx);
13275 fold_checksum_tree (expr, &ctx, ht);
13276 md5_finish_ctx (&ctx, checksum_before);
13277 htab_empty (ht);
13279 ret = fold_1 (expr);
13281 md5_init_ctx (&ctx);
13282 fold_checksum_tree (expr, &ctx, ht);
13283 md5_finish_ctx (&ctx, checksum_after);
13284 htab_delete (ht);
13286 if (memcmp (checksum_before, checksum_after, 16))
13287 fold_check_failed (expr, ret);
13289 return ret;
13292 void
13293 print_fold_checksum (const_tree expr)
13295 struct md5_ctx ctx;
13296 unsigned char checksum[16], cnt;
13297 htab_t ht;
13299 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13300 md5_init_ctx (&ctx);
13301 fold_checksum_tree (expr, &ctx, ht);
13302 md5_finish_ctx (&ctx, checksum);
13303 htab_delete (ht);
13304 for (cnt = 0; cnt < 16; ++cnt)
13305 fprintf (stderr, "%02x", checksum[cnt]);
13306 putc ('\n', stderr);
13309 static void
13310 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13312 internal_error ("fold check: original tree changed by fold");
13315 static void
13316 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13318 const void **slot;
13319 enum tree_code code;
13320 struct tree_function_decl buf;
13321 int i, len;
13323 recursive_label:
13325 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13326 <= sizeof (struct tree_function_decl))
13327 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13328 if (expr == NULL)
13329 return;
13330 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13331 if (*slot != NULL)
13332 return;
13333 *slot = expr;
13334 code = TREE_CODE (expr);
13335 if (TREE_CODE_CLASS (code) == tcc_declaration
13336 && DECL_ASSEMBLER_NAME_SET_P (expr))
13338 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13339 memcpy ((char *) &buf, expr, tree_size (expr));
13340 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13341 expr = (tree) &buf;
13343 else if (TREE_CODE_CLASS (code) == tcc_type
13344 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13345 || TYPE_CACHED_VALUES_P (expr)
13346 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13348 /* Allow these fields to be modified. */
13349 tree tmp;
13350 memcpy ((char *) &buf, expr, tree_size (expr));
13351 expr = tmp = (tree) &buf;
13352 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13353 TYPE_POINTER_TO (tmp) = NULL;
13354 TYPE_REFERENCE_TO (tmp) = NULL;
13355 if (TYPE_CACHED_VALUES_P (tmp))
13357 TYPE_CACHED_VALUES_P (tmp) = 0;
13358 TYPE_CACHED_VALUES (tmp) = NULL;
13361 md5_process_bytes (expr, tree_size (expr), ctx);
13362 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13363 if (TREE_CODE_CLASS (code) != tcc_type
13364 && TREE_CODE_CLASS (code) != tcc_declaration
13365 && code != TREE_LIST
13366 && code != SSA_NAME)
13367 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13368 switch (TREE_CODE_CLASS (code))
13370 case tcc_constant:
13371 switch (code)
13373 case STRING_CST:
13374 md5_process_bytes (TREE_STRING_POINTER (expr),
13375 TREE_STRING_LENGTH (expr), ctx);
13376 break;
13377 case COMPLEX_CST:
13378 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13379 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13380 break;
13381 case VECTOR_CST:
13382 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13383 break;
13384 default:
13385 break;
13387 break;
13388 case tcc_exceptional:
13389 switch (code)
13391 case TREE_LIST:
13392 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13393 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13394 expr = TREE_CHAIN (expr);
13395 goto recursive_label;
13396 break;
13397 case TREE_VEC:
13398 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13399 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13400 break;
13401 default:
13402 break;
13404 break;
13405 case tcc_expression:
13406 case tcc_reference:
13407 case tcc_comparison:
13408 case tcc_unary:
13409 case tcc_binary:
13410 case tcc_statement:
13411 case tcc_vl_exp:
13412 len = TREE_OPERAND_LENGTH (expr);
13413 for (i = 0; i < len; ++i)
13414 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13415 break;
13416 case tcc_declaration:
13417 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13418 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13419 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13421 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13422 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13423 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13424 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13425 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13427 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13428 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13430 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13432 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13433 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13434 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13436 break;
13437 case tcc_type:
13438 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13439 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13440 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13441 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13442 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13443 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13444 if (INTEGRAL_TYPE_P (expr)
13445 || SCALAR_FLOAT_TYPE_P (expr))
13447 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13448 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13450 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13451 if (TREE_CODE (expr) == RECORD_TYPE
13452 || TREE_CODE (expr) == UNION_TYPE
13453 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13454 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13455 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13456 break;
13457 default:
13458 break;
13462 /* Helper function for outputting the checksum of a tree T. When
13463 debugging with gdb, you can "define mynext" to be "next" followed
13464 by "call debug_fold_checksum (op0)", then just trace down till the
13465 outputs differ. */
13467 void
13468 debug_fold_checksum (const_tree t)
13470 int i;
13471 unsigned char checksum[16];
13472 struct md5_ctx ctx;
13473 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13475 md5_init_ctx (&ctx);
13476 fold_checksum_tree (t, &ctx, ht);
13477 md5_finish_ctx (&ctx, checksum);
13478 htab_empty (ht);
13480 for (i = 0; i < 16; i++)
13481 fprintf (stderr, "%d ", checksum[i]);
13483 fprintf (stderr, "\n");
13486 #endif
13488 /* Fold a unary tree expression with code CODE of type TYPE with an
13489 operand OP0. Return a folded expression if successful. Otherwise,
13490 return a tree expression with code CODE of type TYPE with an
13491 operand OP0. */
13493 tree
13494 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13496 tree tem;
13497 #ifdef ENABLE_FOLD_CHECKING
13498 unsigned char checksum_before[16], checksum_after[16];
13499 struct md5_ctx ctx;
13500 htab_t ht;
13502 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13503 md5_init_ctx (&ctx);
13504 fold_checksum_tree (op0, &ctx, ht);
13505 md5_finish_ctx (&ctx, checksum_before);
13506 htab_empty (ht);
13507 #endif
13509 tem = fold_unary (code, type, op0);
13510 if (!tem)
13511 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13513 #ifdef ENABLE_FOLD_CHECKING
13514 md5_init_ctx (&ctx);
13515 fold_checksum_tree (op0, &ctx, ht);
13516 md5_finish_ctx (&ctx, checksum_after);
13517 htab_delete (ht);
13519 if (memcmp (checksum_before, checksum_after, 16))
13520 fold_check_failed (op0, tem);
13521 #endif
13522 return tem;
13525 /* Fold a binary tree expression with code CODE of type TYPE with
13526 operands OP0 and OP1. Return a folded expression if successful.
13527 Otherwise, return a tree expression with code CODE of type TYPE
13528 with operands OP0 and OP1. */
13530 tree
13531 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13532 MEM_STAT_DECL)
13534 tree tem;
13535 #ifdef ENABLE_FOLD_CHECKING
13536 unsigned char checksum_before_op0[16],
13537 checksum_before_op1[16],
13538 checksum_after_op0[16],
13539 checksum_after_op1[16];
13540 struct md5_ctx ctx;
13541 htab_t ht;
13543 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13544 md5_init_ctx (&ctx);
13545 fold_checksum_tree (op0, &ctx, ht);
13546 md5_finish_ctx (&ctx, checksum_before_op0);
13547 htab_empty (ht);
13549 md5_init_ctx (&ctx);
13550 fold_checksum_tree (op1, &ctx, ht);
13551 md5_finish_ctx (&ctx, checksum_before_op1);
13552 htab_empty (ht);
13553 #endif
13555 tem = fold_binary (code, type, op0, op1);
13556 if (!tem)
13557 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13559 #ifdef ENABLE_FOLD_CHECKING
13560 md5_init_ctx (&ctx);
13561 fold_checksum_tree (op0, &ctx, ht);
13562 md5_finish_ctx (&ctx, checksum_after_op0);
13563 htab_empty (ht);
13565 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13566 fold_check_failed (op0, tem);
13568 md5_init_ctx (&ctx);
13569 fold_checksum_tree (op1, &ctx, ht);
13570 md5_finish_ctx (&ctx, checksum_after_op1);
13571 htab_delete (ht);
13573 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13574 fold_check_failed (op1, tem);
13575 #endif
13576 return tem;
13579 /* Fold a ternary tree expression with code CODE of type TYPE with
13580 operands OP0, OP1, and OP2. Return a folded expression if
13581 successful. Otherwise, return a tree expression with code CODE of
13582 type TYPE with operands OP0, OP1, and OP2. */
13584 tree
13585 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13586 MEM_STAT_DECL)
13588 tree tem;
13589 #ifdef ENABLE_FOLD_CHECKING
13590 unsigned char checksum_before_op0[16],
13591 checksum_before_op1[16],
13592 checksum_before_op2[16],
13593 checksum_after_op0[16],
13594 checksum_after_op1[16],
13595 checksum_after_op2[16];
13596 struct md5_ctx ctx;
13597 htab_t ht;
13599 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13600 md5_init_ctx (&ctx);
13601 fold_checksum_tree (op0, &ctx, ht);
13602 md5_finish_ctx (&ctx, checksum_before_op0);
13603 htab_empty (ht);
13605 md5_init_ctx (&ctx);
13606 fold_checksum_tree (op1, &ctx, ht);
13607 md5_finish_ctx (&ctx, checksum_before_op1);
13608 htab_empty (ht);
13610 md5_init_ctx (&ctx);
13611 fold_checksum_tree (op2, &ctx, ht);
13612 md5_finish_ctx (&ctx, checksum_before_op2);
13613 htab_empty (ht);
13614 #endif
13616 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13617 tem = fold_ternary (code, type, op0, op1, op2);
13618 if (!tem)
13619 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13621 #ifdef ENABLE_FOLD_CHECKING
13622 md5_init_ctx (&ctx);
13623 fold_checksum_tree (op0, &ctx, ht);
13624 md5_finish_ctx (&ctx, checksum_after_op0);
13625 htab_empty (ht);
13627 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13628 fold_check_failed (op0, tem);
13630 md5_init_ctx (&ctx);
13631 fold_checksum_tree (op1, &ctx, ht);
13632 md5_finish_ctx (&ctx, checksum_after_op1);
13633 htab_empty (ht);
13635 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13636 fold_check_failed (op1, tem);
13638 md5_init_ctx (&ctx);
13639 fold_checksum_tree (op2, &ctx, ht);
13640 md5_finish_ctx (&ctx, checksum_after_op2);
13641 htab_delete (ht);
13643 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13644 fold_check_failed (op2, tem);
13645 #endif
13646 return tem;
13649 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13650 arguments in ARGARRAY, and a null static chain.
13651 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13652 of type TYPE from the given operands as constructed by build_call_array. */
13654 tree
13655 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13657 tree tem;
13658 #ifdef ENABLE_FOLD_CHECKING
13659 unsigned char checksum_before_fn[16],
13660 checksum_before_arglist[16],
13661 checksum_after_fn[16],
13662 checksum_after_arglist[16];
13663 struct md5_ctx ctx;
13664 htab_t ht;
13665 int i;
13667 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13668 md5_init_ctx (&ctx);
13669 fold_checksum_tree (fn, &ctx, ht);
13670 md5_finish_ctx (&ctx, checksum_before_fn);
13671 htab_empty (ht);
13673 md5_init_ctx (&ctx);
13674 for (i = 0; i < nargs; i++)
13675 fold_checksum_tree (argarray[i], &ctx, ht);
13676 md5_finish_ctx (&ctx, checksum_before_arglist);
13677 htab_empty (ht);
13678 #endif
13680 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13682 #ifdef ENABLE_FOLD_CHECKING
13683 md5_init_ctx (&ctx);
13684 fold_checksum_tree (fn, &ctx, ht);
13685 md5_finish_ctx (&ctx, checksum_after_fn);
13686 htab_empty (ht);
13688 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13689 fold_check_failed (fn, tem);
13691 md5_init_ctx (&ctx);
13692 for (i = 0; i < nargs; i++)
13693 fold_checksum_tree (argarray[i], &ctx, ht);
13694 md5_finish_ctx (&ctx, checksum_after_arglist);
13695 htab_delete (ht);
13697 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13698 fold_check_failed (NULL_TREE, tem);
13699 #endif
13700 return tem;
13703 /* Perform constant folding and related simplification of initializer
13704 expression EXPR. These behave identically to "fold_buildN" but ignore
13705 potential run-time traps and exceptions that fold must preserve. */
13707 #define START_FOLD_INIT \
13708 int saved_signaling_nans = flag_signaling_nans;\
13709 int saved_trapping_math = flag_trapping_math;\
13710 int saved_rounding_math = flag_rounding_math;\
13711 int saved_trapv = flag_trapv;\
13712 int saved_folding_initializer = folding_initializer;\
13713 flag_signaling_nans = 0;\
13714 flag_trapping_math = 0;\
13715 flag_rounding_math = 0;\
13716 flag_trapv = 0;\
13717 folding_initializer = 1;
13719 #define END_FOLD_INIT \
13720 flag_signaling_nans = saved_signaling_nans;\
13721 flag_trapping_math = saved_trapping_math;\
13722 flag_rounding_math = saved_rounding_math;\
13723 flag_trapv = saved_trapv;\
13724 folding_initializer = saved_folding_initializer;
13726 tree
13727 fold_build1_initializer (enum tree_code code, tree type, tree op)
13729 tree result;
13730 START_FOLD_INIT;
13732 result = fold_build1 (code, type, op);
13734 END_FOLD_INIT;
13735 return result;
13738 tree
13739 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13741 tree result;
13742 START_FOLD_INIT;
13744 result = fold_build2 (code, type, op0, op1);
13746 END_FOLD_INIT;
13747 return result;
13750 tree
13751 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13752 tree op2)
13754 tree result;
13755 START_FOLD_INIT;
13757 result = fold_build3 (code, type, op0, op1, op2);
13759 END_FOLD_INIT;
13760 return result;
13763 tree
13764 fold_build_call_array_initializer (tree type, tree fn,
13765 int nargs, tree *argarray)
13767 tree result;
13768 START_FOLD_INIT;
13770 result = fold_build_call_array (type, fn, nargs, argarray);
13772 END_FOLD_INIT;
13773 return result;
13776 #undef START_FOLD_INIT
13777 #undef END_FOLD_INIT
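/* For illustration (a sketch under the assumption that ARG0 and ARG1
   are REAL_CST operands built by a front end): folding a static
   initializer such as "const double d = 1.0 / 3.0;" would go through

	tree val = fold_build2_initializer (RDIV_EXPR, double_type_node,
					    arg0, arg1);

   so that flag_trapping_math and the other flags saved above do not
   block the compile-time division. */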
13779 /* Determine if the first argument is a multiple of the second. Return 0 if
13780 it is not, or if we cannot easily determine that it is.
13782 An example of the sort of thing we care about (at this point; this routine
13783 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13784 fold cases do now) is discovering that
13786 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13788 is a multiple of
13790 SAVE_EXPR (J * 8)
13792 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13794 This code also handles discovering that
13796 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13798 is a multiple of 8 so we don't have to worry about dealing with a
13799 possible remainder.
13801 Note that we *look* inside a SAVE_EXPR only to determine how it was
13802 calculated; it is not safe for fold to do much of anything else with the
13803 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13804 at run time. For example, the latter example above *cannot* be implemented
13805 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13806 evaluation time of the original SAVE_EXPR is not necessarily the same at
13807 the time the new expression is evaluated. The only optimization of this
13808 sort that would be valid is changing
13810 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13812 divided by 8 to
13814 SAVE_EXPR (I) * SAVE_EXPR (J)
13816 (where the same SAVE_EXPR (J) is used in the original and the
13817 transformed version). */
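/* For example, for integer constants the test bottoms out in the
   INTEGER_CST case below: with TOP == 24 and BOTTOM == 8 the
   TRUNC_MOD_EXPR computed by int_const_binop is 0, so the result is 1. */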
13819 int
13820 multiple_of_p (tree type, const_tree top, const_tree bottom)
13822 if (operand_equal_p (top, bottom, 0))
13823 return 1;
13825 if (TREE_CODE (type) != INTEGER_TYPE)
13826 return 0;
13828 switch (TREE_CODE (top))
13830 case BIT_AND_EXPR:
13831 /* Bitwise AND can only yield a known multiple when BOTTOM is a power
13832 of two. If the mask is a multiple of BOTTOM then so is TOP. */
13833 if (!integer_pow2p (bottom))
13834 return 0;
13835 /* FALLTHRU */
13837 case MULT_EXPR:
13838 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13839 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13841 case PLUS_EXPR:
13842 case MINUS_EXPR:
13843 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13844 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13846 case LSHIFT_EXPR:
13847 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13849 tree op1, t1;
13851 op1 = TREE_OPERAND (top, 1);
13852 /* const_binop may not detect overflow correctly,
13853 so check for it explicitly here. */
13854 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13855 > TREE_INT_CST_LOW (op1)
13856 && TREE_INT_CST_HIGH (op1) == 0
13857 && 0 != (t1 = fold_convert (type,
13858 const_binop (LSHIFT_EXPR,
13859 size_one_node,
13860 op1, 0)))
13861 && !TREE_OVERFLOW (t1))
13862 return multiple_of_p (type, t1, bottom);
13864 return 0;
13866 case NOP_EXPR:
13867 /* Can't handle conversions from non-integral or wider integral type. */
13868 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13869 || (TYPE_PRECISION (type)
13870 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13871 return 0;
13873 /* ... fall through ... */
13875 case SAVE_EXPR:
13876 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13878 case INTEGER_CST:
13879 if (TREE_CODE (bottom) != INTEGER_CST
13880 || integer_zerop (bottom)
13881 || (TYPE_UNSIGNED (type)
13882 && (tree_int_cst_sgn (top) < 0
13883 || tree_int_cst_sgn (bottom) < 0)))
13884 return 0;
13885 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13886 top, bottom, 0));
13888 default:
13889 return 0;
13893 /* Return true if CODE or TYPE is known to be non-negative. */
13895 static bool
13896 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13898 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13899 && truth_value_p (code))
13900 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13901 have a signed:1 type (where the values are -1 and 0). */
13902 return true;
13903 return false;
13906 /* Return true if (CODE OP0) is known to be non-negative. If the return
13907 value is based on the assumption that signed overflow is undefined,
13908 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13909 *STRICT_OVERFLOW_P. */
13911 bool
13912 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13913 bool *strict_overflow_p)
13915 if (TYPE_UNSIGNED (type))
13916 return true;
13918 switch (code)
13920 case ABS_EXPR:
13921 /* We can't return 1 if flag_wrapv is set because
13922 ABS_EXPR<INT_MIN> = INT_MIN. */
13923 if (!INTEGRAL_TYPE_P (type))
13924 return true;
13925 if (TYPE_OVERFLOW_UNDEFINED (type))
13927 *strict_overflow_p = true;
13928 return true;
13930 break;
13932 case NON_LVALUE_EXPR:
13933 case FLOAT_EXPR:
13934 case FIX_TRUNC_EXPR:
13935 return tree_expr_nonnegative_warnv_p (op0,
13936 strict_overflow_p);
13938 case NOP_EXPR:
13940 tree inner_type = TREE_TYPE (op0);
13941 tree outer_type = type;
13943 if (TREE_CODE (outer_type) == REAL_TYPE)
13945 if (TREE_CODE (inner_type) == REAL_TYPE)
13946 return tree_expr_nonnegative_warnv_p (op0,
13947 strict_overflow_p);
13948 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13950 if (TYPE_UNSIGNED (inner_type))
13951 return true;
13952 return tree_expr_nonnegative_warnv_p (op0,
13953 strict_overflow_p);
13956 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13958 if (TREE_CODE (inner_type) == REAL_TYPE)
13959 return tree_expr_nonnegative_warnv_p (op0,
13960 strict_overflow_p);
13961 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13962 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13963 && TYPE_UNSIGNED (inner_type);
13966 break;
13968 default:
13969 return tree_simple_nonnegative_warnv_p (code, type);
13972 /* We don't know the sign of `t', so be conservative and return false. */
13973 return false;
13976 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13977 value is based on the assumption that signed overflow is undefined,
13978 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13979 *STRICT_OVERFLOW_P. */
13981 bool
13982 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13983 tree op1, bool *strict_overflow_p)
13985 if (TYPE_UNSIGNED (type))
13986 return true;
13988 switch (code)
13990 case POINTER_PLUS_EXPR:
13991 case PLUS_EXPR:
13992 if (FLOAT_TYPE_P (type))
13993 return (tree_expr_nonnegative_warnv_p (op0,
13994 strict_overflow_p)
13995 && tree_expr_nonnegative_warnv_p (op1,
13996 strict_overflow_p));
13998 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13999 both unsigned and at least 2 bits shorter than the result. */
14000 if (TREE_CODE (type) == INTEGER_TYPE
14001 && TREE_CODE (op0) == NOP_EXPR
14002 && TREE_CODE (op1) == NOP_EXPR)
14004 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14005 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14006 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14007 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14009 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14010 TYPE_PRECISION (inner2)) + 1;
14011 return prec < TYPE_PRECISION (type);
14014 break;
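/* For example, assuming 32-bit int and 16-bit unsigned short
   operands US1 and US2 (illustrative names), (int) US1 + (int) US2
   needs at most 17 bits, and 17 < 32, so the sum is non-negative. */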
14016 case MULT_EXPR:
14017 if (FLOAT_TYPE_P (type))
14019 /* x * x for floating point x is always non-negative. */
14020 if (operand_equal_p (op0, op1, 0))
14021 return true;
14022 return (tree_expr_nonnegative_warnv_p (op0,
14023 strict_overflow_p)
14024 && tree_expr_nonnegative_warnv_p (op1,
14025 strict_overflow_p));
14028 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14029 both unsigned and their combined precision is less than the result's. */
14030 if (TREE_CODE (type) == INTEGER_TYPE
14031 && TREE_CODE (op0) == NOP_EXPR
14032 && TREE_CODE (op1) == NOP_EXPR)
14034 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14035 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14036 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14037 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14038 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14039 < TYPE_PRECISION (type);
14041 return false;
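/* For example, assuming 32-bit int and 8-bit unsigned char
   operands UC1 and UC2 (illustrative names), (int) UC1 * (int) UC2
   needs at most 16 bits, and 16 < 32, so the product is
   non-negative. */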
14043 case BIT_AND_EXPR:
14044 case MAX_EXPR:
14045 return (tree_expr_nonnegative_warnv_p (op0,
14046 strict_overflow_p)
14047 || tree_expr_nonnegative_warnv_p (op1,
14048 strict_overflow_p));
14050 case BIT_IOR_EXPR:
14051 case BIT_XOR_EXPR:
14052 case MIN_EXPR:
14053 case RDIV_EXPR:
14054 case TRUNC_DIV_EXPR:
14055 case CEIL_DIV_EXPR:
14056 case FLOOR_DIV_EXPR:
14057 case ROUND_DIV_EXPR:
14058 return (tree_expr_nonnegative_warnv_p (op0,
14059 strict_overflow_p)
14060 && tree_expr_nonnegative_warnv_p (op1,
14061 strict_overflow_p));
14063 case TRUNC_MOD_EXPR:
14064 case CEIL_MOD_EXPR:
14065 case FLOOR_MOD_EXPR:
14066 case ROUND_MOD_EXPR:
14067 return tree_expr_nonnegative_warnv_p (op0,
14068 strict_overflow_p);
14069 default:
14070 return tree_simple_nonnegative_warnv_p (code, type);
14073 /* We don't know the sign of `t', so be conservative and return false. */
14074 return false;
14077 /* Return true if T is known to be non-negative. If the return
14078 value is based on the assumption that signed overflow is undefined,
14079 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14080 *STRICT_OVERFLOW_P. */
14082 bool
14083 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14085 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14086 return true;
14088 switch (TREE_CODE (t))
14090 case INTEGER_CST:
14091 return tree_int_cst_sgn (t) >= 0;
14093 case REAL_CST:
14094 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14096 case FIXED_CST:
14097 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14099 case COND_EXPR:
14100 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14101 strict_overflow_p)
14102 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14103 strict_overflow_p));
14104 default:
14105 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14106 TREE_TYPE (t));
14108 /* We don't know the sign of `t', so be conservative and return false. */
14109 return false;
14112 /* Return true if a call to FNDECL of type TYPE with arguments ARG0 and
14113 ARG1 is known to be non-negative. If the return value is based on the
14114 assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
14115 to true; otherwise, don't change *STRICT_OVERFLOW_P. */
14117 bool
14118 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14119 tree arg0, tree arg1, bool *strict_overflow_p)
14121 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14122 switch (DECL_FUNCTION_CODE (fndecl))
14124 CASE_FLT_FN (BUILT_IN_ACOS):
14125 CASE_FLT_FN (BUILT_IN_ACOSH):
14126 CASE_FLT_FN (BUILT_IN_CABS):
14127 CASE_FLT_FN (BUILT_IN_COSH):
14128 CASE_FLT_FN (BUILT_IN_ERFC):
14129 CASE_FLT_FN (BUILT_IN_EXP):
14130 CASE_FLT_FN (BUILT_IN_EXP10):
14131 CASE_FLT_FN (BUILT_IN_EXP2):
14132 CASE_FLT_FN (BUILT_IN_FABS):
14133 CASE_FLT_FN (BUILT_IN_FDIM):
14134 CASE_FLT_FN (BUILT_IN_HYPOT):
14135 CASE_FLT_FN (BUILT_IN_POW10):
14136 CASE_INT_FN (BUILT_IN_FFS):
14137 CASE_INT_FN (BUILT_IN_PARITY):
14138 CASE_INT_FN (BUILT_IN_POPCOUNT):
14139 case BUILT_IN_BSWAP32:
14140 case BUILT_IN_BSWAP64:
14141 /* Always true. */
14142 return true;
14144 CASE_FLT_FN (BUILT_IN_SQRT):
14145 /* sqrt(-0.0) is -0.0. */
14146 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14147 return true;
14148 return tree_expr_nonnegative_warnv_p (arg0,
14149 strict_overflow_p);
14151 CASE_FLT_FN (BUILT_IN_ASINH):
14152 CASE_FLT_FN (BUILT_IN_ATAN):
14153 CASE_FLT_FN (BUILT_IN_ATANH):
14154 CASE_FLT_FN (BUILT_IN_CBRT):
14155 CASE_FLT_FN (BUILT_IN_CEIL):
14156 CASE_FLT_FN (BUILT_IN_ERF):
14157 CASE_FLT_FN (BUILT_IN_EXPM1):
14158 CASE_FLT_FN (BUILT_IN_FLOOR):
14159 CASE_FLT_FN (BUILT_IN_FMOD):
14160 CASE_FLT_FN (BUILT_IN_FREXP):
14161 CASE_FLT_FN (BUILT_IN_LCEIL):
14162 CASE_FLT_FN (BUILT_IN_LDEXP):
14163 CASE_FLT_FN (BUILT_IN_LFLOOR):
14164 CASE_FLT_FN (BUILT_IN_LLCEIL):
14165 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14166 CASE_FLT_FN (BUILT_IN_LLRINT):
14167 CASE_FLT_FN (BUILT_IN_LLROUND):
14168 CASE_FLT_FN (BUILT_IN_LRINT):
14169 CASE_FLT_FN (BUILT_IN_LROUND):
14170 CASE_FLT_FN (BUILT_IN_MODF):
14171 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14172 CASE_FLT_FN (BUILT_IN_RINT):
14173 CASE_FLT_FN (BUILT_IN_ROUND):
14174 CASE_FLT_FN (BUILT_IN_SCALB):
14175 CASE_FLT_FN (BUILT_IN_SCALBLN):
14176 CASE_FLT_FN (BUILT_IN_SCALBN):
14177 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14178 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14179 CASE_FLT_FN (BUILT_IN_SINH):
14180 CASE_FLT_FN (BUILT_IN_TANH):
14181 CASE_FLT_FN (BUILT_IN_TRUNC):
14182 /* True if the 1st argument is nonnegative. */
14183 return tree_expr_nonnegative_warnv_p (arg0,
14184 strict_overflow_p);
14186 CASE_FLT_FN (BUILT_IN_FMAX):
14187 /* True if the 1st OR 2nd arguments are nonnegative. */
14188 return (tree_expr_nonnegative_warnv_p (arg0,
14189 strict_overflow_p)
14190 || (tree_expr_nonnegative_warnv_p (arg1,
14191 strict_overflow_p)));
14193 CASE_FLT_FN (BUILT_IN_FMIN):
14194 /* True if the 1st AND 2nd arguments are nonnegative. */
14195 return (tree_expr_nonnegative_warnv_p (arg0,
14196 strict_overflow_p)
14197 && (tree_expr_nonnegative_warnv_p (arg1,
14198 strict_overflow_p)));
14200 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14201 /* True if the 2nd argument is nonnegative. */
14202 return tree_expr_nonnegative_warnv_p (arg1,
14203 strict_overflow_p);
14205 CASE_FLT_FN (BUILT_IN_POWI):
14206 /* True if the 1st argument is nonnegative or the second
14207 argument is an even integer. */
14208 if (TREE_CODE (arg1) == INTEGER_CST
14209 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14210 return true;
14211 return tree_expr_nonnegative_warnv_p (arg0,
14212 strict_overflow_p);
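/* For example, powi (x, 4) is non-negative for any real x,
   whereas powi (x, 3) is only known non-negative when x is. */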
14214 CASE_FLT_FN (BUILT_IN_POW):
14215 /* True if the 1st argument is nonnegative or the second
14216 argument is an even integer-valued real. */
14217 if (TREE_CODE (arg1) == REAL_CST)
14219 REAL_VALUE_TYPE c;
14220 HOST_WIDE_INT n;
14222 c = TREE_REAL_CST (arg1);
14223 n = real_to_integer (&c);
14224 if ((n & 1) == 0)
14226 REAL_VALUE_TYPE cint;
14227 real_from_integer (&cint, VOIDmode, n,
14228 n < 0 ? -1 : 0, 0);
14229 if (real_identical (&c, &cint))
14230 return true;
14233 return tree_expr_nonnegative_warnv_p (arg0,
14234 strict_overflow_p);
14236 default:
14237 break;
14239 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14240 type);
14243 /* Return true if T is known to be non-negative. If the return
14244 value is based on the assumption that signed overflow is undefined,
14245 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14246 *STRICT_OVERFLOW_P. */
14248 bool
14249 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14251 enum tree_code code = TREE_CODE (t);
14252 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14253 return true;
14255 switch (code)
14257 case TARGET_EXPR:
14259 tree temp = TARGET_EXPR_SLOT (t);
14260 t = TARGET_EXPR_INITIAL (t);
14262 /* If the initializer is non-void, then it's a normal expression
14263 that will be assigned to the slot. */
14264 if (!VOID_TYPE_P (t))
14265 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14267 /* Otherwise, the initializer sets the slot in some way. One common
14268 way is an assignment statement at the end of the initializer. */
14269 while (1)
14271 if (TREE_CODE (t) == BIND_EXPR)
14272 t = expr_last (BIND_EXPR_BODY (t));
14273 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14274 || TREE_CODE (t) == TRY_CATCH_EXPR)
14275 t = expr_last (TREE_OPERAND (t, 0));
14276 else if (TREE_CODE (t) == STATEMENT_LIST)
14277 t = expr_last (t);
14278 else
14279 break;
14281 if (TREE_CODE (t) == MODIFY_EXPR
14282 && TREE_OPERAND (t, 0) == temp)
14283 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14284 strict_overflow_p);
14286 return false;
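/* One shape the TARGET_EXPR walk above recovers (a sketch using
   C-like pseudo-trees, not a real tree dump):

     TARGET_EXPR <slot,
       BIND_EXPR <... STATEMENT_LIST <...; slot = expr>>>

   The last statement assigns to the slot, so the answer is whatever
   tree_expr_nonnegative_warnv_p says about `expr'. */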
14289 case CALL_EXPR:
14291 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14292 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14294 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14295 get_callee_fndecl (t),
14296 arg0,
14297 arg1,
14298 strict_overflow_p);
14300 case COMPOUND_EXPR:
14301 case MODIFY_EXPR:
14302 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14303 strict_overflow_p);
14304 case BIND_EXPR:
14305 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14306 strict_overflow_p);
14307 case SAVE_EXPR:
14308 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14309 strict_overflow_p);
14311 default:
14312 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14313 TREE_TYPE (t));
14316 /* We don't know the sign of `t', so be conservative and return false. */
14317 return false;
14320 /* Return true if T is known to be non-negative. If the return
14321 value is based on the assumption that signed overflow is undefined,
14322 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14323 *STRICT_OVERFLOW_P. */
14325 bool
14326 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14328 enum tree_code code;
14329 if (t == error_mark_node)
14330 return false;
14332 code = TREE_CODE (t);
14333 switch (TREE_CODE_CLASS (code))
14335 case tcc_binary:
14336 case tcc_comparison:
14337 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14338 TREE_TYPE (t),
14339 TREE_OPERAND (t, 0),
14340 TREE_OPERAND (t, 1),
14341 strict_overflow_p);
14343 case tcc_unary:
14344 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14345 TREE_TYPE (t),
14346 TREE_OPERAND (t, 0),
14347 strict_overflow_p);
14349 case tcc_constant:
14350 case tcc_declaration:
14351 case tcc_reference:
14352 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14354 default:
14355 break;
14358 switch (code)
14360 case TRUTH_AND_EXPR:
14361 case TRUTH_OR_EXPR:
14362 case TRUTH_XOR_EXPR:
14363 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14364 TREE_TYPE (t),
14365 TREE_OPERAND (t, 0),
14366 TREE_OPERAND (t, 1),
14367 strict_overflow_p);
14368 case TRUTH_NOT_EXPR:
14369 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14370 TREE_TYPE (t),
14371 TREE_OPERAND (t, 0),
14372 strict_overflow_p);
14374 case COND_EXPR:
14375 case CONSTRUCTOR:
14376 case OBJ_TYPE_REF:
14377 case ASSERT_EXPR:
14378 case ADDR_EXPR:
14379 case WITH_SIZE_EXPR:
14380 case EXC_PTR_EXPR:
14381 case SSA_NAME:
14382 case FILTER_EXPR:
14383 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14385 default:
14386 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14390 /* Return true if `t' is known to be non-negative. Handle warnings
14391 about undefined signed overflow. */
14393 bool
14394 tree_expr_nonnegative_p (tree t)
14396 bool ret, strict_overflow_p;
14398 strict_overflow_p = false;
14399 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14400 if (strict_overflow_p)
14401 fold_overflow_warning (("assuming signed overflow does not occur when "
14402 "determining that expression is always "
14403 "non-negative"),
14404 WARN_STRICT_OVERFLOW_MISC);
14405 return ret;
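/* Usage sketch (hypothetical caller, not from this file):

     if (tree_expr_nonnegative_p (arg))
       ... simplify abs (arg) to arg ...

   For signed x, an expression such as x * x is typically provable
   nonnegative only by assuming signed overflow is undefined, which
   is what makes the -Wstrict-overflow warning above fire. */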
14409 /* Return true when (CODE OP0) is known to be nonzero.
14410 For floating point we further ensure that T is not denormal.
14411 Similar logic is present in nonzero_address in rtlanal.c.
14413 If the return value is based on the assumption that signed overflow
14414 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14415 change *STRICT_OVERFLOW_P. */
14417 bool
14418 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14419 bool *strict_overflow_p)
14421 switch (code)
14423 case ABS_EXPR:
14424 return tree_expr_nonzero_warnv_p (op0,
14425 strict_overflow_p);
14427 case NOP_EXPR:
14429 tree inner_type = TREE_TYPE (op0);
14430 tree outer_type = type;
14432 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14433 && tree_expr_nonzero_warnv_p (op0,
14434 strict_overflow_p));
14436 break;
14438 case NON_LVALUE_EXPR:
14439 return tree_expr_nonzero_warnv_p (op0,
14440 strict_overflow_p);
14442 default:
14443 break;
14446 return false;
14449 /* Return true when (CODE OP0 OP1) is known to be nonzero.
14450 For floating point we further ensure that T is not denormal.
14451 Similar logic is present in nonzero_address in rtlanal.c.
14453 If the return value is based on the assumption that signed overflow
14454 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14455 change *STRICT_OVERFLOW_P. */
14457 bool
14458 tree_binary_nonzero_warnv_p (enum tree_code code,
14459 tree type,
14460 tree op0,
14461 tree op1, bool *strict_overflow_p)
14463 bool sub_strict_overflow_p;
14464 switch (code)
14466 case POINTER_PLUS_EXPR:
14467 case PLUS_EXPR:
14468 if (TYPE_OVERFLOW_UNDEFINED (type))
14470 /* In the presence of negative values it is hard
14471 to say anything. */
14472 sub_strict_overflow_p = false;
14473 if (!tree_expr_nonnegative_warnv_p (op0,
14474 &sub_strict_overflow_p)
14475 || !tree_expr_nonnegative_warnv_p (op1,
14476 &sub_strict_overflow_p))
14477 return false;
14478 /* One of the operands must be positive and the other non-negative. */
14479 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14480 overflows, on a twos-complement machine the sum of two
14481 nonnegative numbers can never be zero. */
14482 return (tree_expr_nonzero_warnv_p (op0,
14483 strict_overflow_p)
14484 || tree_expr_nonzero_warnv_p (op1,
14485 strict_overflow_p));
14487 break;
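/* Illustration of the PLUS_EXPR reasoning (assuming 32-bit twos
   complement): if x >= 0 and y >= 0 with, say, x != 0, then
   1 <= x + y <= 0x7fffffff + 0x7fffffff = 0xfffffffe, which is
   never a multiple of 2^32, so even a wrapped sum is nonzero. */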
14489 case MULT_EXPR:
14490 if (TYPE_OVERFLOW_UNDEFINED (type))
14492 if (tree_expr_nonzero_warnv_p (op0,
14493 strict_overflow_p)
14494 && tree_expr_nonzero_warnv_p (op1,
14495 strict_overflow_p))
14497 *strict_overflow_p = true;
14498 return true;
14501 break;
14503 case MIN_EXPR:
14504 sub_strict_overflow_p = false;
14505 if (tree_expr_nonzero_warnv_p (op0,
14506 &sub_strict_overflow_p)
14507 && tree_expr_nonzero_warnv_p (op1,
14508 &sub_strict_overflow_p))
14510 if (sub_strict_overflow_p)
14511 *strict_overflow_p = true;
14513 break;
14515 case MAX_EXPR:
14516 sub_strict_overflow_p = false;
14517 if (tree_expr_nonzero_warnv_p (op0,
14518 &sub_strict_overflow_p))
14520 if (sub_strict_overflow_p)
14521 *strict_overflow_p = true;
14523 /* When both operands are nonzero, MAX must be too. */
14524 if (tree_expr_nonzero_warnv_p (op1,
14525 strict_overflow_p))
14526 return true;
14528 /* MAX where operand 0 is positive is positive. */
14529 return tree_expr_nonnegative_warnv_p (op0,
14530 strict_overflow_p);
14532 /* MAX where operand 1 is positive is positive. */
14533 else if (tree_expr_nonzero_warnv_p (op1,
14534 &sub_strict_overflow_p)
14535 && tree_expr_nonnegative_warnv_p (op1,
14536 &sub_strict_overflow_p))
14538 if (sub_strict_overflow_p)
14539 *strict_overflow_p = true;
14540 return true;
14542 break;
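/* MAX_EXPR examples (illustrative): MAX (x, 1) is nonzero because
   operand 1 is both nonzero and nonnegative, hence positive; and
   MAX (x, y) with both operands nonzero is nonzero because the
   result is always one of the two operands. */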
14544 case BIT_IOR_EXPR:
14545 return (tree_expr_nonzero_warnv_p (op1,
14546 strict_overflow_p)
14547 || tree_expr_nonzero_warnv_p (op0,
14548 strict_overflow_p));
14550 default:
14551 break;
14554 return false;
14557 /* Return true when T is known to be nonzero.
14558 For floating point we further ensure that T is not denormal.
14559 Similar logic is present in nonzero_address in rtlanal.c.
14561 If the return value is based on the assumption that signed overflow
14562 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14563 change *STRICT_OVERFLOW_P. */
14565 bool
14566 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14568 bool sub_strict_overflow_p;
14569 switch (TREE_CODE (t))
14571 case INTEGER_CST:
14572 return !integer_zerop (t);
14574 case ADDR_EXPR:
14576 tree base = get_base_address (TREE_OPERAND (t, 0));
14578 if (!base)
14579 return false;
14581 /* Weak declarations may link to NULL. */
14582 if (VAR_OR_FUNCTION_DECL_P (base))
14583 return !DECL_WEAK (base);
14585 /* Constants are never weak. */
14586 if (CONSTANT_CLASS_P (base))
14587 return true;
14589 return false;
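/* Weak-symbol example for the ADDR_EXPR case (hypothetical
   declaration):

     extern int hook __attribute__ ((weak));

   &hook cannot be folded to nonzero because a weak declaration may
   resolve to a null address, whereas the address of an ordinary
   non-weak decl or of a constant such as "abc" is known nonzero. */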
14592 case COND_EXPR:
14593 sub_strict_overflow_p = false;
14594 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14595 &sub_strict_overflow_p)
14596 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14597 &sub_strict_overflow_p))
14599 if (sub_strict_overflow_p)
14600 *strict_overflow_p = true;
14601 return true;
14603 break;
14605 default:
14606 break;
14608 return false;
14611 /* Return true when T is known to be nonzero.
14612 For floating point we further ensure that T is not denormal.
14613 Similar logic is present in nonzero_address in rtlanal.c.
14615 If the return value is based on the assumption that signed overflow
14616 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14617 change *STRICT_OVERFLOW_P. */
14619 bool
14620 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14622 tree type = TREE_TYPE (t);
14623 enum tree_code code;
14625 /* Doing something useful for floating point would need more work. */
14626 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14627 return false;
14629 code = TREE_CODE (t);
14630 switch (TREE_CODE_CLASS (code))
14632 case tcc_unary:
14633 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14634 strict_overflow_p);
14635 case tcc_binary:
14636 case tcc_comparison:
14637 return tree_binary_nonzero_warnv_p (code, type,
14638 TREE_OPERAND (t, 0),
14639 TREE_OPERAND (t, 1),
14640 strict_overflow_p);
14641 case tcc_constant:
14642 case tcc_declaration:
14643 case tcc_reference:
14644 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14646 default:
14647 break;
14650 switch (code)
14652 case TRUTH_NOT_EXPR:
14653 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14654 strict_overflow_p);
14656 case TRUTH_AND_EXPR:
14657 case TRUTH_OR_EXPR:
14658 case TRUTH_XOR_EXPR:
14659 return tree_binary_nonzero_warnv_p (code, type,
14660 TREE_OPERAND (t, 0),
14661 TREE_OPERAND (t, 1),
14662 strict_overflow_p);
14664 case COND_EXPR:
14665 case CONSTRUCTOR:
14666 case OBJ_TYPE_REF:
14667 case ASSERT_EXPR:
14668 case ADDR_EXPR:
14669 case WITH_SIZE_EXPR:
14670 case EXC_PTR_EXPR:
14671 case SSA_NAME:
14672 case FILTER_EXPR:
14673 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14675 case COMPOUND_EXPR:
14676 case MODIFY_EXPR:
14677 case BIND_EXPR:
14678 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14679 strict_overflow_p);
14681 case SAVE_EXPR:
14682 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14683 strict_overflow_p);
14685 case CALL_EXPR:
14686 return alloca_call_p (t);
14688 default:
14689 break;
14691 return false;
14694 /* Return true when T is known to be nonzero. Handle warnings
14695 about undefined signed overflow. */
14697 bool
14698 tree_expr_nonzero_p (tree t)
14700 bool ret, strict_overflow_p;
14702 strict_overflow_p = false;
14703 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14704 if (strict_overflow_p)
14705 fold_overflow_warning (("assuming signed overflow does not occur when "
14706 "determining that expression is always "
14707 "non-zero"),
14708 WARN_STRICT_OVERFLOW_MISC);
14709 return ret;
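/* Usage sketch (hypothetical caller): a comparison folder can turn
   `p == 0' into false when tree_expr_nonzero_p (p) holds, e.g. for
   p == &var with `var' a non-weak declaration; the strict-overflow
   warning discipline mirrors tree_expr_nonnegative_p above. */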
14712 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14713 attempt to fold the expression to a constant without modifying TYPE,
14714 OP0 or OP1.
14716 If the expression can be simplified to a constant, then return
14717 the constant. If the expression cannot be simplified to a
14718 constant, then return NULL_TREE. */
14720 tree
14721 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14723 tree tem = fold_binary (code, type, op0, op1);
14724 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14727 /* Given the components of a unary expression CODE, TYPE and OP0,
14728 attempt to fold the expression to a constant without modifying
14729 TYPE or OP0.
14731 If the expression can be simplified to a constant, then return
14732 the constant. If the expression cannot be simplified to a
14733 constant, then return NULL_TREE. */
14735 tree
14736 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14738 tree tem = fold_unary (code, type, op0);
14739 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
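/* Behavior sketch for the two helpers above (hypothetical calls;
   `type', `cst2', `cst3' and `var_x' stand for an integer type, the
   constants 2 and 3, and a VAR_DECL):

     fold_binary_to_constant (PLUS_EXPR, type, cst2, cst3)
       returns the INTEGER_CST 5;
     fold_binary_to_constant (PLUS_EXPR, type, var_x, cst3)
       returns NULL_TREE, since x + 3 is not a constant. */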
14742 /* If EXP represents referencing an element in a constant string
14743 (either via pointer arithmetic or array indexing), return the
14744 tree representing the value accessed, otherwise return NULL. */
14746 tree
14747 fold_read_from_constant_string (tree exp)
14749 if ((TREE_CODE (exp) == INDIRECT_REF
14750 || TREE_CODE (exp) == ARRAY_REF)
14751 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14753 tree exp1 = TREE_OPERAND (exp, 0);
14754 tree index;
14755 tree string;
14757 if (TREE_CODE (exp) == INDIRECT_REF)
14758 string = string_constant (exp1, &index);
14759 else
14761 tree low_bound = array_ref_low_bound (exp);
14762 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14764 /* Optimize the special case of a zero lower bound.
14766 We convert the low_bound to sizetype to avoid some problems
14767 with constant folding. (E.g. suppose the lower bound is 1,
14768 and its mode is QI. Without the conversion, (ARRAY
14769 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14770 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14771 if (! integer_zerop (low_bound))
14772 index = size_diffop (index, fold_convert (sizetype, low_bound));
14774 string = exp1;
14777 if (string
14778 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14779 && TREE_CODE (string) == STRING_CST
14780 && TREE_CODE (index) == INTEGER_CST
14781 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14782 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14783 == MODE_INT)
14784 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14785 return build_int_cst_type (TREE_TYPE (exp),
14786 (TREE_STRING_POINTER (string)
14787 [TREE_INT_CST_LOW (index)]));
14789 return NULL;
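/* Example of the folding above (illustrative): the ARRAY_REF
   "abc"[1] folds to the character constant 'b', provided the index
   is a constant within the string bounds and the element type has a
   one-byte integer mode. */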
14792 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14793 an integer, real, or fixed-point constant.
14795 TYPE is the type of the result. */
14797 static tree
14798 fold_negate_const (tree arg0, tree type)
14800 tree t = NULL_TREE;
14802 switch (TREE_CODE (arg0))
14804 case INTEGER_CST:
14806 unsigned HOST_WIDE_INT low;
14807 HOST_WIDE_INT high;
14808 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14809 TREE_INT_CST_HIGH (arg0),
14810 &low, &high);
14811 t = force_fit_type_double (type, low, high, 1,
14812 (overflow | TREE_OVERFLOW (arg0))
14813 && !TYPE_UNSIGNED (type));
14814 break;
14817 case REAL_CST:
14818 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14819 break;
14821 case FIXED_CST:
14823 FIXED_VALUE_TYPE f;
14824 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14825 &(TREE_FIXED_CST (arg0)), NULL,
14826 TYPE_SATURATING (type));
14827 t = build_fixed (type, f);
14828 /* Propagate overflow flags. */
14829 if (overflow_p | TREE_OVERFLOW (arg0))
14831 TREE_OVERFLOW (t) = 1;
14832 TREE_CONSTANT_OVERFLOW (t) = 1;
14834 else if (TREE_CONSTANT_OVERFLOW (arg0))
14835 TREE_CONSTANT_OVERFLOW (t) = 1;
14836 break;
14839 default:
14840 gcc_unreachable ();
14843 return t;
14846 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14847 an integer constant or real constant.
14849 TYPE is the type of the result. */
14851 tree
14852 fold_abs_const (tree arg0, tree type)
14854 tree t = NULL_TREE;
14856 switch (TREE_CODE (arg0))
14858 case INTEGER_CST:
14859 /* If the value is unsigned, then the absolute value is
14860 the same as the ordinary value. */
14861 if (TYPE_UNSIGNED (type))
14862 t = arg0;
14863 /* Similarly, if the value is non-negative. */
14864 else if (INT_CST_LT (integer_minus_one_node, arg0))
14865 t = arg0;
14866 /* If the value is negative, then the absolute value is
14867 its negation. */
14868 else
14870 unsigned HOST_WIDE_INT low;
14871 HOST_WIDE_INT high;
14872 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14873 TREE_INT_CST_HIGH (arg0),
14874 &low, &high);
14875 t = force_fit_type_double (type, low, high, -1,
14876 overflow | TREE_OVERFLOW (arg0));
14878 break;
14880 case REAL_CST:
14881 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14882 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14883 else
14884 t = arg0;
14885 break;
14887 default:
14888 gcc_unreachable ();
14891 return t;
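/* Overflow example for fold_abs_const (assuming a 32-bit int): for
   ARG0 == INT_MIN the negation wraps back to INT_MIN, so the result
   is produced via force_fit_type_double with TREE_OVERFLOW set
   rather than silently yielding a negative "absolute value". */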
14894 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14895 constant. TYPE is the type of the result. */
14897 static tree
14898 fold_not_const (tree arg0, tree type)
14900 tree t = NULL_TREE;
14902 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14904 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14905 ~TREE_INT_CST_HIGH (arg0), 0,
14906 TREE_OVERFLOW (arg0));
14908 return t;
14911 /* Given CODE, a relational operator, the target type, TYPE and two
14912 constant operands OP0 and OP1, return the result of the
14913 relational operation. If the result is not a compile time
14914 constant, then return NULL_TREE. */
14916 static tree
14917 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14919 int result, invert;
14921 /* From here on, the only cases we handle are when the result is
14922 known to be a constant. */
14924 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14926 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14927 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14929 /* Handle the cases where either operand is a NaN. */
14930 if (real_isnan (c0) || real_isnan (c1))
14932 switch (code)
14934 case EQ_EXPR:
14935 case ORDERED_EXPR:
14936 result = 0;
14937 break;
14939 case NE_EXPR:
14940 case UNORDERED_EXPR:
14941 case UNLT_EXPR:
14942 case UNLE_EXPR:
14943 case UNGT_EXPR:
14944 case UNGE_EXPR:
14945 case UNEQ_EXPR:
14946 result = 1;
14947 break;
14949 case LT_EXPR:
14950 case LE_EXPR:
14951 case GT_EXPR:
14952 case GE_EXPR:
14953 case LTGT_EXPR:
14954 if (flag_trapping_math)
14955 return NULL_TREE;
14956 result = 0;
14957 break;
14959 default:
14960 gcc_unreachable ();
14963 return constant_boolean_node (result, type);
14966 return constant_boolean_node (real_compare (code, c0, c1), type);
14969 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14971 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14972 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14973 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14976 /* Handle equality/inequality of complex constants. */
14977 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14979 tree rcond = fold_relational_const (code, type,
14980 TREE_REALPART (op0),
14981 TREE_REALPART (op1));
14982 tree icond = fold_relational_const (code, type,
14983 TREE_IMAGPART (op0),
14984 TREE_IMAGPART (op1));
14985 if (code == EQ_EXPR)
14986 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14987 else if (code == NE_EXPR)
14988 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14989 else
14990 return NULL_TREE;
14993 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14995 To compute GT, swap the arguments and do LT.
14996 To compute GE, do LT and invert the result.
14997 To compute LE, swap the arguments, do LT and invert the result.
14998 To compute NE, do EQ and invert the result.
15000 Therefore, the code below must handle only EQ and LT. */
15002 if (code == LE_EXPR || code == GT_EXPR)
15004 tree tem = op0;
15005 op0 = op1;
15006 op1 = tem;
15007 code = swap_tree_comparison (code);
15010 /* Note that it is safe to invert for real values here because we
15011 have already handled the one case where it matters. */
15013 invert = 0;
15014 if (code == NE_EXPR || code == GE_EXPR)
15016 invert = 1;
15017 code = invert_tree_comparison (code, false);
15020 /* Compute a result for LT or EQ if the arguments permit;
15021 otherwise return NULL_TREE. */
15022 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15024 if (code == EQ_EXPR)
15025 result = tree_int_cst_equal (op0, op1);
15026 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15027 result = INT_CST_LT_UNSIGNED (op0, op1);
15028 else
15029 result = INT_CST_LT (op0, op1);
15031 else
15032 return NULL_TREE;
15034 if (invert)
15035 result ^= 1;
15036 return constant_boolean_node (result, type);
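/* NaN examples for the logic above (illustrative): with c0 a NaN,
   c0 == c1 folds to 0 and c0 != c1 folds to 1 regardless of c1,
   while c0 < c1 folds to 0 only under -fno-trapping-math; with
   trapping math the fold is declined so that the invalid-operation
   exception of the ordered comparison is preserved. */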
15039 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15040 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15041 itself. */
15043 tree
15044 fold_build_cleanup_point_expr (tree type, tree expr)
15046 /* If the expression does not have side effects then we don't have to wrap
15047 it with a cleanup point expression. */
15048 if (!TREE_SIDE_EFFECTS (expr))
15049 return expr;
15051 /* If the expression is a return, check whether the expression inside the
15052 return, or the right-hand side of the modify expression inside the
15053 return, has side effects. If either has none, we don't need to
15054 wrap the expression in a cleanup point expression. Note we don't check the
15055 left-hand side of the modify because it should always be a return decl. */
15056 if (TREE_CODE (expr) == RETURN_EXPR)
15058 tree op = TREE_OPERAND (expr, 0);
15059 if (!op || !TREE_SIDE_EFFECTS (op))
15060 return expr;
15061 op = TREE_OPERAND (op, 1);
15062 if (!TREE_SIDE_EFFECTS (op))
15063 return expr;
15066 return build1 (CLEANUP_POINT_EXPR, type, expr);
15069 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15070 of an indirection through OP0, or NULL_TREE if no simplification is
15071 possible. */
15073 tree
15074 fold_indirect_ref_1 (tree type, tree op0)
15076 tree sub = op0;
15077 tree subtype;
15079 STRIP_NOPS (sub);
15080 subtype = TREE_TYPE (sub);
15081 if (!POINTER_TYPE_P (subtype))
15082 return NULL_TREE;
15084 if (TREE_CODE (sub) == ADDR_EXPR)
15086 tree op = TREE_OPERAND (sub, 0);
15087 tree optype = TREE_TYPE (op);
15088 /* *&CONST_DECL -> the value of the const decl. */
15089 if (TREE_CODE (op) == CONST_DECL)
15090 return DECL_INITIAL (op);
15091 /* *&p => p; make sure to handle *&"str"[cst] here. */
15092 if (type == optype)
15094 tree fop = fold_read_from_constant_string (op);
15095 if (fop)
15096 return fop;
15097 else
15098 return op;
15100 /* *(foo *)&fooarray => fooarray[0] */
15101 else if (TREE_CODE (optype) == ARRAY_TYPE
15102 && type == TREE_TYPE (optype))
15104 tree type_domain = TYPE_DOMAIN (optype);
15105 tree min_val = size_zero_node;
15106 if (type_domain && TYPE_MIN_VALUE (type_domain))
15107 min_val = TYPE_MIN_VALUE (type_domain);
15108 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15110 /* *(foo *)&complexfoo => __real__ complexfoo */
15111 else if (TREE_CODE (optype) == COMPLEX_TYPE
15112 && type == TREE_TYPE (optype))
15113 return fold_build1 (REALPART_EXPR, type, op);
15114 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15115 else if (TREE_CODE (optype) == VECTOR_TYPE
15116 && type == TREE_TYPE (optype))
15118 tree part_width = TYPE_SIZE (type);
15119 tree index = bitsize_int (0);
15120 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15124 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15125 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15126 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15128 tree op00 = TREE_OPERAND (sub, 0);
15129 tree op01 = TREE_OPERAND (sub, 1);
15130 tree op00type;
15132 STRIP_NOPS (op00);
15133 op00type = TREE_TYPE (op00);
15134 if (TREE_CODE (op00) == ADDR_EXPR
15135 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15136 && type == TREE_TYPE (TREE_TYPE (op00type)))
15138 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15139 tree part_width = TYPE_SIZE (type);
15140 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15141 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15142 tree index = bitsize_int (indexi);
15144 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15145 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15146 part_width, index);
15152 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15153 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15154 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15156 tree op00 = TREE_OPERAND (sub, 0);
15157 tree op01 = TREE_OPERAND (sub, 1);
15158 tree op00type;
15160 STRIP_NOPS (op00);
15161 op00type = TREE_TYPE (op00);
15162 if (TREE_CODE (op00) == ADDR_EXPR
15163 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15164 && type == TREE_TYPE (TREE_TYPE (op00type)))
15166 tree size = TYPE_SIZE_UNIT (type);
15167 if (tree_int_cst_equal (size, op01))
15168 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15172 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15173 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15174 && type == TREE_TYPE (TREE_TYPE (subtype)))
15176 tree type_domain;
15177 tree min_val = size_zero_node;
15178 sub = build_fold_indirect_ref (sub);
15179 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15180 if (type_domain && TYPE_MIN_VALUE (type_domain))
15181 min_val = TYPE_MIN_VALUE (type_domain);
15182 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15185 return NULL_TREE;
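/* Sketches of the simplifications above (C-level views of the
   trees, with `iarr', `cplx' and `v4sf' assumed to be an int array,
   a float _Complex and a 4 x float vector):

     *(int *) &iarr       => iarr[0]
     *(float *) &cplx     => __real__ cplx
     ((float *) &v4sf)[1] => BIT_FIELD_REF <v4sf, 32, 32> */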
15188 /* Builds an expression for an indirection through T, simplifying some
15189 cases. */
15191 tree
15192 build_fold_indirect_ref (tree t)
15194 tree type = TREE_TYPE (TREE_TYPE (t));
15195 tree sub = fold_indirect_ref_1 (type, t);
15197 if (sub)
15198 return sub;
15199 else
15200 return build1 (INDIRECT_REF, type, t);
15203 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15205 tree
15206 fold_indirect_ref (tree t)
15208 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15210 if (sub)
15211 return sub;
15212 else
15213 return t;
15216 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15217 whose result is ignored. The type of the returned tree need not be
15218 the same as the original expression. */
15220 tree
15221 fold_ignored_result (tree t)
15223 if (!TREE_SIDE_EFFECTS (t))
15224 return integer_zero_node;
15226 for (;;)
15227 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15229 case tcc_unary:
15230 t = TREE_OPERAND (t, 0);
15231 break;
15233 case tcc_binary:
15234 case tcc_comparison:
15235 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15236 t = TREE_OPERAND (t, 0);
15237 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15238 t = TREE_OPERAND (t, 1);
15239 else
15240 return t;
15241 break;
15243 case tcc_expression:
15244 switch (TREE_CODE (t))
15246 case COMPOUND_EXPR:
15247 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15248 return t;
15249 t = TREE_OPERAND (t, 0);
15250 break;
15252 case COND_EXPR:
15253 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15254 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15255 return t;
15256 t = TREE_OPERAND (t, 0);
15257 break;
15259 default:
15260 return t;
15262 break;
15264 default:
15265 return t;
15269 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15270 This can only be applied to objects of a sizetype. */
15272 tree
15273 round_up (tree value, int divisor)
15275 tree div = NULL_TREE;
15277 gcc_assert (divisor > 0);
15278 if (divisor == 1)
15279 return value;
15281 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15282 have to do anything. Only do this when VALUE is not a constant,
15283 because in that case this check is more expensive than just
15284 doing the rounding. */
15285 if (TREE_CODE (value) != INTEGER_CST)
15287 div = build_int_cst (TREE_TYPE (value), divisor);
15289 if (multiple_of_p (TREE_TYPE (value), value, div))
15290 return value;
15293 /* If divisor is a power of two, simplify this to bit manipulation. */
15294 if (divisor == (divisor & -divisor))
15296 if (TREE_CODE (value) == INTEGER_CST)
15298 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15299 unsigned HOST_WIDE_INT high;
15300 bool overflow_p;
15302 if ((low & (divisor - 1)) == 0)
15303 return value;
15305 overflow_p = TREE_OVERFLOW (value);
15306 high = TREE_INT_CST_HIGH (value);
15307 low &= ~(divisor - 1);
15308 low += divisor;
15309 if (low == 0)
15311 high++;
15312 if (high == 0)
15313 overflow_p = true;
15316 return force_fit_type_double (TREE_TYPE (value), low, high,
15317 -1, overflow_p);
15319 else
15321 tree t;
15323 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15324 value = size_binop (PLUS_EXPR, value, t);
15325 t = build_int_cst (TREE_TYPE (value), -divisor);
15326 value = size_binop (BIT_AND_EXPR, value, t);
15329 else
15331 if (!div)
15332 div = build_int_cst (TREE_TYPE (value), divisor);
15333 value = size_binop (CEIL_DIV_EXPR, value, div);
15334 value = size_binop (MULT_EXPR, value, div);
15337 return value;
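/* Worked example for the power-of-two path above (DIVISOR == 8):
   rounding up VALUE == 13 computes (13 + 7) & -8 == 16, since -8 is
   ~7 in twos complement; a non-power-of-two divisor such as 12
   takes the CEIL_DIV_EXPR/MULT_EXPR route instead. */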
15340 /* Likewise, but round down. */
15342 tree
15343 round_down (tree value, int divisor)
15345 tree div = NULL_TREE;
15347 gcc_assert (divisor > 0);
15348 if (divisor == 1)
15349 return value;
15351 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15352 have to do anything. Only do this when VALUE is not a constant,
15353 because in that case this check is more expensive than just
15354 doing the rounding. */
15355 if (TREE_CODE (value) != INTEGER_CST)
15357 div = build_int_cst (TREE_TYPE (value), divisor);
15359 if (multiple_of_p (TREE_TYPE (value), value, div))
15360 return value;
15363 /* If divisor is a power of two, simplify this to bit manipulation. */
15364 if (divisor == (divisor & -divisor))
15366 tree t;
15368 t = build_int_cst (TREE_TYPE (value), -divisor);
15369 value = size_binop (BIT_AND_EXPR, value, t);
15371 else
15373 if (!div)
15374 div = build_int_cst (TREE_TYPE (value), divisor);
15375 value = size_binop (FLOOR_DIV_EXPR, value, div);
15376 value = size_binop (MULT_EXPR, value, div);
15379 return value;
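/* round_down counterpart (DIVISOR == 8): 13 & -8 == 8, with no
   addition needed before the mask; non-power-of-two divisors use
   FLOOR_DIV_EXPR followed by MULT_EXPR. */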
15382 /* Returns the pointer to the base of the object addressed by EXP and
15383 extracts the information about the offset of the access, storing it
15384 in *PBITPOS and *POFFSET. */
15386 static tree
15387 split_address_to_core_and_offset (tree exp,
15388 HOST_WIDE_INT *pbitpos, tree *poffset)
15390 tree core;
15391 enum machine_mode mode;
15392 int unsignedp, volatilep;
15393 HOST_WIDE_INT bitsize;
15395 if (TREE_CODE (exp) == ADDR_EXPR)
15397 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15398 poffset, &mode, &unsignedp, &volatilep,
15399 false);
15400 core = fold_addr_expr (core);
15402 else
15404 core = exp;
15405 *pbitpos = 0;
15406 *poffset = NULL_TREE;
15409 return core;
15412 /* Returns true if the addresses of E1 and E2 differ by a constant, false
15413 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15415 bool
15416 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15418 tree core1, core2;
15419 HOST_WIDE_INT bitpos1, bitpos2;
15420 tree toffset1, toffset2, tdiff, type;
15422 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15423 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15425 if (bitpos1 % BITS_PER_UNIT != 0
15426 || bitpos2 % BITS_PER_UNIT != 0
15427 || !operand_equal_p (core1, core2, 0))
15428 return false;
15430 if (toffset1 && toffset2)
15432 type = TREE_TYPE (toffset1);
15433 if (type != TREE_TYPE (toffset2))
15434 toffset2 = fold_convert (type, toffset2);
15436 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15437 if (!cst_and_fits_in_hwi (tdiff))
15438 return false;
15440 *diff = int_cst_value (tdiff);
15442 else if (toffset1 || toffset2)
15444 /* If only one of the offsets is non-constant, the difference cannot
15445 be a constant. */
15446 return false;
15448 else
15449 *diff = 0;
15451 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15452 return true;
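/* Example (hypothetical operands): for E1 == &a[3] and E2 == &a[1]
   with `a' an array of 4-byte ints, both addresses share the core
   `a', the bit positions differ by 64, and *DIFF is set to 8. */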
15455 /* Simplify the floating point expression EXP when the sign of the
15456 result is not significant. Return NULL_TREE if no simplification
15457 is possible. */
15459 tree
15460 fold_strip_sign_ops (tree exp)
15462 tree arg0, arg1;
15464 switch (TREE_CODE (exp))
15466 case ABS_EXPR:
15467 case NEGATE_EXPR:
15468 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15469 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15471 case MULT_EXPR:
15472 case RDIV_EXPR:
15473 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15474 return NULL_TREE;
15475 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15476 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15477 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15478 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15479 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15480 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15481 break;
15483 case COMPOUND_EXPR:
15484 arg0 = TREE_OPERAND (exp, 0);
15485 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15486 if (arg1)
15487 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15488 break;
15490 case COND_EXPR:
15491 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15492 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15493 if (arg0 || arg1)
15494 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15495 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15496 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15497 break;
15499 case CALL_EXPR:
15501 const enum built_in_function fcode = builtin_mathfn_code (exp);
15502 switch (fcode)
15504 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15505 /* Strip copysign function call, return the 1st argument. */
15506 arg0 = CALL_EXPR_ARG (exp, 0);
15507 arg1 = CALL_EXPR_ARG (exp, 1);
15508 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15510 default:
15511 /* Strip sign ops from the argument of "odd" math functions. */
15512 if (negate_mathfn_p (fcode))
15514 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15515 if (arg0)
15516 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15518 break;
15521 break;
15523 default:
15524 break;
15526 return NULL_TREE;
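/* Sign-stripping sketches (illustrative, for callers that only care
   about the magnitude of the result):

     -x * -y          strips to  x * y
     copysign (x, s)  strips to  x  (s kept for side effects)
     sin (-x)         strips to  sin (x), since sin is odd

   relying on the NEGATE_EXPR, COPYSIGN and negate_mathfn_p cases
   above. */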