[official-gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
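
/* For example, with 8-bit quantities: 0x70 + 0x30 = 0xA0 flips the
   sign, and ~(0x70 ^ 0x30) & (0x70 ^ 0xA0) = 0xBF & 0xD0 = 0x90 has
   its sign bit set, so OVERFLOW_SUM_SIGN reports the overflow.  When
   the addends have opposite signs, ~(a ^ b) has a clear sign bit and
   no overflow is ever reported.  */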
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
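
/* E.g. on a host where HOST_BITS_PER_WIDE_INT is 32, BASE is 0x10000,
   and x = 0x12345678 splits into LOWPART (x) = 0x5678 and
   HIGHPART (x) = 0x1234, with x == LOWPART (x) + HIGHPART (x) * BASE.  */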
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
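
/* The two routines are inverses: on a 32-bit host, encoding
   low = 0x89ABCDEF, hi = 0x01234567 yields the half-words
   { 0xCDEF, 0x89AB, 0x4567, 0x0123 }, and decode reassembles
   exactly the original LOW and HI from them.  */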
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
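
/* A typical use, schematically: build a constant that may not fit its
   type, then let force_fit_type truncate or sign-extend it and set the
   overflow bits, e.g.

     t = build_int_cst_wide (type, low, high);
     t = force_fit_type (t, 1, overflow | TREE_OVERFLOW (arg), false);

   which is the pattern int_const_binop follows below.  */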
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
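
/* The carry out of the low word is simply (l < l1): unsigned addition
   wrapped exactly when the sum is smaller than an addend.  On a 32-bit
   host, adding l1 = 0xFFFFFFFF, h1 = 0 to l2 = 1, h2 = 0 gives
   l = 0, h = 1, with no overflow reported in either signedness.  */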
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
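
/* This is the classic schoolbook method: each half-word product fits
   in a full word, so the inner loop's running CARRY never itself
   overflows.  The top four half-words of PROD are only used for the
   overflow test: a signed result fits iff they are all copies of the
   low half's sign (all zeros for a nonnegative result, all ones for a
   negative one), which is exactly what the final return tests.  */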
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
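
/* With ARITH set, vacated high bits are copies of the original sign:
   shifting the doubleword value -8 right by one yields -4, whereas a
   logical shift of the same bit pattern produces a large positive
   value.  SIGNMASK is 0 or all-ones accordingly, and also supplies the
   fill used when the entire PREC-bit value is shifted out.  */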
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1; */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
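
/* The rounding modes differ only in how a nonzero remainder adjusts
   the truncating quotient: for 7 / 2 they give TRUNC 3, FLOOR 3,
   CEIL 4 and ROUND 4, while for -7 / 2 they give TRUNC -3, FLOOR -4,
   CEIL -3 and ROUND -4 (ROUND moves away from zero once twice the
   remainder reaches the divisor).  */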
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
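
/* All of these satisfy f(-x) == -f(x), e.g. sin(-x) == -sin(x), so
   negating the argument is equivalent to negating the call.  Even
   functions such as cos are deliberately absent from the list.  */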
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
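
/* In two's complement the only problem value is the type's minimum:
   for 32-bit int, -(-2147483648) is not representable, and the test
   above returns false exactly when T is that 1 << (prec - 1) bit
   pattern.  */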
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
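
/* For IN = x - 5 split with CODE == PLUS_EXPR, the literal 5 was
   subtracted, so it lands in *MINUS_LITP, *CONP stays null, and x is
   returned as the variable part; associate_trees below can then
   recombine the pieces.  */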
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
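
/* For instance, folding the C expression 2 + 3 ends up here as
   int_const_binop (PLUS_EXPR, arg1, arg2, 0) on two INTEGER_CSTs and
   returns the INTEGER_CST 5, already truncated to the result type by
   force_fit_type.  */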
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
1908 /* Given two values, either both of sizetype or both of bitsizetype,
1909 compute the difference between the two values. Return the value
1910 in signed type corresponding to the type of the operands. */
1912 tree
1913 size_diffop (tree arg0, tree arg1)
1915 tree type = TREE_TYPE (arg0);
1916 tree ctype;
1918 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919 && type == TREE_TYPE (arg1));
1921 /* If the type is already signed, just do the simple thing. */
1922 if (!TYPE_UNSIGNED (type))
1923 return size_binop (MINUS_EXPR, arg0, arg1);
1925 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1927 /* If either operand is not a constant, do the conversions to the signed
1928 type and subtract. The hardware will do the right thing with any
1929 overflow in the subtraction. */
1930 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932 fold_convert (ctype, arg1));
1934 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936 overflow) and negate (which can't either). Special-case a result
1937 of zero while we're here. */
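/* E.g. for the sizetype constants ARG0 == 2 and ARG1 == 5, we return
   0 - (ssizetype) (5 - 2) == -3 rather than converting the huge
   unsigned result of 2 - 5 to the signed type.  */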
1938 if (tree_int_cst_equal (arg0, arg1))
1939 return build_int_cst (ctype, 0);
1940 else if (tree_int_cst_lt (arg1, arg0))
1941 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1942 else
1943 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944 fold_convert (ctype, size_binop (MINUS_EXPR,
1945 arg1, arg0)));
1948 /* A subroutine of fold_convert_const handling conversions of an
1949 INTEGER_CST to another integer type. */
1951 static tree
1952 fold_convert_const_int_from_int (tree type, tree arg1)
1954 tree t;
1956 /* Given an integer constant, make a new constant with the new type,
1957 appropriately sign-extended or truncated. */
1958 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959 TREE_INT_CST_HIGH (arg1));
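/* force_fit_type truncates the value to TYPE's precision and then
   extends it according to TYPE's signedness; e.g. converting 300 to
   an 8-bit unsigned type yields 300 mod 256 == 44.  */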
1961 t = force_fit_type (t,
1962 /* Don't set the overflow when
1963 converting a pointer */
1964 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965 (TREE_INT_CST_HIGH (arg1) < 0
1966 && (TYPE_UNSIGNED (type)
1967 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968 | TREE_OVERFLOW (arg1),
1969 TREE_CONSTANT_OVERFLOW (arg1));
1971 return t;
1974 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975 to an integer type. */
1977 static tree
1978 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1980 int overflow = 0;
1981 tree t;
1983 /* The following code implements the floating point to integer
1984 conversion rules required by the Java Language Specification,
1985 that IEEE NaNs are mapped to zero and values that overflow
1986 the target precision saturate, i.e. values greater than
1987 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988 are mapped to INT_MIN. These semantics are allowed by the
1989 C and C++ standards that simply state that the behavior of
1990 FP-to-integer conversion is unspecified upon overflow. */
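/* For example, with a 32-bit signed target type, 1.0e10 saturates to
   INT_MAX (2147483647), -1.0e10 saturates to INT_MIN, and a NaN
   converts to zero; the overflow flag is set below in each case.  */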
1992 HOST_WIDE_INT high, low;
1993 REAL_VALUE_TYPE r;
1994 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1996 switch (code)
1998 case FIX_TRUNC_EXPR:
1999 real_trunc (&r, VOIDmode, &x);
2000 break;
2002 case FIX_CEIL_EXPR:
2003 real_ceil (&r, VOIDmode, &x);
2004 break;
2006 case FIX_FLOOR_EXPR:
2007 real_floor (&r, VOIDmode, &x);
2008 break;
2010 case FIX_ROUND_EXPR:
2011 real_round (&r, VOIDmode, &x);
2012 break;
2014 default:
2015 gcc_unreachable ();
2018 /* If R is NaN, return zero and show we have an overflow. */
2019 if (REAL_VALUE_ISNAN (r))
2021 overflow = 1;
2022 high = 0;
2023 low = 0;
2026 /* See if R is less than the lower bound or greater than the
2027 upper bound. */
2029 if (! overflow)
2031 tree lt = TYPE_MIN_VALUE (type);
2032 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033 if (REAL_VALUES_LESS (r, l))
2035 overflow = 1;
2036 high = TREE_INT_CST_HIGH (lt);
2037 low = TREE_INT_CST_LOW (lt);
2041 if (! overflow)
2043 tree ut = TYPE_MAX_VALUE (type);
2044 if (ut)
2046 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 if (REAL_VALUES_LESS (u, r))
2049 overflow = 1;
2050 high = TREE_INT_CST_HIGH (ut);
2051 low = TREE_INT_CST_LOW (ut);
2056 if (! overflow)
2057 REAL_VALUE_TO_INT (&low, &high, r);
2059 t = build_int_cst_wide (type, low, high);
2061 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062 TREE_CONSTANT_OVERFLOW (arg1));
2063 return t;
2066 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067 to another floating point type. */
2069 static tree
2070 fold_convert_const_real_from_real (tree type, tree arg1)
2072 REAL_VALUE_TYPE value;
2073 tree t;
2075 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076 t = build_real (type, value);
2078 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079 TREE_CONSTANT_OVERFLOW (t)
2080 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2081 return t;
2084 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2085 type TYPE. If no simplification can be done return NULL_TREE. */
2087 static tree
2088 fold_convert_const (enum tree_code code, tree type, tree arg1)
2090 if (TREE_TYPE (arg1) == type)
2091 return arg1;
2093 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2095 if (TREE_CODE (arg1) == INTEGER_CST)
2096 return fold_convert_const_int_from_int (type, arg1);
2097 else if (TREE_CODE (arg1) == REAL_CST)
2098 return fold_convert_const_int_from_real (code, type, arg1);
2100 else if (TREE_CODE (type) == REAL_TYPE)
2102 if (TREE_CODE (arg1) == INTEGER_CST)
2103 return build_real_from_int_cst (type, arg1);
2104 if (TREE_CODE (arg1) == REAL_CST)
2105 return fold_convert_const_real_from_real (type, arg1);
2107 return NULL_TREE;
2110 /* Construct a vector of zero elements of vector type TYPE. */
2112 static tree
2113 build_zero_vector (tree type)
2115 tree elem, list;
2116 int i, units;
2118 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119 units = TYPE_VECTOR_SUBPARTS (type);
2121 list = NULL_TREE;
2122 for (i = 0; i < units; i++)
2123 list = tree_cons (NULL_TREE, elem, list);
2124 return build_vector (type, list);
2127 /* Convert expression ARG to type TYPE. Used by the middle-end for
2128 simple conversions in preference to calling the front-end's convert. */
2130 tree
2131 fold_convert (tree type, tree arg)
2133 tree orig = TREE_TYPE (arg);
2134 tree tem;
2136 if (type == orig)
2137 return arg;
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return error_mark_node;
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146 TYPE_MAIN_VARIANT (orig)))
2147 return fold_build1 (NOP_EXPR, type, arg);
2149 switch (TREE_CODE (type))
2151 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152 case POINTER_TYPE: case REFERENCE_TYPE:
2153 case OFFSET_TYPE:
2154 if (TREE_CODE (arg) == INTEGER_CST)
2156 tem = fold_convert_const (NOP_EXPR, type, arg);
2157 if (tem != NULL_TREE)
2158 return tem;
2160 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2161 || TREE_CODE (orig) == OFFSET_TYPE)
2162 return fold_build1 (NOP_EXPR, type, arg);
2163 if (TREE_CODE (orig) == COMPLEX_TYPE)
2165 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2166 return fold_convert (type, tem);
2168 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2169 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2170 return fold_build1 (NOP_EXPR, type, arg);
2172 case REAL_TYPE:
2173 if (TREE_CODE (arg) == INTEGER_CST)
2175 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2176 if (tem != NULL_TREE)
2177 return tem;
2179 else if (TREE_CODE (arg) == REAL_CST)
2181 tem = fold_convert_const (NOP_EXPR, type, arg);
2182 if (tem != NULL_TREE)
2183 return tem;
2186 switch (TREE_CODE (orig))
2188 case INTEGER_TYPE:
2189 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2190 case POINTER_TYPE: case REFERENCE_TYPE:
2191 return fold_build1 (FLOAT_EXPR, type, arg);
2193 case REAL_TYPE:
2194 return fold_build1 (NOP_EXPR, type, arg);
2196 case COMPLEX_TYPE:
2197 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2198 return fold_convert (type, tem);
2200 default:
2201 gcc_unreachable ();
2204 case COMPLEX_TYPE:
2205 switch (TREE_CODE (orig))
2207 case INTEGER_TYPE:
2208 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2209 case POINTER_TYPE: case REFERENCE_TYPE:
2210 case REAL_TYPE:
2211 return build2 (COMPLEX_EXPR, type,
2212 fold_convert (TREE_TYPE (type), arg),
2213 fold_convert (TREE_TYPE (type), integer_zero_node));
2214 case COMPLEX_TYPE:
2216 tree rpart, ipart;
2218 if (TREE_CODE (arg) == COMPLEX_EXPR)
2220 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2221 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2222 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2225 arg = save_expr (arg);
2226 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2227 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2228 rpart = fold_convert (TREE_TYPE (type), rpart);
2229 ipart = fold_convert (TREE_TYPE (type), ipart);
2230 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2233 default:
2234 gcc_unreachable ();
2237 case VECTOR_TYPE:
2238 if (integer_zerop (arg))
2239 return build_zero_vector (type);
2240 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2241 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2242 || TREE_CODE (orig) == VECTOR_TYPE);
2243 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2245 case VOID_TYPE:
2246 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2248 default:
2249 gcc_unreachable ();
2253 /* Return false if expr can be assumed not to be an lvalue, true
2254 otherwise. */
2256 static bool
2257 maybe_lvalue_p (tree x)
2259 /* We only need to wrap lvalue tree codes. */
2260 switch (TREE_CODE (x))
2262 case VAR_DECL:
2263 case PARM_DECL:
2264 case RESULT_DECL:
2265 case LABEL_DECL:
2266 case FUNCTION_DECL:
2267 case SSA_NAME:
2269 case COMPONENT_REF:
2270 case INDIRECT_REF:
2271 case ALIGN_INDIRECT_REF:
2272 case MISALIGNED_INDIRECT_REF:
2273 case ARRAY_REF:
2274 case ARRAY_RANGE_REF:
2275 case BIT_FIELD_REF:
2276 case OBJ_TYPE_REF:
2278 case REALPART_EXPR:
2279 case IMAGPART_EXPR:
2280 case PREINCREMENT_EXPR:
2281 case PREDECREMENT_EXPR:
2282 case SAVE_EXPR:
2283 case TRY_CATCH_EXPR:
2284 case WITH_CLEANUP_EXPR:
2285 case COMPOUND_EXPR:
2286 case MODIFY_EXPR:
2287 case TARGET_EXPR:
2288 case COND_EXPR:
2289 case BIND_EXPR:
2290 case MIN_EXPR:
2291 case MAX_EXPR:
2292 break;
2294 default:
2295 /* Assume the worst for front-end tree codes. */
2296 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2297 break;
2298 return false;
2301 return true;
2304 /* Return an expr equal to X but certainly not valid as an lvalue. */
2306 tree
2307 non_lvalue (tree x)
2309 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2310 us. */
2311 if (in_gimple_form)
2312 return x;
2314 if (! maybe_lvalue_p (x))
2315 return x;
2316 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2319 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2320 Zero means allow extended lvalues. */
2322 int pedantic_lvalues;
2324 /* When pedantic, return an expr equal to X but certainly not valid as a
2325 pedantic lvalue. Otherwise, return X. */
2327 static tree
2328 pedantic_non_lvalue (tree x)
2330 if (pedantic_lvalues)
2331 return non_lvalue (x);
2332 else
2333 return x;
2336 /* Given a tree comparison code, return the code that is the logical inverse
2337 of the given code. It is not safe to do this for floating-point
2338 comparisons, except for NE_EXPR and EQ_EXPR, so we receive the HONOR_NANS
2339 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
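/* E.g. when NaNs are honored, the inverse of LT_EXPR is UNGE_EXPR,
   since a < b is false whenever either operand is a NaN. With
   -ftrapping-math we give up instead, because replacing an ordered
   comparison by an unordered one changes which operands trap.  */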
2341 enum tree_code
2342 invert_tree_comparison (enum tree_code code, bool honor_nans)
2344 if (honor_nans && flag_trapping_math)
2345 return ERROR_MARK;
2347 switch (code)
2349 case EQ_EXPR:
2350 return NE_EXPR;
2351 case NE_EXPR:
2352 return EQ_EXPR;
2353 case GT_EXPR:
2354 return honor_nans ? UNLE_EXPR : LE_EXPR;
2355 case GE_EXPR:
2356 return honor_nans ? UNLT_EXPR : LT_EXPR;
2357 case LT_EXPR:
2358 return honor_nans ? UNGE_EXPR : GE_EXPR;
2359 case LE_EXPR:
2360 return honor_nans ? UNGT_EXPR : GT_EXPR;
2361 case LTGT_EXPR:
2362 return UNEQ_EXPR;
2363 case UNEQ_EXPR:
2364 return LTGT_EXPR;
2365 case UNGT_EXPR:
2366 return LE_EXPR;
2367 case UNGE_EXPR:
2368 return LT_EXPR;
2369 case UNLT_EXPR:
2370 return GE_EXPR;
2371 case UNLE_EXPR:
2372 return GT_EXPR;
2373 case ORDERED_EXPR:
2374 return UNORDERED_EXPR;
2375 case UNORDERED_EXPR:
2376 return ORDERED_EXPR;
2377 default:
2378 gcc_unreachable ();
2382 /* Similar, but return the comparison that results if the operands are
2383 swapped. This is safe for floating-point. */
2385 enum tree_code
2386 swap_tree_comparison (enum tree_code code)
2388 switch (code)
2390 case EQ_EXPR:
2391 case NE_EXPR:
2392 case ORDERED_EXPR:
2393 case UNORDERED_EXPR:
2394 case LTGT_EXPR:
2395 case UNEQ_EXPR:
2396 return code;
2397 case GT_EXPR:
2398 return LT_EXPR;
2399 case GE_EXPR:
2400 return LE_EXPR;
2401 case LT_EXPR:
2402 return GT_EXPR;
2403 case LE_EXPR:
2404 return GE_EXPR;
2405 case UNGT_EXPR:
2406 return UNLT_EXPR;
2407 case UNGE_EXPR:
2408 return UNLE_EXPR;
2409 case UNLT_EXPR:
2410 return UNGT_EXPR;
2411 case UNLE_EXPR:
2412 return UNGE_EXPR;
2413 default:
2414 gcc_unreachable ();
2419 /* Convert a comparison tree code from an enum tree_code representation
2420 into a compcode bit-based encoding. This function is the inverse of
2421 compcode_to_comparison. */
2423 static enum comparison_code
2424 comparison_to_compcode (enum tree_code code)
2426 switch (code)
2428 case LT_EXPR:
2429 return COMPCODE_LT;
2430 case EQ_EXPR:
2431 return COMPCODE_EQ;
2432 case LE_EXPR:
2433 return COMPCODE_LE;
2434 case GT_EXPR:
2435 return COMPCODE_GT;
2436 case NE_EXPR:
2437 return COMPCODE_NE;
2438 case GE_EXPR:
2439 return COMPCODE_GE;
2440 case ORDERED_EXPR:
2441 return COMPCODE_ORD;
2442 case UNORDERED_EXPR:
2443 return COMPCODE_UNORD;
2444 case UNLT_EXPR:
2445 return COMPCODE_UNLT;
2446 case UNEQ_EXPR:
2447 return COMPCODE_UNEQ;
2448 case UNLE_EXPR:
2449 return COMPCODE_UNLE;
2450 case UNGT_EXPR:
2451 return COMPCODE_UNGT;
2452 case LTGT_EXPR:
2453 return COMPCODE_LTGT;
2454 case UNGE_EXPR:
2455 return COMPCODE_UNGE;
2456 default:
2457 gcc_unreachable ();
2461 /* Convert a compcode bit-based encoding of a comparison operator back
2462 to GCC's enum tree_code representation. This function is the
2463 inverse of comparison_to_compcode. */
2465 static enum tree_code
2466 compcode_to_comparison (enum comparison_code code)
2468 switch (code)
2470 case COMPCODE_LT:
2471 return LT_EXPR;
2472 case COMPCODE_EQ:
2473 return EQ_EXPR;
2474 case COMPCODE_LE:
2475 return LE_EXPR;
2476 case COMPCODE_GT:
2477 return GT_EXPR;
2478 case COMPCODE_NE:
2479 return NE_EXPR;
2480 case COMPCODE_GE:
2481 return GE_EXPR;
2482 case COMPCODE_ORD:
2483 return ORDERED_EXPR;
2484 case COMPCODE_UNORD:
2485 return UNORDERED_EXPR;
2486 case COMPCODE_UNLT:
2487 return UNLT_EXPR;
2488 case COMPCODE_UNEQ:
2489 return UNEQ_EXPR;
2490 case COMPCODE_UNLE:
2491 return UNLE_EXPR;
2492 case COMPCODE_UNGT:
2493 return UNGT_EXPR;
2494 case COMPCODE_LTGT:
2495 return LTGT_EXPR;
2496 case COMPCODE_UNGE:
2497 return UNGE_EXPR;
2498 default:
2499 gcc_unreachable ();
2503 /* Return a tree for the comparison which is the combination of
2504 doing the AND or OR (depending on CODE) of the two operations LCODE
2505 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2506 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2507 if this makes the transformation invalid. */
2509 tree
2510 combine_comparisons (enum tree_code code, enum tree_code lcode,
2511 enum tree_code rcode, tree truth_type,
2512 tree ll_arg, tree lr_arg)
2514 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2515 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2516 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2517 enum comparison_code compcode;
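/* Because the encoding is bit-based, ANDing or ORing the encodings
   combines the comparisons directly: e.g. COMPCODE_LT | COMPCODE_EQ
   == COMPCODE_LE, so (a < b || a == b) folds to a <= b, and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, so (a <= b && a >= b)
   folds to a == b.  */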
2519 switch (code)
2521 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2522 compcode = lcompcode & rcompcode;
2523 break;
2525 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2526 compcode = lcompcode | rcompcode;
2527 break;
2529 default:
2530 return NULL_TREE;
2533 if (!honor_nans)
2535 /* Eliminate unordered comparisons, as well as LTGT and ORD
2536 which are not used unless the mode has NaNs. */
2537 compcode &= ~COMPCODE_UNORD;
2538 if (compcode == COMPCODE_LTGT)
2539 compcode = COMPCODE_NE;
2540 else if (compcode == COMPCODE_ORD)
2541 compcode = COMPCODE_TRUE;
2543 else if (flag_trapping_math)
2545 /* Check that the original operation and the optimized ones will trap
2546 under the same condition. */
2547 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2548 && (lcompcode != COMPCODE_EQ)
2549 && (lcompcode != COMPCODE_ORD);
2550 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2551 && (rcompcode != COMPCODE_EQ)
2552 && (rcompcode != COMPCODE_ORD);
2553 bool trap = (compcode & COMPCODE_UNORD) == 0
2554 && (compcode != COMPCODE_EQ)
2555 && (compcode != COMPCODE_ORD);
2557 /* In a short-circuited boolean expression the LHS might be
2558 such that the RHS, if evaluated, will never trap. For
2559 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2560 if neither x nor y is NaN. (This is a mixed blessing: for
2561 example, the expression above will never trap, hence
2562 optimizing it to x < y would be invalid). */
2563 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2564 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2565 rtrap = false;
2567 /* If the comparison was short-circuited, and only the RHS
2568 trapped, we may now generate a spurious trap. */
2569 if (rtrap && !ltrap
2570 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2571 return NULL_TREE;
2573 /* If we changed the conditions that cause a trap, we lose. */
2574 if ((ltrap || rtrap) != trap)
2575 return NULL_TREE;
2578 if (compcode == COMPCODE_TRUE)
2579 return constant_boolean_node (true, truth_type);
2580 else if (compcode == COMPCODE_FALSE)
2581 return constant_boolean_node (false, truth_type);
2582 else
2583 return fold_build2 (compcode_to_comparison (compcode),
2584 truth_type, ll_arg, lr_arg);
2587 /* Return nonzero if CODE is a tree code that represents a truth value. */
2589 static int
2590 truth_value_p (enum tree_code code)
2592 return (TREE_CODE_CLASS (code) == tcc_comparison
2593 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2594 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2595 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2598 /* Return nonzero if two operands (typically of the same tree node)
2599 are necessarily equal. If either argument has side-effects this
2600 function returns zero. FLAGS modifies behavior as follows:
2602 If OEP_ONLY_CONST is set, only return nonzero for constants.
2603 This function tests whether the operands are indistinguishable;
2604 it does not test whether they are equal using C's == operation.
2605 The distinction is important for IEEE floating point, because
2606 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2607 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2609 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2610 even though it may hold multiple values during a function.
2611 This is because a GCC tree node guarantees that nothing else is
2612 executed between the evaluation of its "operands" (which may often
2613 be evaluated in arbitrary order). Hence if the operands themselves
2614 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2615 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2616 unset means assuming isochronic (or instantaneous) tree equivalence.
2617 Unless comparing arbitrary expression trees, such as from different
2618 statements, this flag can usually be left unset.
2620 If OEP_PURE_SAME is set, then pure functions with identical arguments
2621 are considered the same. It is used when the caller has other ways
2622 to ensure that global memory is unchanged in between. */
2624 int
2625 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2627 /* If either is ERROR_MARK, they aren't equal. */
2628 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2629 return 0;
2631 /* If both types don't have the same signedness, then we can't consider
2632 them equal. We must check this before the STRIP_NOPS calls
2633 because they may change the signedness of the arguments. */
2634 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2637 /* If both types don't have the same precision, then it is not safe
2638 to strip NOPs. */
2639 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2640 return 0;
2642 STRIP_NOPS (arg0);
2643 STRIP_NOPS (arg1);
2645 /* In case both args are comparisons but with different comparison
2646 code, try to swap the comparison operands of one arg to produce
2647 a match and compare that variant. */
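/* E.g. a < b is considered equal to b > a this way.  */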
2648 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2649 && COMPARISON_CLASS_P (arg0)
2650 && COMPARISON_CLASS_P (arg1))
2652 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2654 if (TREE_CODE (arg0) == swap_code)
2655 return operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags);
2661 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2662 /* This is needed for conversions and for COMPONENT_REF.
2663 Might as well play it safe and always test this. */
2664 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2665 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2666 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2667 return 0;
2669 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2670 We don't care about side effects in that case because the SAVE_EXPR
2671 takes care of that for us. In all other cases, two expressions are
2672 equal if they have no side effects. If we have two identical
2673 expressions with side effects that should be treated the same due
2674 to the only side effects being identical SAVE_EXPR's, that will
2675 be detected in the recursive calls below. */
2676 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2677 && (TREE_CODE (arg0) == SAVE_EXPR
2678 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2679 return 1;
2681 /* Next handle constant cases, those for which we can return 1 even
2682 if ONLY_CONST is set. */
2683 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2684 switch (TREE_CODE (arg0))
2686 case INTEGER_CST:
2687 return (! TREE_CONSTANT_OVERFLOW (arg0)
2688 && ! TREE_CONSTANT_OVERFLOW (arg1)
2689 && tree_int_cst_equal (arg0, arg1));
2691 case REAL_CST:
2692 return (! TREE_CONSTANT_OVERFLOW (arg0)
2693 && ! TREE_CONSTANT_OVERFLOW (arg1)
2694 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2695 TREE_REAL_CST (arg1)));
2697 case VECTOR_CST:
2699 tree v1, v2;
2701 if (TREE_CONSTANT_OVERFLOW (arg0)
2702 || TREE_CONSTANT_OVERFLOW (arg1))
2703 return 0;
2705 v1 = TREE_VECTOR_CST_ELTS (arg0);
2706 v2 = TREE_VECTOR_CST_ELTS (arg1);
2707 while (v1 && v2)
2709 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2710 flags))
2711 return 0;
2712 v1 = TREE_CHAIN (v1);
2713 v2 = TREE_CHAIN (v2);
2716 return v1 == v2;
2719 case COMPLEX_CST:
2720 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2721 flags)
2722 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2723 flags));
2725 case STRING_CST:
2726 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2727 && ! memcmp (TREE_STRING_POINTER (arg0),
2728 TREE_STRING_POINTER (arg1),
2729 TREE_STRING_LENGTH (arg0)));
2731 case ADDR_EXPR:
2732 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2733 0);
2734 default:
2735 break;
2738 if (flags & OEP_ONLY_CONST)
2739 return 0;
2741 /* Define macros to test an operand from arg0 and arg1 for equality and a
2742 variant that allows null and views null as being different from any
2743 non-null value. In the latter case, if either is null, they both
2744 must be; otherwise, do the normal comparison. */
2745 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2746 TREE_OPERAND (arg1, N), flags)
2748 #define OP_SAME_WITH_NULL(N) \
2749 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2750 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2752 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2754 case tcc_unary:
2755 /* Two conversions are equal only if signedness and modes match. */
2756 switch (TREE_CODE (arg0))
2758 case NOP_EXPR:
2759 case CONVERT_EXPR:
2760 case FIX_CEIL_EXPR:
2761 case FIX_TRUNC_EXPR:
2762 case FIX_FLOOR_EXPR:
2763 case FIX_ROUND_EXPR:
2764 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2765 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2766 return 0;
2767 break;
2768 default:
2769 break;
2772 return OP_SAME (0);
2775 case tcc_comparison:
2776 case tcc_binary:
2777 if (OP_SAME (0) && OP_SAME (1))
2778 return 1;
2780 /* For commutative ops, allow the other order. */
2781 return (commutative_tree_code (TREE_CODE (arg0))
2782 && operand_equal_p (TREE_OPERAND (arg0, 0),
2783 TREE_OPERAND (arg1, 1), flags)
2784 && operand_equal_p (TREE_OPERAND (arg0, 1),
2785 TREE_OPERAND (arg1, 0), flags));
2787 case tcc_reference:
2788 /* If either of the pointer (or reference) expressions we are
2789 dereferencing contain a side effect, these cannot be equal. */
2790 if (TREE_SIDE_EFFECTS (arg0)
2791 || TREE_SIDE_EFFECTS (arg1))
2792 return 0;
2794 switch (TREE_CODE (arg0))
2796 case INDIRECT_REF:
2797 case ALIGN_INDIRECT_REF:
2798 case MISALIGNED_INDIRECT_REF:
2799 case REALPART_EXPR:
2800 case IMAGPART_EXPR:
2801 return OP_SAME (0);
2803 case ARRAY_REF:
2804 case ARRAY_RANGE_REF:
2805 /* Operands 2 and 3 may be null. */
2806 return (OP_SAME (0)
2807 && OP_SAME (1)
2808 && OP_SAME_WITH_NULL (2)
2809 && OP_SAME_WITH_NULL (3));
2811 case COMPONENT_REF:
2812 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2813 may be NULL when we're called to compare MEM_EXPRs. */
2814 return OP_SAME_WITH_NULL (0)
2815 && OP_SAME (1)
2816 && OP_SAME_WITH_NULL (2);
2818 case BIT_FIELD_REF:
2819 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2821 default:
2822 return 0;
2825 case tcc_expression:
2826 switch (TREE_CODE (arg0))
2828 case ADDR_EXPR:
2829 case TRUTH_NOT_EXPR:
2830 return OP_SAME (0);
2832 case TRUTH_ANDIF_EXPR:
2833 case TRUTH_ORIF_EXPR:
2834 return OP_SAME (0) && OP_SAME (1);
2836 case TRUTH_AND_EXPR:
2837 case TRUTH_OR_EXPR:
2838 case TRUTH_XOR_EXPR:
2839 if (OP_SAME (0) && OP_SAME (1))
2840 return 1;
2842 /* Otherwise take into account this is a commutative operation. */
2843 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2844 TREE_OPERAND (arg1, 1), flags)
2845 && operand_equal_p (TREE_OPERAND (arg0, 1),
2846 TREE_OPERAND (arg1, 0), flags));
2848 case CALL_EXPR:
2849 /* If the CALL_EXPRs call different functions, then they
2850 clearly cannot be equal. */
2851 if (!OP_SAME (0))
2852 return 0;
2855 unsigned int cef = call_expr_flags (arg0);
2856 if (flags & OEP_PURE_SAME)
2857 cef &= ECF_CONST | ECF_PURE;
2858 else
2859 cef &= ECF_CONST;
2860 if (!cef)
2861 return 0;
2864 /* Now see if all the arguments are the same. operand_equal_p
2865 does not handle TREE_LIST, so we walk the operands here
2866 feeding them to operand_equal_p. */
2867 arg0 = TREE_OPERAND (arg0, 1);
2868 arg1 = TREE_OPERAND (arg1, 1);
2869 while (arg0 && arg1)
2871 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2872 flags))
2873 return 0;
2875 arg0 = TREE_CHAIN (arg0);
2876 arg1 = TREE_CHAIN (arg1);
2879 /* If we get here and both argument lists are exhausted
2880 then the CALL_EXPRs are equal. */
2881 return ! (arg0 || arg1);
2883 default:
2884 return 0;
2887 case tcc_declaration:
2888 /* Consider __builtin_sqrt equal to sqrt. */
2889 return (TREE_CODE (arg0) == FUNCTION_DECL
2890 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2891 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2892 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2894 default:
2895 return 0;
2898 #undef OP_SAME
2899 #undef OP_SAME_WITH_NULL
2902 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2903 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2905 When in doubt, return 0. */
2907 static int
2908 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2910 int unsignedp1, unsignedpo;
2911 tree primarg0, primarg1, primother;
2912 unsigned int correct_width;
2914 if (operand_equal_p (arg0, arg1, 0))
2915 return 1;
2917 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2918 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2919 return 0;
2921 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2922 and see if the inner values are the same. This removes any
2923 signedness comparison, which doesn't matter here. */
2924 primarg0 = arg0, primarg1 = arg1;
2925 STRIP_NOPS (primarg0);
2926 STRIP_NOPS (primarg1);
2927 if (operand_equal_p (primarg0, primarg1, 0))
2928 return 1;
2930 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2931 actual comparison operand, ARG0.
2933 First throw away any conversions to wider types
2934 already present in the operands. */
2936 primarg1 = get_narrower (arg1, &unsignedp1);
2937 primother = get_narrower (other, &unsignedpo);
2939 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2940 if (unsignedp1 == unsignedpo
2941 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2942 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2944 tree type = TREE_TYPE (arg0);
2946 /* Make sure shorter operand is extended the right way
2947 to match the longer operand. */
2948 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2949 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2951 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2952 return 1;
2955 return 0;
2958 /* See if ARG is an expression that is either a comparison or is performing
2959 arithmetic on comparisons. The comparisons must only be comparing
2960 two different values, which will be stored in *CVAL1 and *CVAL2; if
2961 they are nonzero it means that some operands have already been found.
2962 No variables may be used anywhere else in the expression except in the
2963 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2964 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2966 If this is true, return 1. Otherwise, return zero. */
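/* For example, for ARG == (x < y) && (x == y) this returns 1 with
   *CVAL1 == x and *CVAL2 == y, since each comparison in ARG uses only
   those two values.  */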
2968 static int
2969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2971 enum tree_code code = TREE_CODE (arg);
2972 enum tree_code_class class = TREE_CODE_CLASS (code);
2974 /* We can handle some of the tcc_expression cases here. */
2975 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2976 class = tcc_unary;
2977 else if (class == tcc_expression
2978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2979 || code == COMPOUND_EXPR))
2980 class = tcc_binary;
2982 else if (class == tcc_expression && code == SAVE_EXPR
2983 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2985 /* If we've already found a CVAL1 or CVAL2, this expression is
2986 too complex to handle. */
2987 if (*cval1 || *cval2)
2988 return 0;
2990 class = tcc_unary;
2991 *save_p = 1;
2994 switch (class)
2996 case tcc_unary:
2997 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2999 case tcc_binary:
3000 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3001 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3002 cval1, cval2, save_p));
3004 case tcc_constant:
3005 return 1;
3007 case tcc_expression:
3008 if (code == COND_EXPR)
3009 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3010 cval1, cval2, save_p)
3011 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3012 cval1, cval2, save_p)
3013 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3014 cval1, cval2, save_p));
3015 return 0;
3017 case tcc_comparison:
3018 /* First see if we can handle the first operand, then the second. For
3019 the second operand, we know *CVAL1 can't be zero. It must be that
3020 one side of the comparison is each of the values; test for the
3021 case where this isn't true by failing if the two operands
3022 are the same. */
3024 if (operand_equal_p (TREE_OPERAND (arg, 0),
3025 TREE_OPERAND (arg, 1), 0))
3026 return 0;
3028 if (*cval1 == 0)
3029 *cval1 = TREE_OPERAND (arg, 0);
3030 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3031 ;
3032 else if (*cval2 == 0)
3033 *cval2 = TREE_OPERAND (arg, 0);
3034 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3035 ;
3036 else
3037 return 0;
3039 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3040 ;
3041 else if (*cval2 == 0)
3042 *cval2 = TREE_OPERAND (arg, 1);
3043 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3044 ;
3045 else
3046 return 0;
3048 return 1;
3050 default:
3051 return 0;
3055 /* ARG is a tree that is known to contain just arithmetic operations and
3056 comparisons. Evaluate the operations in the tree substituting NEW0 for
3057 any occurrence of OLD0 as an operand of a comparison and likewise for
3058 NEW1 and OLD1. */
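/* For example, substituting in (OLD0 < OLD1) || (OLD1 == OLD0) yields
   (NEW0 < NEW1) || (NEW1 == NEW0).  */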
3060 static tree
3061 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3063 tree type = TREE_TYPE (arg);
3064 enum tree_code code = TREE_CODE (arg);
3065 enum tree_code_class class = TREE_CODE_CLASS (code);
3067 /* We can handle some of the tcc_expression cases here. */
3068 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3069 class = tcc_unary;
3070 else if (class == tcc_expression
3071 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3072 class = tcc_binary;
3074 switch (class)
3076 case tcc_unary:
3077 return fold_build1 (code, type,
3078 eval_subst (TREE_OPERAND (arg, 0),
3079 old0, new0, old1, new1));
3081 case tcc_binary:
3082 return fold_build2 (code, type,
3083 eval_subst (TREE_OPERAND (arg, 0),
3084 old0, new0, old1, new1),
3085 eval_subst (TREE_OPERAND (arg, 1),
3086 old0, new0, old1, new1));
3088 case tcc_expression:
3089 switch (code)
3091 case SAVE_EXPR:
3092 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3094 case COMPOUND_EXPR:
3095 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3097 case COND_EXPR:
3098 return fold_build3 (code, type,
3099 eval_subst (TREE_OPERAND (arg, 0),
3100 old0, new0, old1, new1),
3101 eval_subst (TREE_OPERAND (arg, 1),
3102 old0, new0, old1, new1),
3103 eval_subst (TREE_OPERAND (arg, 2),
3104 old0, new0, old1, new1));
3105 default:
3106 break;
3108 /* Fall through - ??? */
3110 case tcc_comparison:
3112 tree arg0 = TREE_OPERAND (arg, 0);
3113 tree arg1 = TREE_OPERAND (arg, 1);
3115 /* We need to check both for exact equality and tree equality. The
3116 former will be true if the operand has a side-effect. In that
3117 case, we know the operand occurred exactly once. */
3119 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3120 arg0 = new0;
3121 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3122 arg0 = new1;
3124 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3125 arg1 = new0;
3126 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3127 arg1 = new1;
3129 return fold_build2 (code, type, arg0, arg1);
3132 default:
3133 return arg;
3137 /* Return a tree for the case when the result of an expression is RESULT
3138 converted to TYPE and OMITTED was previously an operand of the expression
3139 but is now not needed (e.g., we folded OMITTED * 0).
3141 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3142 the conversion of RESULT to TYPE. */
3144 tree
3145 omit_one_operand (tree type, tree result, tree omitted)
3147 tree t = fold_convert (type, result);
3149 if (TREE_SIDE_EFFECTS (omitted))
3150 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3152 return non_lvalue (t);
3155 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3157 static tree
3158 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3160 tree t = fold_convert (type, result);
3162 if (TREE_SIDE_EFFECTS (omitted))
3163 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3165 return pedantic_non_lvalue (t);
3168 /* Return a tree for the case when the result of an expression is RESULT
3169 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3170 of the expression but are now not needed.
3172 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3173 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3174 evaluated before OMITTED2. Otherwise, if neither has side effects,
3175 just do the conversion of RESULT to TYPE. */
3177 tree
3178 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3180 tree t = fold_convert (type, result);
3182 if (TREE_SIDE_EFFECTS (omitted2))
3183 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3184 if (TREE_SIDE_EFFECTS (omitted1))
3185 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3187 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3191 /* Return a simplified tree node for the truth-negation of ARG. This
3192 never alters ARG itself. We assume that ARG is an operation that
3193 returns a truth value (0 or 1).
3195 FIXME: one would think we would fold the result, but it causes
3196 problems with the dominator optimizer. */
3198 tree
3199 fold_truth_not_expr (tree arg)
3201 tree type = TREE_TYPE (arg);
3202 enum tree_code code = TREE_CODE (arg);
3204 /* If this is a comparison, we can simply invert it, except for
3205 floating-point non-equality comparisons, in which case we just
3206 enclose a TRUTH_NOT_EXPR around what we have. */
3208 if (TREE_CODE_CLASS (code) == tcc_comparison)
3210 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3211 if (FLOAT_TYPE_P (op_type)
3212 && flag_trapping_math
3213 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3214 && code != NE_EXPR && code != EQ_EXPR)
3215 return NULL_TREE;
3216 else
3218 code = invert_tree_comparison (code,
3219 HONOR_NANS (TYPE_MODE (op_type)));
3220 if (code == ERROR_MARK)
3221 return NULL_TREE;
3222 else
3223 return build2 (code, type,
3224 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3228 switch (code)
3230 case INTEGER_CST:
3231 return constant_boolean_node (integer_zerop (arg), type);
3233 case TRUTH_AND_EXPR:
3234 return build2 (TRUTH_OR_EXPR, type,
3235 invert_truthvalue (TREE_OPERAND (arg, 0)),
3236 invert_truthvalue (TREE_OPERAND (arg, 1)));
3238 case TRUTH_OR_EXPR:
3239 return build2 (TRUTH_AND_EXPR, type,
3240 invert_truthvalue (TREE_OPERAND (arg, 0)),
3241 invert_truthvalue (TREE_OPERAND (arg, 1)));
3243 case TRUTH_XOR_EXPR:
3244 /* Here we can invert either operand. We invert the first operand
3245 unless the second operand is a TRUTH_NOT_EXPR in which case our
3246 result is the XOR of the first operand with the inside of the
3247 negation of the second operand. */
3249 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3250 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3251 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3252 else
3253 return build2 (TRUTH_XOR_EXPR, type,
3254 invert_truthvalue (TREE_OPERAND (arg, 0)),
3255 TREE_OPERAND (arg, 1));
3257 case TRUTH_ANDIF_EXPR:
3258 return build2 (TRUTH_ORIF_EXPR, type,
3259 invert_truthvalue (TREE_OPERAND (arg, 0)),
3260 invert_truthvalue (TREE_OPERAND (arg, 1)));
3262 case TRUTH_ORIF_EXPR:
3263 return build2 (TRUTH_ANDIF_EXPR, type,
3264 invert_truthvalue (TREE_OPERAND (arg, 0)),
3265 invert_truthvalue (TREE_OPERAND (arg, 1)));
3267 case TRUTH_NOT_EXPR:
3268 return TREE_OPERAND (arg, 0);
3270 case COND_EXPR:
3272 tree arg1 = TREE_OPERAND (arg, 1);
3273 tree arg2 = TREE_OPERAND (arg, 2);
3274 /* A COND_EXPR may have a throw as one operand, which
3275 then has void type. Just leave void operands
3276 as they are. */
3277 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3278 VOID_TYPE_P (TREE_TYPE (arg1))
3279 ? arg1 : invert_truthvalue (arg1),
3280 VOID_TYPE_P (TREE_TYPE (arg2))
3281 ? arg2 : invert_truthvalue (arg2));
3284 case COMPOUND_EXPR:
3285 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3286 invert_truthvalue (TREE_OPERAND (arg, 1)));
3288 case NON_LVALUE_EXPR:
3289 return invert_truthvalue (TREE_OPERAND (arg, 0));
3291 case NOP_EXPR:
3292 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3293 return build1 (TRUTH_NOT_EXPR, type, arg);
3295 case CONVERT_EXPR:
3296 case FLOAT_EXPR:
3297 return build1 (TREE_CODE (arg), type,
3298 invert_truthvalue (TREE_OPERAND (arg, 0)));
3300 case BIT_AND_EXPR:
3301 if (!integer_onep (TREE_OPERAND (arg, 1)))
3302 break;
3303 return build2 (EQ_EXPR, type, arg,
3304 build_int_cst (type, 0));
3306 case SAVE_EXPR:
3307 return build1 (TRUTH_NOT_EXPR, type, arg);
3309 case CLEANUP_POINT_EXPR:
3310 return build1 (CLEANUP_POINT_EXPR, type,
3311 invert_truthvalue (TREE_OPERAND (arg, 0)));
3313 default:
3314 break;
3317 return NULL_TREE;
3320 /* Return a simplified tree node for the truth-negation of ARG. This
3321 never alters ARG itself. We assume that ARG is an operation that
3322 returns a truth value (0 or 1).
3324 FIXME: one would think we would fold the result, but it causes
3325 problems with the dominator optimizer. */
3327 tree
3328 invert_truthvalue (tree arg)
3330 tree tem;
3332 if (TREE_CODE (arg) == ERROR_MARK)
3333 return arg;
3335 tem = fold_truth_not_expr (arg);
3336 if (!tem)
3337 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3339 return tem;
3342 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3343 operands are another bit-wise operation with a common input. If so,
3344 distribute the bit operations to save an operation and possibly two if
3345 constants are involved. For example, convert
3346 (A | B) & (A | C) into A | (B & C)
3347 Further simplification will occur if B and C are constants.
3349 If this optimization cannot be done, 0 will be returned. */
3351 static tree
3352 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3354 tree common;
3355 tree left, right;
3357 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3358 || TREE_CODE (arg0) == code
3359 || (TREE_CODE (arg0) != BIT_AND_EXPR
3360 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3361 return 0;
3363 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3365 common = TREE_OPERAND (arg0, 0);
3366 left = TREE_OPERAND (arg0, 1);
3367 right = TREE_OPERAND (arg1, 1);
3369 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3371 common = TREE_OPERAND (arg0, 0);
3372 left = TREE_OPERAND (arg0, 1);
3373 right = TREE_OPERAND (arg1, 0);
3375 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3377 common = TREE_OPERAND (arg0, 1);
3378 left = TREE_OPERAND (arg0, 0);
3379 right = TREE_OPERAND (arg1, 1);
3381 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3383 common = TREE_OPERAND (arg0, 1);
3384 left = TREE_OPERAND (arg0, 0);
3385 right = TREE_OPERAND (arg1, 0);
3387 else
3388 return 0;
3390 return fold_build2 (TREE_CODE (arg0), type, common,
3391 fold_build2 (code, type, left, right));
3394 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3395 simplify a binary operation with code CODE. This optimization is unsafe. */
3396 static tree
3397 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3399 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3400 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3402 /* (A / C) +- (B / C) -> (A +- B) / C. */
3403 if (mul0 == mul1
3404 && operand_equal_p (TREE_OPERAND (arg0, 1),
3405 TREE_OPERAND (arg1, 1), 0))
3406 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3407 fold_build2 (code, type,
3408 TREE_OPERAND (arg0, 0),
3409 TREE_OPERAND (arg1, 0)),
3410 TREE_OPERAND (arg0, 1));
3412 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3413 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3414 TREE_OPERAND (arg1, 0), 0)
3415 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3416 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3418 REAL_VALUE_TYPE r0, r1;
3419 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3420 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3421 if (!mul0)
3422 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3423 if (!mul1)
3424 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3425 real_arithmetic (&r0, code, &r0, &r1);
3426 return fold_build2 (MULT_EXPR, type,
3427 TREE_OPERAND (arg0, 0),
3428 build_real (type, r0));
3431 return NULL_TREE;
3434 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3435 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3437 static tree
3438 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3439 int unsignedp)
3441 tree result;
3443 if (bitpos == 0)
3445 tree size = TYPE_SIZE (TREE_TYPE (inner));
3446 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3447 || POINTER_TYPE_P (TREE_TYPE (inner)))
3448 && host_integerp (size, 0)
3449 && tree_low_cst (size, 0) == bitsize)
3450 return fold_convert (type, inner);
3453 result = build3 (BIT_FIELD_REF, type, inner,
3454 size_int (bitsize), bitsize_int (bitpos));
3456 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3458 return result;
3461 /* Optimize a bit-field compare.
3463 There are two cases: First is a compare against a constant and the
3464 second is a comparison of two items where the fields are at the same
3465 bit position relative to the start of a chunk (byte, halfword, word)
3466 large enough to contain it. In these cases we can avoid the shift
3467 implicit in bitfield extractions.
3469 For constants, we emit a compare of the shifted constant with the
3470 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471 compared. For two fields at the same position, we do the ANDs with the
3472 similar mask and compare the result of the ANDs.
3474 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476 are the left and right operands of the comparison, respectively.
3478 If the optimization described above can be done, we return the resulting
3479 tree. Otherwise we return zero. */
3481 static tree
3482 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3483 tree lhs, tree rhs)
3485 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486 tree type = TREE_TYPE (lhs);
3487 tree signed_type, unsigned_type;
3488 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489 enum machine_mode lmode, rmode, nmode;
3490 int lunsignedp, runsignedp;
3491 int lvolatilep = 0, rvolatilep = 0;
3492 tree linner, rinner = NULL_TREE;
3493 tree mask;
3494 tree offset;
3496 /* Get all the information about the extractions being done. If the bit size
3497 is the same as the size of the underlying object, we aren't doing an
3498 extraction at all and so can do nothing. We also don't want to
3499 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500 then will no longer be able to replace it. */
3501 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502 &lunsignedp, &lvolatilep, false);
3503 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3505 return 0;
3507 if (!const_p)
3509 /* If this is not a constant, we can only do something if bit positions,
3510 sizes, and signedness are the same. */
3511 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512 &runsignedp, &rvolatilep, false);
3514 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515 || lunsignedp != runsignedp || offset != 0
3516 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3517 return 0;
3520 /* See if we can find a mode to refer to this field. We should be able to,
3521 but fail if we can't. */
3522 nmode = get_best_mode (lbitsize, lbitpos,
3523 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525 TYPE_ALIGN (TREE_TYPE (rinner))),
3526 word_mode, lvolatilep || rvolatilep);
3527 if (nmode == VOIDmode)
3528 return 0;
3530 /* Set signed and unsigned types of the precision of this mode for the
3531 shifts below. */
3532 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3535 /* Compute the bit position and size for the new reference and our offset
3536 within it. If the new reference is the same size as the original, we
3537 won't optimize anything, so return zero. */
3538 nbitsize = GET_MODE_BITSIZE (nmode);
3539 nbitpos = lbitpos & ~ (nbitsize - 1);
3540 lbitpos -= nbitpos;
3541 if (nbitsize == lbitsize)
3542 return 0;
3544 if (BYTES_BIG_ENDIAN)
3545 lbitpos = nbitsize - lbitsize - lbitpos;
3547 /* Make the mask to be used against the extracted field. */
3548 mask = build_int_cst (unsigned_type, -1);
3549 mask = force_fit_type (mask, 0, false, false);
3550 mask = fold_convert (unsigned_type, mask);
3551 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3552 mask = const_binop (RSHIFT_EXPR, mask,
3553 size_int (nbitsize - lbitsize - lbitpos), 0);
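/* E.g. with nbitsize == 32, lbitsize == 8 and (adjusted) lbitpos == 16,
   the two shifts above turn the all-ones constant into 0x00ff0000,
   selecting exactly the field's bits within the loaded word.  */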
3555 if (! const_p)
3556 /* If not comparing with constant, just rework the comparison
3557 and return. */
3558 return build2 (code, compare_type,
3559 build2 (BIT_AND_EXPR, unsigned_type,
3560 make_bit_field_ref (linner, unsigned_type,
3561 nbitsize, nbitpos, 1),
3562 mask),
3563 build2 (BIT_AND_EXPR, unsigned_type,
3564 make_bit_field_ref (rinner, unsigned_type,
3565 nbitsize, nbitpos, 1),
3566 mask));
3568 /* Otherwise, we are handling the constant case. See if the constant is too
3569 big for the field. Warn and return a tree for 0 (false) if so. We do
3570 this not only for its own sake, but to avoid having to test for this
3571 error case below. If we didn't, we might generate wrong code.
3573 For unsigned fields, the constant shifted right by the field length should
3574 be all zero. For signed fields, the high-order bits should agree with
3575 the sign bit. */
3577 if (lunsignedp)
3579 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3580 fold_convert (unsigned_type, rhs),
3581 size_int (lbitsize), 0)))
3583 warning (0, "comparison is always %d due to width of bit-field",
3584 code == NE_EXPR);
3585 return constant_boolean_node (code == NE_EXPR, compare_type);
3588 else
3590 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3591 size_int (lbitsize - 1), 0);
3592 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3594 warning (0, "comparison is always %d due to width of bit-field",
3595 code == NE_EXPR);
3596 return constant_boolean_node (code == NE_EXPR, compare_type);
3600 /* Single-bit compares should always be against zero. */
3601 if (lbitsize == 1 && ! integer_zerop (rhs))
3603 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3604 rhs = build_int_cst (type, 0);
3607 /* Make a new bitfield reference, shift the constant over the
3608 appropriate number of bits and mask it with the computed mask
3609 (in case this was a signed field). If we changed it, make a new one. */
3610 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3611 if (lvolatilep)
3613 TREE_SIDE_EFFECTS (lhs) = 1;
3614 TREE_THIS_VOLATILE (lhs) = 1;
3617 rhs = const_binop (BIT_AND_EXPR,
3618 const_binop (LSHIFT_EXPR,
3619 fold_convert (unsigned_type, rhs),
3620 size_int (lbitpos), 0),
3621 mask, 0);
3623 return build2 (code, compare_type,
3624 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3625 rhs);
3628 /* Subroutine for fold_truthop: decode a field reference.
3630 If EXP is a comparison reference, we return the innermost reference.
3632 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3633 set to the starting bit number.
3635 If the innermost field can be completely contained in a mode-sized
3636 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3638 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3639 otherwise it is not changed.
3641 *PUNSIGNEDP is set to the signedness of the field.
3643 *PMASK is set to the mask used. This is either contained in a
3644 BIT_AND_EXPR or derived from the width of the field.
3646 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3648 Return 0 if this is not a component reference or is one that we can't
3649 do anything with. */
3651 static tree
3652 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3653 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3654 int *punsignedp, int *pvolatilep,
3655 tree *pmask, tree *pand_mask)
3657 tree outer_type = 0;
3658 tree and_mask = 0;
3659 tree mask, inner, offset;
3660 tree unsigned_type;
3661 unsigned int precision;
3663 /* All the optimizations using this function assume integer fields.
3664 There are problems with FP fields since the type_for_size call
3665 below can fail for, e.g., XFmode. */
3666 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3667 return 0;
3669 /* We are interested in the bare arrangement of bits, so strip everything
3670 that doesn't affect the machine mode. However, record the type of the
3671 outermost expression if it may matter below. */
3672 if (TREE_CODE (exp) == NOP_EXPR
3673 || TREE_CODE (exp) == CONVERT_EXPR
3674 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3675 outer_type = TREE_TYPE (exp);
3676 STRIP_NOPS (exp);
3678 if (TREE_CODE (exp) == BIT_AND_EXPR)
3680 and_mask = TREE_OPERAND (exp, 1);
3681 exp = TREE_OPERAND (exp, 0);
3682 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3683 if (TREE_CODE (and_mask) != INTEGER_CST)
3684 return 0;
3687 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3688 punsignedp, pvolatilep, false);
3689 if ((inner == exp && and_mask == 0)
3690 || *pbitsize < 0 || offset != 0
3691 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3692 return 0;
3694 /* If the number of bits in the reference is the same as the bitsize of
3695 the outer type, then the outer type gives the signedness. Otherwise
3696 (in case of a small bitfield) the signedness is unchanged. */
3697 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3698 *punsignedp = TYPE_UNSIGNED (outer_type);
3700 /* Compute the mask to access the bitfield. */
3701 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3702 precision = TYPE_PRECISION (unsigned_type);
3704 mask = build_int_cst (unsigned_type, -1);
3705 mask = force_fit_type (mask, 0, false, false);
3707 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3708 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3710 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3711 if (and_mask != 0)
3712 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3713 fold_convert (unsigned_type, and_mask), mask);
3715 *pmask = mask;
3716 *pand_mask = and_mask;
3717 return inner;
3720 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3721 bit positions. */
3723 static int
3724 all_ones_mask_p (tree mask, int size)
3726 tree type = TREE_TYPE (mask);
3727 unsigned int precision = TYPE_PRECISION (type);
3728 tree tmask;
3730 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3731 tmask = force_fit_type (tmask, 0, false, false);
3733 return
3734 tree_int_cst_equal (mask,
3735 const_binop (RSHIFT_EXPR,
3736 const_binop (LSHIFT_EXPR, tmask,
3737 size_int (precision - size),
3738 0),
3739 size_int (precision - size), 0));
3742 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3743 represents the sign bit of EXP's type. If EXP represents a sign
3744 or zero extension, also test VAL against the unextended type.
3745 The return value is the (sub)expression whose sign bit is VAL,
3746 or NULL_TREE otherwise. */
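/* E.g. for a 32-bit integral EXP, this returns EXP when VAL, masked
   to the type's width, equals 0x80000000.  */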
3748 static tree
3749 sign_bit_p (tree exp, tree val)
3751 unsigned HOST_WIDE_INT mask_lo, lo;
3752 HOST_WIDE_INT mask_hi, hi;
3753 int width;
3754 tree t;
3756 /* Tree EXP must have an integral type. */
3757 t = TREE_TYPE (exp);
3758 if (! INTEGRAL_TYPE_P (t))
3759 return NULL_TREE;
3761 /* Tree VAL must be an integer constant. */
3762 if (TREE_CODE (val) != INTEGER_CST
3763 || TREE_CONSTANT_OVERFLOW (val))
3764 return NULL_TREE;
3766 width = TYPE_PRECISION (t);
3767 if (width > HOST_BITS_PER_WIDE_INT)
3769 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3770 lo = 0;
3772 mask_hi = ((unsigned HOST_WIDE_INT) -1
3773 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3774 mask_lo = -1;
3776 else
3778 hi = 0;
3779 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3781 mask_hi = 0;
3782 mask_lo = ((unsigned HOST_WIDE_INT) -1
3783 >> (HOST_BITS_PER_WIDE_INT - width));
3786 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3787 treat VAL as if it were unsigned. */
3788 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3789 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3790 return exp;
3792 /* Handle extension from a narrower type. */
3793 if (TREE_CODE (exp) == NOP_EXPR
3794 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3795 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3797 return NULL_TREE;
3800 /* Subroutine for fold_truthop: determine if an operand is simple enough
3801 to be evaluated unconditionally. */
3803 static int
3804 simple_operand_p (tree exp)
3806 /* Strip any conversions that don't change the machine mode. */
3807 STRIP_NOPS (exp);
3809 return (CONSTANT_CLASS_P (exp)
3810 || TREE_CODE (exp) == SSA_NAME
3811 || (DECL_P (exp)
3812 && ! TREE_ADDRESSABLE (exp)
3813 && ! TREE_THIS_VOLATILE (exp)
3814 && ! DECL_NONLOCAL (exp)
3815 /* Don't regard global variables as simple. They may be
3816 allocated in ways unknown to the compiler (shared memory,
3817 #pragma weak, etc). */
3818 && ! TREE_PUBLIC (exp)
3819 && ! DECL_EXTERNAL (exp)
3820 /* Loading a static variable is unduly expensive, but global
3821 registers aren't expensive. */
3822 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3825 /* The following functions are subroutines to fold_range_test and allow it to
3826 try to change a logical combination of comparisons into a range test.
3828 For example, both
3829 X == 2 || X == 3 || X == 4 || X == 5
3830 and
3831 X >= 2 && X <= 5
3832 are converted to
3833 (unsigned) (X - 2) <= 3
3835 We describe each set of comparisons as being either inside or outside
3836 a range, using a variable named like IN_P, and then describe the
3837 range with a lower and upper bound. If one of the bounds is omitted,
3838 it represents either the highest or lowest value of the type.
3840 In the comments below, we represent a range by two numbers in brackets
3841 preceded by a "+" to designate being inside that range, or a "-" to
3842 designate being outside that range, so the condition can be inverted by
3843 flipping the prefix. An omitted bound is represented by a "-". For
3844 example, "- [-, 10]" means being outside the range starting at the lowest
3845 possible value and ending at 10, in other words, being greater than 10.
3846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3847 always false.
3849 We set up things so that the missing bounds are handled in a consistent
3850 manner so neither a missing bound nor "true" and "false" need to be
3851 handled using a special case. */
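/* An illustrative sketch, not part of the original file: the
   transformation described above, written out for a plain int
   (function names hypothetical).  Both tests accept exactly 2..5; the
   unsigned subtraction makes every value below 2 wrap around to a
   large number that fails the <= test, so one compare replaces the
   whole chain:  */

static int
in_range_slow_example (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
in_range_fast_example (int x)
{
  return (unsigned) x - 2u <= 3u;   /* x = 1 gives 0xffffffff, fails */
}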
3853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3855 and UPPER1_P are nonzero if the respective argument is an upper bound
3856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3857 must be specified for a comparison. ARG1 will be converted to ARG0's
3858 type if both are specified. */
3860 static tree
3861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3862 tree arg1, int upper1_p)
3864 tree tem;
3865 int result;
3866 int sgn0, sgn1;
3868 /* If neither arg represents infinity, do the normal operation.
3869 Else, if not a comparison, return infinity. Else handle the special
3870 comparison rules. Note that most of the cases below won't occur, but
3871 are handled for consistency. */
3873 if (arg0 != 0 && arg1 != 0)
3875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3877 STRIP_NOPS (tem);
3878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3881 if (TREE_CODE_CLASS (code) != tcc_comparison)
3882 return 0;
3884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3885 for neither. In real maths, we cannot assume open ended ranges are
3886 the same. But, this is computer arithmetic, where numbers are finite.
3887 We can therefore make the transformation of any unbounded range with
3888 the value Z, Z being greater than any representable number. This permits
3889 us to treat unbounded ranges as equal. */
3890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3892 switch (code)
3894 case EQ_EXPR:
3895 result = sgn0 == sgn1;
3896 break;
3897 case NE_EXPR:
3898 result = sgn0 != sgn1;
3899 break;
3900 case LT_EXPR:
3901 result = sgn0 < sgn1;
3902 break;
3903 case LE_EXPR:
3904 result = sgn0 <= sgn1;
3905 break;
3906 case GT_EXPR:
3907 result = sgn0 > sgn1;
3908 break;
3909 case GE_EXPR:
3910 result = sgn0 >= sgn1;
3911 break;
3912 default:
3913 gcc_unreachable ();
3916 return constant_boolean_node (result, type);
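/* A worked example of the SGN encoding above, not part of the original
   file: under LT_EXPR, an omitted lower bound against an omitted upper
   bound compares as -1 < 1, i.e. true -- the lowest value of a finite
   type really is less than the highest.  Two omitted upper bounds under
   LE_EXPR compare as 1 <= 1, also true, which is what lets two
   unbounded ranges be treated as equal.  */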
3919 /* Given EXP, a logical expression, set the range it is testing into
3920 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3921 actually being tested. *PLOW and *PHIGH will be made of the same
3922 type as the returned expression. If EXP is not a comparison, we
3923 will most likely not be returning a useful value and range. Set
3924 *STRICT_OVERFLOW_P to true if the return value is only valid
3925 because signed overflow is undefined; otherwise, do not change
3926 *STRICT_OVERFLOW_P. */
3928 static tree
3929 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3930 bool *strict_overflow_p)
3932 enum tree_code code;
3933 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3934 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3935 int in_p, n_in_p;
3936 tree low, high, n_low, n_high;
3938 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3939 and see if we can refine the range. Some of the cases below may not
3940 happen, but it doesn't seem worth worrying about this. We "continue"
3941 the outer loop when we've changed something; otherwise we "break"
3942 the switch, which will "break" the while. */
3944 in_p = 0;
3945 low = high = build_int_cst (TREE_TYPE (exp), 0);
3947 while (1)
3949 code = TREE_CODE (exp);
3950 exp_type = TREE_TYPE (exp);
3952 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3954 if (TREE_CODE_LENGTH (code) > 0)
3955 arg0 = TREE_OPERAND (exp, 0);
3956 if (TREE_CODE_CLASS (code) == tcc_comparison
3957 || TREE_CODE_CLASS (code) == tcc_unary
3958 || TREE_CODE_CLASS (code) == tcc_binary)
3959 arg0_type = TREE_TYPE (arg0);
3960 if (TREE_CODE_CLASS (code) == tcc_binary
3961 || TREE_CODE_CLASS (code) == tcc_comparison
3962 || (TREE_CODE_CLASS (code) == tcc_expression
3963 && TREE_CODE_LENGTH (code) > 1))
3964 arg1 = TREE_OPERAND (exp, 1);
3967 switch (code)
3969 case TRUTH_NOT_EXPR:
3970 in_p = ! in_p, exp = arg0;
3971 continue;
3973 case EQ_EXPR: case NE_EXPR:
3974 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975 /* We can only do something if the range is testing for zero
3976 and if the second operand is an integer constant. Note that
3977 saying something is "in" the range we make is done by
3978 complementing IN_P, since the initial setup already expresses
3979 being not equal to zero; "out" is leaving it alone. */
3980 if (low == 0 || high == 0
3981 || ! integer_zerop (low) || ! integer_zerop (high)
3982 || TREE_CODE (arg1) != INTEGER_CST)
3983 break;
3985 switch (code)
3987 case NE_EXPR: /* - [c, c] */
3988 low = high = arg1;
3989 break;
3990 case EQ_EXPR: /* + [c, c] */
3991 in_p = ! in_p, low = high = arg1;
3992 break;
3993 case GT_EXPR: /* - [-, c] */
3994 low = 0, high = arg1;
3995 break;
3996 case GE_EXPR: /* + [c, -] */
3997 in_p = ! in_p, low = arg1, high = 0;
3998 break;
3999 case LT_EXPR: /* - [c, -] */
4000 low = arg1, high = 0;
4001 break;
4002 case LE_EXPR: /* + [-, c] */
4003 in_p = ! in_p, low = 0, high = arg1;
4004 break;
4005 default:
4006 gcc_unreachable ();
4009 /* If this is an unsigned comparison, we also know that EXP is
4010 greater than or equal to zero. We base the range tests we make
4011 on that fact, so we record it here so we can parse existing
4012 range tests. We test arg0_type since often the return type
4013 of, e.g. EQ_EXPR, is boolean. */
4014 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4016 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017 in_p, low, high, 1,
4018 build_int_cst (arg0_type, 0),
4019 NULL_TREE))
4020 break;
4022 in_p = n_in_p, low = n_low, high = n_high;
4024 /* If the high bound is missing, but we have a nonzero low
4025 bound, reverse the range so it goes from zero to the low bound
4026 minus 1. */
4027 if (high == 0 && low && ! integer_zerop (low))
4029 in_p = ! in_p;
4030 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031 integer_one_node, 0);
4032 low = build_int_cst (arg0_type, 0);
4036 exp = arg0;
4037 continue;
4039 case NEGATE_EXPR:
4040 /* (-x) IN [a,b] -> x in [-b, -a] */
4041 n_low = range_binop (MINUS_EXPR, exp_type,
4042 build_int_cst (exp_type, 0),
4043 0, high, 1);
4044 n_high = range_binop (MINUS_EXPR, exp_type,
4045 build_int_cst (exp_type, 0),
4046 0, low, 0);
4047 low = n_low, high = n_high;
4048 exp = arg0;
4049 continue;
4051 case BIT_NOT_EXPR:
4052 /* ~ X -> -X - 1 */
4053 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4054 build_int_cst (exp_type, 1));
4055 continue;
4057 case PLUS_EXPR: case MINUS_EXPR:
4058 if (TREE_CODE (arg1) != INTEGER_CST)
4059 break;
4061 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4062 move a constant to the other side. */
4063 if (!TYPE_UNSIGNED (arg0_type)
4064 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4065 break;
4067 /* If EXP is signed, any overflow in the computation is undefined,
4068 so we don't worry about it so long as our computations on
4069 the bounds don't overflow. For unsigned, overflow is defined
4070 and this is exactly the right thing. */
4071 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4072 arg0_type, low, 0, arg1, 0);
4073 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074 arg0_type, high, 1, arg1, 0);
4075 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4076 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4077 break;
4079 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4080 *strict_overflow_p = true;
4082 /* Check for an unsigned range which has wrapped around the maximum
4083 value thus making n_high < n_low, and normalize it. */
4084 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4086 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4087 integer_one_node, 0);
4088 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4089 integer_one_node, 0);
4091 /* If the range is of the form +/- [ x+1, x ], we won't
4092 be able to normalize it. But then, it represents the
4093 whole range or the empty set, so make it
4094 +/- [ -, - ]. */
4095 if (tree_int_cst_equal (n_low, low)
4096 && tree_int_cst_equal (n_high, high))
4097 low = high = 0;
4098 else
4099 in_p = ! in_p;
4101 else
4102 low = n_low, high = n_high;
4104 exp = arg0;
4105 continue;
4107 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4108 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4109 break;
4111 if (! INTEGRAL_TYPE_P (arg0_type)
4112 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4113 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4114 break;
4116 n_low = low, n_high = high;
4118 if (n_low != 0)
4119 n_low = fold_convert (arg0_type, n_low);
4121 if (n_high != 0)
4122 n_high = fold_convert (arg0_type, n_high);
4125 /* If we're converting arg0 from an unsigned type to exp's
4126 signed type, we will be doing the comparison as unsigned.
4127 The tests above have already verified that LOW and HIGH
4128 are both positive.
4130 So we have to ensure that we will handle large unsigned
4131 values the same way that the current signed bounds treat
4132 negative values. */
4134 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4136 tree high_positive;
4137 tree equiv_type = lang_hooks.types.type_for_mode
4138 (TYPE_MODE (arg0_type), 1);
4140 /* A range without an upper bound is, naturally, unbounded.
4141 Since convert would have cropped a very large value, use
4142 the max value for the destination type. */
4143 high_positive
4144 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4145 : TYPE_MAX_VALUE (arg0_type);
4147 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4148 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4149 fold_convert (arg0_type,
4150 high_positive),
4151 fold_convert (arg0_type,
4152 integer_one_node));
4154 /* If the low bound is specified, "and" the range with the
4155 range for which the original unsigned value will be
4156 positive. */
4157 if (low != 0)
4159 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4160 1, n_low, n_high, 1,
4161 fold_convert (arg0_type,
4162 integer_zero_node),
4163 high_positive))
4164 break;
4166 in_p = (n_in_p == in_p);
4168 else
4170 /* Otherwise, "or" the range with the range of the input
4171 that will be interpreted as negative. */
4172 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4173 0, n_low, n_high, 1,
4174 fold_convert (arg0_type,
4175 integer_zero_node),
4176 high_positive))
4177 break;
4179 in_p = (in_p != n_in_p);
4183 exp = arg0;
4184 low = n_low, high = n_high;
4185 continue;
4187 default:
4188 break;
4191 break;
4194 /* If EXP is a constant, we can evaluate whether this is true or false. */
4195 if (TREE_CODE (exp) == INTEGER_CST)
4197 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4198 exp, 0, low, 0))
4199 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4200 exp, 1, high, 1)));
4201 low = high = 0;
4202 exp = 0;
4205 *pin_p = in_p, *plow = low, *phigh = high;
4206 return exp;
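/* A worked example, not part of the original file: for EXP = (x > 5)
   the GT_EXPR case above records "- [-, 5]", i.e. *PIN_P = 0,
   *PLOW = 0, *PHIGH = 5, and x itself is returned -- x > 5 holds
   exactly when x lies outside [-, 5].  For EXP = !(x > 5) the
   TRUTH_NOT_EXPR case first flips IN_P, yielding "+ [-, 5]", which is
   x <= 5.  */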
4209 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4210 type, TYPE, return an expression to test if EXP is in (or out of, depending
4211 on IN_P) the range. Return 0 if the test couldn't be created. */
4213 static tree
4214 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4216 tree etype = TREE_TYPE (exp);
4217 tree value;
4219 #ifdef HAVE_canonicalize_funcptr_for_compare
4220 /* Disable this optimization for function pointer expressions
4221 on targets that require function pointer canonicalization. */
4222 if (HAVE_canonicalize_funcptr_for_compare
4223 && TREE_CODE (etype) == POINTER_TYPE
4224 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4225 return NULL_TREE;
4226 #endif
4228 if (! in_p)
4230 value = build_range_check (type, exp, 1, low, high);
4231 if (value != 0)
4232 return invert_truthvalue (value);
4234 return 0;
4237 if (low == 0 && high == 0)
4238 return build_int_cst (type, 1);
4240 if (low == 0)
4241 return fold_build2 (LE_EXPR, type, exp,
4242 fold_convert (etype, high));
4244 if (high == 0)
4245 return fold_build2 (GE_EXPR, type, exp,
4246 fold_convert (etype, low));
4248 if (operand_equal_p (low, high, 0))
4249 return fold_build2 (EQ_EXPR, type, exp,
4250 fold_convert (etype, low));
4252 if (integer_zerop (low))
4254 if (! TYPE_UNSIGNED (etype))
4256 etype = lang_hooks.types.unsigned_type (etype);
4257 high = fold_convert (etype, high);
4258 exp = fold_convert (etype, exp);
4260 return build_range_check (type, exp, 1, 0, high);
4263 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4264 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4266 unsigned HOST_WIDE_INT lo;
4267 HOST_WIDE_INT hi;
4268 int prec;
4270 prec = TYPE_PRECISION (etype);
4271 if (prec <= HOST_BITS_PER_WIDE_INT)
4273 hi = 0;
4274 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4276 else
4278 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4279 lo = (unsigned HOST_WIDE_INT) -1;
4282 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4284 if (TYPE_UNSIGNED (etype))
4286 etype = lang_hooks.types.signed_type (etype);
4287 exp = fold_convert (etype, exp);
4289 return fold_build2 (GT_EXPR, type, exp,
4290 build_int_cst (etype, 0));
4294 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295 This requires wrap-around arithmetic for the type of the expression. */
4296 switch (TREE_CODE (etype))
4298 case INTEGER_TYPE:
4299 /* There is no requirement that LOW be within the range of ETYPE
4300 if the latter is a subtype. It must, however, be within the base
4301 type of ETYPE. So be sure we do the subtraction in that type. */
4302 if (TREE_TYPE (etype))
4303 etype = TREE_TYPE (etype);
4304 break;
4306 case ENUMERAL_TYPE:
4307 case BOOLEAN_TYPE:
4308 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4309 TYPE_UNSIGNED (etype));
4310 break;
4312 default:
4313 break;
4316 /* If we don't have wrap-around arithmetic up front, try to force it. */
4317 if (TREE_CODE (etype) == INTEGER_TYPE
4318 && !TYPE_OVERFLOW_WRAPS (etype))
4320 tree utype, minv, maxv;
4322 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4323 for the type in question, as we rely on this here. */
4324 utype = lang_hooks.types.unsigned_type (etype);
4325 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4326 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4327 integer_one_node, 1);
4328 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4330 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4331 minv, 1, maxv, 1)))
4332 etype = utype;
4333 else
4334 return 0;
4337 high = fold_convert (etype, high);
4338 low = fold_convert (etype, low);
4339 exp = fold_convert (etype, exp);
4341 value = const_binop (MINUS_EXPR, high, low, 0);
4343 if (value != 0 && !TREE_OVERFLOW (value))
4344 return build_range_check (type,
4345 fold_build2 (MINUS_EXPR, etype, exp, low),
4346 1, build_int_cst (etype, 0), value);
4348 return 0;
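/* An illustrative sketch, not part of the original file: the
   [1, signed-max] special case above, for an 8-bit value (function
   name hypothetical).  The cast leaves 1..127 positive and maps
   128..255 to negative values, so one signed compare replaces two
   tests.  It relies on the conversion of out-of-range values being
   the usual two's complement wrap, which C leaves
   implementation-defined:  */

static int
in_1_to_127_example (unsigned char c)
{
  return (signed char) c > 0;   /* same as c >= 1 && c <= 127 */
}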
4351 /* Return the predecessor of VAL in its type, handling the infinite case. */
4353 static tree
4354 range_predecessor (tree val)
4356 tree type = TREE_TYPE (val);
4358 if (INTEGRAL_TYPE_P (type)
4359 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4360 return 0;
4361 else
4362 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4365 /* Return the successor of VAL in its type, handling the infinite case. */
4367 static tree
4368 range_successor (tree val)
4370 tree type = TREE_TYPE (val);
4372 if (INTEGRAL_TYPE_P (type)
4373 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4374 return 0;
4375 else
4376 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4379 /* Given two ranges, see if we can merge them into one. Return 1 if we
4380 can, 0 if we can't. Set the output range into the specified parameters. */
4382 static int
4383 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4384 tree high0, int in1_p, tree low1, tree high1)
4386 int no_overlap;
4387 int subset;
4388 int temp;
4389 tree tem;
4390 int in_p;
4391 tree low, high;
4392 int lowequal = ((low0 == 0 && low1 == 0)
4393 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 low0, 0, low1, 0)));
4395 int highequal = ((high0 == 0 && high1 == 0)
4396 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 high0, 1, high1, 1)));
4399 /* Make range 0 be the range that starts first, or ends last if they
4400 start at the same value. Swap them if this is not already the case. */
4401 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4402 low0, 0, low1, 0))
4403 || (lowequal
4404 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 high1, 1, high0, 1))))
4407 temp = in0_p, in0_p = in1_p, in1_p = temp;
4408 tem = low0, low0 = low1, low1 = tem;
4409 tem = high0, high0 = high1, high1 = tem;
4412 /* Now flag two cases, whether the ranges are disjoint or whether the
4413 second range is totally subsumed in the first. Note that the tests
4414 below are simplified by the ones above. */
4415 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4416 high0, 1, low1, 0));
4417 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4418 high1, 1, high0, 1));
4420 /* We now have four cases, depending on whether we are including or
4421 excluding the two ranges. */
4422 if (in0_p && in1_p)
4424 /* If they don't overlap, the result is false. If the second range
4425 is a subset it is the result. Otherwise, the range is from the start
4426 of the second to the end of the first. */
4427 if (no_overlap)
4428 in_p = 0, low = high = 0;
4429 else if (subset)
4430 in_p = 1, low = low1, high = high1;
4431 else
4432 in_p = 1, low = low1, high = high0;
4435 else if (in0_p && ! in1_p)
4437 /* If they don't overlap, the result is the first range. If they are
4438 equal, the result is false. If the second range is a subset of the
4439 first, and the ranges begin at the same place, we go from just after
4440 the end of the second range to the end of the first. If the second
4441 range is not a subset of the first, or if it is a subset and both
4442 ranges end at the same place, the range starts at the start of the
4443 first range and ends just before the second range.
4444 Otherwise, we can't describe this as a single range. */
4445 if (no_overlap)
4446 in_p = 1, low = low0, high = high0;
4447 else if (lowequal && highequal)
4448 in_p = 0, low = high = 0;
4449 else if (subset && lowequal)
4451 low = range_successor (high1);
4452 high = high0;
4453 in_p = (low != 0);
4455 else if (! subset || highequal)
4457 low = low0;
4458 high = range_predecessor (low1);
4459 in_p = (high != 0);
4461 else
4462 return 0;
4465 else if (! in0_p && in1_p)
4467 /* If they don't overlap, the result is the second range. If the second
4468 is a subset of the first, the result is false. Otherwise,
4469 the range starts just after the first range and ends at the
4470 end of the second. */
4471 if (no_overlap)
4472 in_p = 1, low = low1, high = high1;
4473 else if (subset || highequal)
4474 in_p = 0, low = high = 0;
4475 else
4477 low = range_successor (high0);
4478 high = high1;
4479 in_p = (low != 0);
4483 else
4485 /* The case where we are excluding both ranges. Here the complex case
4486 is if they don't overlap. In that case, the only time we have a
4487 range is if they are adjacent. If the second is a subset of the
4488 first, the result is the first. Otherwise, the range to exclude
4489 starts at the beginning of the first range and ends at the end of the
4490 second. */
4491 if (no_overlap)
4493 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4494 range_successor (high0),
4495 1, low1, 0)))
4496 in_p = 0, low = low0, high = high1;
4497 else
4499 /* Canonicalize - [min, x] into - [-, x]. */
4500 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4501 switch (TREE_CODE (TREE_TYPE (low0)))
4503 case ENUMERAL_TYPE:
4504 if (TYPE_PRECISION (TREE_TYPE (low0))
4505 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4506 break;
4507 /* FALLTHROUGH */
4508 case INTEGER_TYPE:
4509 if (tree_int_cst_equal (low0,
4510 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4511 low0 = 0;
4512 break;
4513 case POINTER_TYPE:
4514 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4515 && integer_zerop (low0))
4516 low0 = 0;
4517 break;
4518 default:
4519 break;
4522 /* Canonicalize - [x, max] into - [x, -]. */
4523 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4524 switch (TREE_CODE (TREE_TYPE (high1)))
4526 case ENUMERAL_TYPE:
4527 if (TYPE_PRECISION (TREE_TYPE (high1))
4528 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4529 break;
4530 /* FALLTHROUGH */
4531 case INTEGER_TYPE:
4532 if (tree_int_cst_equal (high1,
4533 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4534 high1 = 0;
4535 break;
4536 case POINTER_TYPE:
4537 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4538 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4539 high1, 1,
4540 integer_one_node, 1)))
4541 high1 = 0;
4542 break;
4543 default:
4544 break;
4547 /* The ranges might also be adjacent between the maximum and
4548 minimum values of the given type. For
4549 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4550 return + [x + 1, y - 1]. */
4551 if (low0 == 0 && high1 == 0)
4553 low = range_successor (high0);
4554 high = range_predecessor (low1);
4555 if (low == 0 || high == 0)
4556 return 0;
4558 in_p = 1;
4560 else
4561 return 0;
4564 else if (subset)
4565 in_p = 0, low = low0, high = high0;
4566 else
4567 in_p = 0, low = low0, high = high1;
4570 *pin_p = in_p, *plow = low, *phigh = high;
4571 return 1;
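/* A worked example, not part of the original file: merging "+ [2, 5]"
   with "+ [4, 9]".  Range 0 starts first, the ranges overlap
   (5 >= 4) and neither is a subset of the other, so the
   in0_p && in1_p case above yields the intersection "+ [4, 5]".
   Merging "+ [2, 5]" with "+ [7, 9]" instead takes the no_overlap
   branch and produces the always-false range "- [-, -]".  */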
4575 /* Subroutine of fold, looking inside expressions of the form
4576 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4577 of the COND_EXPR. This function is being used also to optimize
4578 A op B ? C : A, by reversing the comparison first.
4580 Return a folded expression whose code is not a COND_EXPR
4581 anymore, or NULL_TREE if no folding opportunity is found. */
4583 static tree
4584 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4586 enum tree_code comp_code = TREE_CODE (arg0);
4587 tree arg00 = TREE_OPERAND (arg0, 0);
4588 tree arg01 = TREE_OPERAND (arg0, 1);
4589 tree arg1_type = TREE_TYPE (arg1);
4590 tree tem;
4592 STRIP_NOPS (arg1);
4593 STRIP_NOPS (arg2);
4595 /* If we have A op 0 ? A : -A, consider applying the following
4596 transformations:
4598 A == 0? A : -A same as -A
4599 A != 0? A : -A same as A
4600 A >= 0? A : -A same as abs (A)
4601 A > 0? A : -A same as abs (A)
4602 A <= 0? A : -A same as -abs (A)
4603 A < 0? A : -A same as -abs (A)
4605 None of these transformations work for modes with signed
4606 zeros. If A is +/-0, the first two transformations will
4607 change the sign of the result (from +0 to -0, or vice
4608 versa). The last four will fix the sign of the result,
4609 even though the original expressions could be positive or
4610 negative, depending on the sign of A.
4612 Note that all these transformations are correct if A is
4613 NaN, since the two alternatives (A and -A) are also NaNs. */
4614 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4615 ? real_zerop (arg01)
4616 : integer_zerop (arg01))
4617 && ((TREE_CODE (arg2) == NEGATE_EXPR
4618 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4619 /* In the case that A is of the form X-Y, '-A' (arg2) may
4620 have already been folded to Y-X, check for that. */
4621 || (TREE_CODE (arg1) == MINUS_EXPR
4622 && TREE_CODE (arg2) == MINUS_EXPR
4623 && operand_equal_p (TREE_OPERAND (arg1, 0),
4624 TREE_OPERAND (arg2, 1), 0)
4625 && operand_equal_p (TREE_OPERAND (arg1, 1),
4626 TREE_OPERAND (arg2, 0), 0))))
4627 switch (comp_code)
4629 case EQ_EXPR:
4630 case UNEQ_EXPR:
4631 tem = fold_convert (arg1_type, arg1);
4632 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4633 case NE_EXPR:
4634 case LTGT_EXPR:
4635 return pedantic_non_lvalue (fold_convert (type, arg1));
4636 case UNGE_EXPR:
4637 case UNGT_EXPR:
4638 if (flag_trapping_math)
4639 break;
4640 /* Fall through. */
4641 case GE_EXPR:
4642 case GT_EXPR:
4643 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4644 arg1 = fold_convert (lang_hooks.types.signed_type
4645 (TREE_TYPE (arg1)), arg1);
4646 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4647 return pedantic_non_lvalue (fold_convert (type, tem));
4648 case UNLE_EXPR:
4649 case UNLT_EXPR:
4650 if (flag_trapping_math)
4651 break;
4652 case LE_EXPR:
4653 case LT_EXPR:
4654 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4655 arg1 = fold_convert (lang_hooks.types.signed_type
4656 (TREE_TYPE (arg1)), arg1);
4657 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4658 return negate_expr (fold_convert (type, tem));
4659 default:
4660 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4661 break;
4664 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4665 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4666 both transformations are correct when A is NaN: A != 0
4667 is then true, and A == 0 is false. */
4669 if (integer_zerop (arg01) && integer_zerop (arg2))
4671 if (comp_code == NE_EXPR)
4672 return pedantic_non_lvalue (fold_convert (type, arg1));
4673 else if (comp_code == EQ_EXPR)
4674 return build_int_cst (type, 0);
4677 /* Try some transformations of A op B ? A : B.
4679 A == B? A : B same as B
4680 A != B? A : B same as A
4681 A >= B? A : B same as max (A, B)
4682 A > B? A : B same as max (B, A)
4683 A <= B? A : B same as min (A, B)
4684 A < B? A : B same as min (B, A)
4686 As above, these transformations don't work in the presence
4687 of signed zeros. For example, if A and B are zeros of
4688 opposite sign, the first two transformations will change
4689 the sign of the result. In the last four, the original
4690 expressions give different results for (A=+0, B=-0) and
4691 (A=-0, B=+0), but the transformed expressions do not.
4693 The first two transformations are correct if either A or B
4694 is a NaN. In the first transformation, the condition will
4695 be false, and B will indeed be chosen. In the case of the
4696 second transformation, the condition A != B will be true,
4697 and A will be chosen.
4699 The conversions to max() and min() are not correct if B is
4700 a number and A is not. The conditions in the original
4701 expressions will be false, so all four give B. The min()
4702 and max() versions would give a NaN instead. */
4703 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4704 /* Avoid these transformations if the COND_EXPR may be used
4705 as an lvalue in the C++ front-end. PR c++/19199. */
4706 && (in_gimple_form
4707 || (strcmp (lang_hooks.name, "GNU C++") != 0
4708 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4709 || ! maybe_lvalue_p (arg1)
4710 || ! maybe_lvalue_p (arg2)))
4712 tree comp_op0 = arg00;
4713 tree comp_op1 = arg01;
4714 tree comp_type = TREE_TYPE (comp_op0);
4716 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4717 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4719 comp_type = type;
4720 comp_op0 = arg1;
4721 comp_op1 = arg2;
4724 switch (comp_code)
4726 case EQ_EXPR:
4727 return pedantic_non_lvalue (fold_convert (type, arg2));
4728 case NE_EXPR:
4729 return pedantic_non_lvalue (fold_convert (type, arg1));
4730 case LE_EXPR:
4731 case LT_EXPR:
4732 case UNLE_EXPR:
4733 case UNLT_EXPR:
4734 /* In C++ a ?: expression can be an lvalue, so put the
4735 operand which will be used if they are equal first
4736 so that we can convert this back to the
4737 corresponding COND_EXPR. */
4738 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4740 comp_op0 = fold_convert (comp_type, comp_op0);
4741 comp_op1 = fold_convert (comp_type, comp_op1);
4742 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4743 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4744 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4745 return pedantic_non_lvalue (fold_convert (type, tem));
4747 break;
4748 case GE_EXPR:
4749 case GT_EXPR:
4750 case UNGE_EXPR:
4751 case UNGT_EXPR:
4752 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4754 comp_op0 = fold_convert (comp_type, comp_op0);
4755 comp_op1 = fold_convert (comp_type, comp_op1);
4756 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4757 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4758 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4759 return pedantic_non_lvalue (fold_convert (type, tem));
4761 break;
4762 case UNEQ_EXPR:
4763 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4764 return pedantic_non_lvalue (fold_convert (type, arg2));
4765 break;
4766 case LTGT_EXPR:
4767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4768 return pedantic_non_lvalue (fold_convert (type, arg1));
4769 break;
4770 default:
4771 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4772 break;
4776 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4777 we might still be able to simplify this. For example,
4778 if C1 is one less or one more than C2, this might have started
4779 out as a MIN or MAX and been transformed by this function.
4780 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4782 if (INTEGRAL_TYPE_P (type)
4783 && TREE_CODE (arg01) == INTEGER_CST
4784 && TREE_CODE (arg2) == INTEGER_CST)
4785 switch (comp_code)
4787 case EQ_EXPR:
4788 /* We can replace A with C1 in this case. */
4789 arg1 = fold_convert (type, arg01);
4790 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4792 case LT_EXPR:
4793 /* If C1 is C2 + 1, this is min(A, C2). */
4794 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4795 OEP_ONLY_CONST)
4796 && operand_equal_p (arg01,
4797 const_binop (PLUS_EXPR, arg2,
4798 integer_one_node, 0),
4799 OEP_ONLY_CONST))
4800 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4801 type, arg1, arg2));
4802 break;
4804 case LE_EXPR:
4805 /* If C1 is C2 - 1, this is min(A, C2). */
4806 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4807 OEP_ONLY_CONST)
4808 && operand_equal_p (arg01,
4809 const_binop (MINUS_EXPR, arg2,
4810 integer_one_node, 0),
4811 OEP_ONLY_CONST))
4812 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4813 type, arg1, arg2));
4814 break;
4816 case GT_EXPR:
4817 /* If C1 is C2 - 1, this is max(A, C2). */
4818 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4819 OEP_ONLY_CONST)
4820 && operand_equal_p (arg01,
4821 const_binop (MINUS_EXPR, arg2,
4822 integer_one_node, 0),
4823 OEP_ONLY_CONST))
4824 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4825 type, arg1, arg2));
4826 break;
4828 case GE_EXPR:
4829 /* If C1 is C2 + 1, this is max(A, C2). */
4830 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4831 OEP_ONLY_CONST)
4832 && operand_equal_p (arg01,
4833 const_binop (PLUS_EXPR, arg2,
4834 integer_one_node, 0),
4835 OEP_ONLY_CONST))
4836 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4837 type, arg1, arg2));
4838 break;
4839 case NE_EXPR:
4840 break;
4841 default:
4842 gcc_unreachable ();
4845 return NULL_TREE;
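/* Worked examples, not part of the original file, of the
   transformations above for a plain int x:

     x > 0 ? x : -x    becomes  ABS_EXPR <x>
     x < y ? x : y     becomes  MIN_EXPR <y, x>
     x < 3 ? x : 2     becomes  MIN_EXPR <x, 2>, since C1 == C2 + 1

   For floating point the rewrites are guarded: with signed zeros or
   trapping math honored, the abs and min/max forms above are not
   used.  */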
4850 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4851 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4852 #endif
4854 /* EXP is some logical combination of boolean tests. See if we can
4855 merge it into some range test. Return the new tree if so. */
4857 static tree
4858 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4860 int or_op = (code == TRUTH_ORIF_EXPR
4861 || code == TRUTH_OR_EXPR);
4862 int in0_p, in1_p, in_p;
4863 tree low0, low1, low, high0, high1, high;
4864 bool strict_overflow_p = false;
4865 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4866 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4867 tree tem;
4868 const char * const warnmsg = G_("assuming signed overflow does not occur "
4869 "when simplifying range test");
4871 /* If this is an OR operation, invert both sides; we will invert
4872 again at the end. */
4873 if (or_op)
4874 in0_p = ! in0_p, in1_p = ! in1_p;
4876 /* If both expressions are the same, if we can merge the ranges, and we
4877 can build the range test, return it or it inverted. If one of the
4878 ranges is always true or always false, consider it to be the same
4879 expression as the other. */
4880 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4881 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4882 in1_p, low1, high1)
4883 && 0 != (tem = (build_range_check (type,
4884 lhs != 0 ? lhs
4885 : rhs != 0 ? rhs : integer_zero_node,
4886 in_p, low, high))))
4888 if (strict_overflow_p)
4889 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4890 return or_op ? invert_truthvalue (tem) : tem;
4893 /* On machines where the branch cost is high, if this is a
4894 short-circuited branch and the underlying object on both sides
4895 is the same, make a non-short-circuit operation. */
4896 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4897 && lhs != 0 && rhs != 0
4898 && (code == TRUTH_ANDIF_EXPR
4899 || code == TRUTH_ORIF_EXPR)
4900 && operand_equal_p (lhs, rhs, 0))
4902 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4903 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4904 which cases we can't do this. */
4905 if (simple_operand_p (lhs))
4906 return build2 (code == TRUTH_ANDIF_EXPR
4907 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4908 type, op0, op1);
4910 else if (lang_hooks.decls.global_bindings_p () == 0
4911 && ! CONTAINS_PLACEHOLDER_P (lhs))
4913 tree common = save_expr (lhs);
4915 if (0 != (lhs = build_range_check (type, common,
4916 or_op ? ! in0_p : in0_p,
4917 low0, high0))
4918 && (0 != (rhs = build_range_check (type, common,
4919 or_op ? ! in1_p : in1_p,
4920 low1, high1))))
4922 if (strict_overflow_p)
4923 fold_overflow_warning (warnmsg,
4924 WARN_STRICT_OVERFLOW_COMPARISON);
4925 return build2 (code == TRUTH_ANDIF_EXPR
4926 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4927 type, lhs, rhs);
4932 return 0;
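/* A worked example, not part of the original file: for
   ch >= '0' && ch <= '9', make_range yields "+ ['0', -]" for the lhs
   and "+ [-, '9']" for the rhs over the same operand, merge_ranges
   intersects them into "+ ['0', '9']", and build_range_check then
   emits the single test (unsigned) (ch - '0') <= 9, modulo the exact
   types involved.  */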
4935 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4936 P-bit value. Arrange things so the extra bits will be set to zero if
4937 and only if C is sign-extended to its full width. If MASK is nonzero,
4938 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4940 static tree
4941 unextend (tree c, int p, int unsignedp, tree mask)
4943 tree type = TREE_TYPE (c);
4944 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4945 tree temp;
4947 if (p == modesize || unsignedp)
4948 return c;
4950 /* We work by getting just the sign bit into the low-order bit, then
4951 into the high-order bit, then sign-extend. We then XOR that value
4952 with C. */
4953 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4954 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4956 /* We must use a signed type in order to get an arithmetic right shift.
4957 However, we must also avoid introducing accidental overflows, so that
4958 a subsequent call to integer_zerop will work. Hence we must
4959 do the type conversion here. At this point, the constant is either
4960 zero or one, and the conversion to a signed type can never overflow.
4961 We could get an overflow if this conversion is done anywhere else. */
4962 if (TYPE_UNSIGNED (type))
4963 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4965 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4966 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4967 if (mask != 0)
4968 temp = const_binop (BIT_AND_EXPR, temp,
4969 fold_convert (TREE_TYPE (c), mask), 0);
4970 /* If necessary, convert the type back to match the type of C. */
4971 if (TYPE_UNSIGNED (type))
4972 temp = fold_convert (type, temp);
4974 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
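/* A worked trace, not part of the original file: unextending
   C = 0x80 with P = 8, UNSIGNEDP = 0 and no MASK in a 32-bit mode:

     temp = 0x80 >> 7          = 1           (isolate the sign bit)
     temp = temp & 1           = 1
     temp = temp << 31         = 0x80000000
     temp = temp >> (32-8-1)   = 0xffffff00  (arithmetic shift by 23)
     c ^ temp                  = 0xffffff80

   which is exactly the 8-bit value -128 sign-extended to 32 bits.  */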
4977 /* Find ways of folding logical expressions of LHS and RHS:
4978 Try to merge two comparisons to the same innermost item.
4979 Look for range tests like "ch >= '0' && ch <= '9'".
4980 Look for combinations of simple terms on machines with expensive branches
4981 and evaluate the RHS unconditionally.
4983 For example, if we have p->a == 2 && p->b == 4 and we can make an
4984 object large enough to span both A and B, we can do this with a comparison
4985 against the object ANDed with the a mask.
4987 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4988 operations to do this with one comparison.
4990 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4991 function and the one above.
4993 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4994 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4996 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4997 two operands.
4999 We return the simplified tree or 0 if no optimization is possible. */
5001 static tree
5002 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5004 /* If this is the "or" of two comparisons, we can do something if
5005 the comparisons are NE_EXPR. If this is the "and", we can do something
5006 if the comparisons are EQ_EXPR. I.e.,
5007 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5009 WANTED_CODE is this operation code. For single bit fields, we can
5010 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5011 comparison for one-bit fields. */
5013 enum tree_code wanted_code;
5014 enum tree_code lcode, rcode;
5015 tree ll_arg, lr_arg, rl_arg, rr_arg;
5016 tree ll_inner, lr_inner, rl_inner, rr_inner;
5017 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5018 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5019 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5020 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5021 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5022 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5023 enum machine_mode lnmode, rnmode;
5024 tree ll_mask, lr_mask, rl_mask, rr_mask;
5025 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5026 tree l_const, r_const;
5027 tree lntype, rntype, result;
5028 int first_bit, end_bit;
5029 int volatilep;
5030 tree orig_lhs = lhs, orig_rhs = rhs;
5031 enum tree_code orig_code = code;
5033 /* Start by getting the comparison codes. Fail if anything is volatile.
5034 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5035 it were surrounded with a NE_EXPR. */
5037 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5038 return 0;
5040 lcode = TREE_CODE (lhs);
5041 rcode = TREE_CODE (rhs);
5043 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5045 lhs = build2 (NE_EXPR, truth_type, lhs,
5046 build_int_cst (TREE_TYPE (lhs), 0));
5047 lcode = NE_EXPR;
5050 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5052 rhs = build2 (NE_EXPR, truth_type, rhs,
5053 build_int_cst (TREE_TYPE (rhs), 0));
5054 rcode = NE_EXPR;
5057 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5058 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5059 return 0;
5061 ll_arg = TREE_OPERAND (lhs, 0);
5062 lr_arg = TREE_OPERAND (lhs, 1);
5063 rl_arg = TREE_OPERAND (rhs, 0);
5064 rr_arg = TREE_OPERAND (rhs, 1);
5066 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5067 if (simple_operand_p (ll_arg)
5068 && simple_operand_p (lr_arg))
5070 tree result;
5071 if (operand_equal_p (ll_arg, rl_arg, 0)
5072 && operand_equal_p (lr_arg, rr_arg, 0))
5074 result = combine_comparisons (code, lcode, rcode,
5075 truth_type, ll_arg, lr_arg);
5076 if (result)
5077 return result;
5079 else if (operand_equal_p (ll_arg, rr_arg, 0)
5080 && operand_equal_p (lr_arg, rl_arg, 0))
5082 result = combine_comparisons (code, lcode,
5083 swap_tree_comparison (rcode),
5084 truth_type, ll_arg, lr_arg);
5085 if (result)
5086 return result;
5090 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5091 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5093 /* If the RHS can be evaluated unconditionally and its operands are
5094 simple, it wins to evaluate the RHS unconditionally on machines
5095 with expensive branches. In this case, this isn't a comparison
5096 that can be merged. Avoid doing this if the RHS is a floating-point
5097 comparison since those can trap. */
5099 if (BRANCH_COST >= 2
5100 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5101 && simple_operand_p (rl_arg)
5102 && simple_operand_p (rr_arg))
5104 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5105 if (code == TRUTH_OR_EXPR
5106 && lcode == NE_EXPR && integer_zerop (lr_arg)
5107 && rcode == NE_EXPR && integer_zerop (rr_arg)
5108 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5109 return build2 (NE_EXPR, truth_type,
5110 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5111 ll_arg, rl_arg),
5112 build_int_cst (TREE_TYPE (ll_arg), 0));
5114 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5115 if (code == TRUTH_AND_EXPR
5116 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5117 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5118 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5119 return build2 (EQ_EXPR, truth_type,
5120 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5121 ll_arg, rl_arg),
5122 build_int_cst (TREE_TYPE (ll_arg), 0));
5124 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5126 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5127 return build2 (code, truth_type, lhs, rhs);
5128 return NULL_TREE;
5132 /* See if the comparisons can be merged. Then get all the parameters for
5133 each side. */
5135 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5136 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5137 return 0;
5139 volatilep = 0;
5140 ll_inner = decode_field_reference (ll_arg,
5141 &ll_bitsize, &ll_bitpos, &ll_mode,
5142 &ll_unsignedp, &volatilep, &ll_mask,
5143 &ll_and_mask);
5144 lr_inner = decode_field_reference (lr_arg,
5145 &lr_bitsize, &lr_bitpos, &lr_mode,
5146 &lr_unsignedp, &volatilep, &lr_mask,
5147 &lr_and_mask);
5148 rl_inner = decode_field_reference (rl_arg,
5149 &rl_bitsize, &rl_bitpos, &rl_mode,
5150 &rl_unsignedp, &volatilep, &rl_mask,
5151 &rl_and_mask);
5152 rr_inner = decode_field_reference (rr_arg,
5153 &rr_bitsize, &rr_bitpos, &rr_mode,
5154 &rr_unsignedp, &volatilep, &rr_mask,
5155 &rr_and_mask);
5157 /* The inner operation on the lhs of each comparison must be the
5158 same if we are to be able to do anything.
5159 Then see if we have constants. If not, the same must be true for
5160 the rhs's. */
5161 if (volatilep || ll_inner == 0 || rl_inner == 0
5162 || ! operand_equal_p (ll_inner, rl_inner, 0))
5163 return 0;
5165 if (TREE_CODE (lr_arg) == INTEGER_CST
5166 && TREE_CODE (rr_arg) == INTEGER_CST)
5167 l_const = lr_arg, r_const = rr_arg;
5168 else if (lr_inner == 0 || rr_inner == 0
5169 || ! operand_equal_p (lr_inner, rr_inner, 0))
5170 return 0;
5171 else
5172 l_const = r_const = 0;
5174 /* If either comparison code is not correct for our logical operation,
5175 fail. However, we can convert a one-bit comparison against zero into
5176 the opposite comparison against that bit being set in the field. */
5178 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5179 if (lcode != wanted_code)
5181 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5183 /* Make the left operand unsigned, since we are only interested
5184 in the value of one bit. Otherwise we are doing the wrong
5185 thing below. */
5186 ll_unsignedp = 1;
5187 l_const = ll_mask;
5189 else
5190 return 0;
5193 /* This is analogous to the code for l_const above. */
5194 if (rcode != wanted_code)
5196 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5198 rl_unsignedp = 1;
5199 r_const = rl_mask;
5201 else
5202 return 0;
5205 /* After this point all optimizations will generate bit-field
5206 references, which we might not want. */
5207 if (! lang_hooks.can_use_bit_fields_p ())
5208 return 0;
5210 /* See if we can find a mode that contains both fields being compared on
5211 the left. If we can't, fail. Otherwise, update all constants and masks
5212 to be relative to a field of that size. */
5213 first_bit = MIN (ll_bitpos, rl_bitpos);
5214 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5215 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5216 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5217 volatilep);
5218 if (lnmode == VOIDmode)
5219 return 0;
5221 lnbitsize = GET_MODE_BITSIZE (lnmode);
5222 lnbitpos = first_bit & ~ (lnbitsize - 1);
5223 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5224 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5226 if (BYTES_BIG_ENDIAN)
5228 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5229 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5232 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5233 size_int (xll_bitpos), 0);
5234 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5235 size_int (xrl_bitpos), 0);
5237 if (l_const)
5239 l_const = fold_convert (lntype, l_const);
5240 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5241 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5242 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5243 fold_build1 (BIT_NOT_EXPR,
5244 lntype, ll_mask),
5245 0)))
5247 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5249 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5252 if (r_const)
5254 r_const = fold_convert (lntype, r_const);
5255 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5256 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5257 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5258 fold_build1 (BIT_NOT_EXPR,
5259 lntype, rl_mask),
5260 0)))
5262 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5268 /* If the right sides are not constant, do the same for them. Also,
5269 disallow this optimization if a size or signedness mismatch occurs
5270 between the left and right sides. */
5271 if (l_const == 0)
5273 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5274 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5275 /* Make sure the two fields on the right
5276 correspond to the left without being swapped. */
5277 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5278 return 0;
5280 first_bit = MIN (lr_bitpos, rr_bitpos);
5281 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5282 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5283 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5284 volatilep);
5285 if (rnmode == VOIDmode)
5286 return 0;
5288 rnbitsize = GET_MODE_BITSIZE (rnmode);
5289 rnbitpos = first_bit & ~ (rnbitsize - 1);
5290 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5291 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5293 if (BYTES_BIG_ENDIAN)
5295 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5296 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5299 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5300 size_int (xlr_bitpos), 0);
5301 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5302 size_int (xrr_bitpos), 0);
5304 /* Make a mask that corresponds to both fields being compared.
5305 Do this for both items being compared. If the operands are the
5306 same size and the bits being compared are in the same position
5307 then we can do this by masking both and comparing the masked
5308 results. */
5309 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5310 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5311 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5313 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5314 ll_unsignedp || rl_unsignedp);
5315 if (! all_ones_mask_p (ll_mask, lnbitsize))
5316 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5318 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5319 lr_unsignedp || rr_unsignedp);
5320 if (! all_ones_mask_p (lr_mask, rnbitsize))
5321 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5323 return build2 (wanted_code, truth_type, lhs, rhs);
5326 /* There is still another way we can do something: If both pairs of
5327 fields being compared are adjacent, we may be able to make a wider
5328 field containing them both.
5330 Note that we still must mask the lhs/rhs expressions. Furthermore,
5331 the mask must be shifted to account for the shift done by
5332 make_bit_field_ref. */
5333 if ((ll_bitsize + ll_bitpos == rl_bitpos
5334 && lr_bitsize + lr_bitpos == rr_bitpos)
5335 || (ll_bitpos == rl_bitpos + rl_bitsize
5336 && lr_bitpos == rr_bitpos + rr_bitsize))
5338 tree type;
5340 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5341 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5342 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5343 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5345 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5346 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5347 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5348 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5350 /* Convert to the smaller type before masking out unwanted bits. */
5351 type = lntype;
5352 if (lntype != rntype)
5354 if (lnbitsize > rnbitsize)
5356 lhs = fold_convert (rntype, lhs);
5357 ll_mask = fold_convert (rntype, ll_mask);
5358 type = rntype;
5360 else if (lnbitsize < rnbitsize)
5362 rhs = fold_convert (lntype, rhs);
5363 lr_mask = fold_convert (lntype, lr_mask);
5364 type = lntype;
5368 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5369 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5371 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5372 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5374 return build2 (wanted_code, truth_type, lhs, rhs);
5377 return 0;
5380 /* Handle the case of comparisons with constants. If there is something in
5381 common between the masks, those bits of the constants must be the same.
5382 If not, the condition is always false. Test for this to avoid generating
5383 incorrect code below. */
5384 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5385 if (! integer_zerop (result)
5386 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5387 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5389 if (wanted_code == NE_EXPR)
5391 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5392 return constant_boolean_node (true, truth_type);
5394 else
5396 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5397 return constant_boolean_node (false, truth_type);
5401 /* Construct the expression we will return. First get the component
5402 reference we will make. Unless the mask is all ones across the width
5403 of that field, perform the mask operation. Then compare with the
5404 merged constant. */
5405 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5406 ll_unsignedp || rl_unsignedp);
5408 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5409 if (! all_ones_mask_p (ll_mask, lnbitsize))
5410 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5412 return build2 (wanted_code, truth_type, result,
5413 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
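/* An illustrative sketch, not part of the original file, of the kind
   of merge this function performs (struct and values hypothetical).
   Given

     struct s { unsigned int a : 8; unsigned int b : 8; };

   the test p->a == 2 && p->b == 4 can become a single 16-bit load of
   the bits spanning both fields, masked and compared against the
   merged constant -- on a little-endian target, where A occupies the
   low byte, the shifted constants IOR together to 0x0402.  */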
5416 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5417 constant. */
5419 static tree
5420 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5422 tree arg0 = op0;
5423 enum tree_code op_code;
5424 tree comp_const = op1;
5425 tree minmax_const;
5426 int consts_equal, consts_lt;
5427 tree inner;
5429 STRIP_SIGN_NOPS (arg0);
5431 op_code = TREE_CODE (arg0);
5432 minmax_const = TREE_OPERAND (arg0, 1);
5433 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5434 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5435 inner = TREE_OPERAND (arg0, 0);
5437 /* If something does not permit us to optimize, return NULL_TREE. */
5438 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5439 || TREE_CODE (comp_const) != INTEGER_CST
5440 || TREE_CONSTANT_OVERFLOW (comp_const)
5441 || TREE_CODE (minmax_const) != INTEGER_CST
5442 || TREE_CONSTANT_OVERFLOW (minmax_const))
5443 return NULL_TREE;
5445 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5446 and GT_EXPR, doing the rest with recursive calls using logical
5447 simplifications. */
5448 switch (code)
5450 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5452 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5453 type, op0, op1);
5454 if (tem)
5455 return invert_truthvalue (tem);
5456 return NULL_TREE;
5459 case GE_EXPR:
5460 return
5461 fold_build2 (TRUTH_ORIF_EXPR, type,
5462 optimize_minmax_comparison
5463 (EQ_EXPR, type, arg0, comp_const),
5464 optimize_minmax_comparison
5465 (GT_EXPR, type, arg0, comp_const));
5467 case EQ_EXPR:
5468 if (op_code == MAX_EXPR && consts_equal)
5469 /* MAX (X, 0) == 0 -> X <= 0 */
5470 return fold_build2 (LE_EXPR, type, inner, comp_const);
5472 else if (op_code == MAX_EXPR && consts_lt)
5473 /* MAX (X, 0) == 5 -> X == 5 */
5474 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5476 else if (op_code == MAX_EXPR)
5477 /* MAX (X, 0) == -1 -> false */
5478 return omit_one_operand (type, integer_zero_node, inner);
5480 else if (consts_equal)
5481 /* MIN (X, 0) == 0 -> X >= 0 */
5482 return fold_build2 (GE_EXPR, type, inner, comp_const);
5484 else if (consts_lt)
5485 /* MIN (X, 0) == 5 -> false */
5486 return omit_one_operand (type, integer_zero_node, inner);
5488 else
5489 /* MIN (X, 0) == -1 -> X == -1 */
5490 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5492 case GT_EXPR:
5493 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5494 /* MAX (X, 0) > 0 -> X > 0
5495 MAX (X, 0) > 5 -> X > 5 */
5496 return fold_build2 (GT_EXPR, type, inner, comp_const);
5498 else if (op_code == MAX_EXPR)
5499 /* MAX (X, 0) > -1 -> true */
5500 return omit_one_operand (type, integer_one_node, inner);
5502 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5503 /* MIN (X, 0) > 0 -> false
5504 MIN (X, 0) > 5 -> false */
5505 return omit_one_operand (type, integer_zero_node, inner);
5507 else
5508 /* MIN (X, 0) > -1 -> X > -1 */
5509 return fold_build2 (GT_EXPR, type, inner, comp_const);
5511 default:
5512 return NULL_TREE;
5516 /* T is an integer expression that is being multiplied, divided, or reduced
5517 modulo a constant C (CODE says which operation, and what kind of divide
5518 or modulus). See if we can eliminate that operation by folding it with
5519 other operations already in T. WIDE_TYPE, if non-null, is a type that
5520 should be used for the computation if wider than our type.
5522 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5523 (X * 2) + (Y * 4). We must, however, be assured that either the original
5524 expression would not overflow or that overflow is undefined for the type
5525 in the language in question.
5527 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5528 the machine has a multiply-accumulate insn or that this is part of an
5529 addressing calculation.
5531 If we return a non-null expression, it is an equivalent form of the
5532 original computation, but need not be in the original type.
5534 We set *STRICT_OVERFLOW_P to true if the return value depends on
5535 signed overflow being undefined. Otherwise we do not change
5536 *STRICT_OVERFLOW_P. */
5538 static tree
5539 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5540 bool *strict_overflow_p)
5542 /* To avoid exponential search depth, refuse to allow recursion past
5543 three levels. Beyond that (1) it's highly unlikely that we'll find
5544 something interesting and (2) we've probably processed it before
5545 when we built the inner expression. */
5547 static int depth;
5548 tree ret;
5550 if (depth > 3)
5551 return NULL;
5553 depth++;
5554 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5555 depth--;
5557 return ret;
5560 static tree
5561 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5562 bool *strict_overflow_p)
5564 tree type = TREE_TYPE (t);
5565 enum tree_code tcode = TREE_CODE (t);
5566 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5567 > GET_MODE_SIZE (TYPE_MODE (type)))
5568 ? wide_type : type);
5569 tree t1, t2;
5570 int same_p = tcode == code;
5571 tree op0 = NULL_TREE, op1 = NULL_TREE;
5572 bool sub_strict_overflow_p;
5574 /* Don't deal with constants of zero here; they confuse the code below. */
5575 if (integer_zerop (c))
5576 return NULL_TREE;
5578 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5579 op0 = TREE_OPERAND (t, 0);
5581 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5582 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5584 /* Note that we need not handle conditional operations here since fold
5585 already handles those cases. So just do arithmetic here. */
5586 switch (tcode)
5588 case INTEGER_CST:
5589 /* For a constant, we can always simplify if we are a multiply
5590 or (for divide and modulus) if it is a multiple of our constant. */
5591 if (code == MULT_EXPR
5592 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5593 return const_binop (code, fold_convert (ctype, t),
5594 fold_convert (ctype, c), 0);
5595 break;
5597 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5598 /* If op0 is an expression ... */
5599 if ((COMPARISON_CLASS_P (op0)
5600 || UNARY_CLASS_P (op0)
5601 || BINARY_CLASS_P (op0)
5602 || EXPRESSION_CLASS_P (op0))
5603 /* ... and is unsigned, and its type is smaller than ctype,
5604 then we cannot pass through as widening. */
5605 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5606 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5607 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5608 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5609 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5610 /* ... or this is a truncation (t is narrower than op0),
5611 then we cannot pass through this narrowing. */
5612 || (GET_MODE_SIZE (TYPE_MODE (type))
5613 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5614 /* ... or signedness changes for division or modulus,
5615 then we cannot pass through this conversion. */
5616 || (code != MULT_EXPR
5617 && (TYPE_UNSIGNED (ctype)
5618 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5619 break;
5621 /* Pass the constant down and see if we can make a simplification. If
5622 we can, replace this expression with the inner simplification for
5623 possible later conversion to our or some other type. */
5624 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5625 && TREE_CODE (t2) == INTEGER_CST
5626 && ! TREE_CONSTANT_OVERFLOW (t2)
5627 && (0 != (t1 = extract_muldiv (op0, t2, code,
5628 code == MULT_EXPR
5629 ? ctype : NULL_TREE,
5630 strict_overflow_p))))
5631 return t1;
5632 break;
5634 case ABS_EXPR:
5635 /* If widening the type changes it from signed to unsigned, then we
5636 must avoid building ABS_EXPR itself as unsigned. */
5637 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5639 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5640 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5641 != 0)
5643 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5644 return fold_convert (ctype, t1);
5646 break;
5648 /* FALLTHROUGH */
5649 case NEGATE_EXPR:
5650 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5651 != 0)
5652 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5653 break;
5655 case MIN_EXPR: case MAX_EXPR:
5656 /* If widening the type changes the signedness, then we can't perform
5657 this optimization as that changes the result. */
5658 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5659 break;
5661 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5662 sub_strict_overflow_p = false;
5663 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5664 &sub_strict_overflow_p)) != 0
5665 && (t2 = extract_muldiv (op1, c, code, wide_type,
5666 &sub_strict_overflow_p)) != 0)
5668 if (tree_int_cst_sgn (c) < 0)
5669 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5670 if (sub_strict_overflow_p)
5671 *strict_overflow_p = true;
5672 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5673 fold_convert (ctype, t2));
5675 break;
5677 case LSHIFT_EXPR: case RSHIFT_EXPR:
5678 /* If the second operand is constant, this is a multiplication
5679 or floor division by a power of two, so we can treat it that
5680 way unless the multiplier or divisor overflows. Signed
5681 left-shift overflow is implementation-defined rather than
5682 undefined in C90, so do not convert signed left shift into
5683 multiplication. */
5684 if (TREE_CODE (op1) == INTEGER_CST
5685 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5686 /* const_binop may not detect overflow correctly,
5687 so check for it explicitly here. */
5688 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5689 && TREE_INT_CST_HIGH (op1) == 0
5690 && 0 != (t1 = fold_convert (ctype,
5691 const_binop (LSHIFT_EXPR,
5692 size_one_node,
5693 op1, 0)))
5694 && ! TREE_OVERFLOW (t1))
5695 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5696 ? MULT_EXPR : FLOOR_DIV_EXPR,
5697 ctype, fold_convert (ctype, op0), t1),
5698 c, code, wide_type, strict_overflow_p);
5699 break;
5701 case PLUS_EXPR: case MINUS_EXPR:
5702 /* See if we can eliminate the operation on both sides. If we can, we
5703 can return a new PLUS or MINUS. If we can't, the only remaining
5704 cases where we can do anything are if the second operand is a
5705 constant. */
5706 sub_strict_overflow_p = false;
5707 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5708 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5709 if (t1 != 0 && t2 != 0
5710 && (code == MULT_EXPR
5711 /* If not multiplication, we can only do this if both operands
5712 are divisible by c. */
5713 || (multiple_of_p (ctype, op0, c)
5714 && multiple_of_p (ctype, op1, c))))
5716 if (sub_strict_overflow_p)
5717 *strict_overflow_p = true;
5718 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5719 fold_convert (ctype, t2));
5722 /* If this was a subtraction, negate OP1 and set it to be an addition.
5723 This simplifies the logic below. */
5724 if (tcode == MINUS_EXPR)
5725 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5727 if (TREE_CODE (op1) != INTEGER_CST)
5728 break;
5730 /* If either OP1 or C is negative, this optimization is not safe for
5731 some of the division and remainder types while for others we need
5732 to change the code. */
5733 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5735 if (code == CEIL_DIV_EXPR)
5736 code = FLOOR_DIV_EXPR;
5737 else if (code == FLOOR_DIV_EXPR)
5738 code = CEIL_DIV_EXPR;
5739 else if (code != MULT_EXPR
5740 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5741 break;
5744 /* If it's a multiply or a division/modulus operation of a multiple
5745 of our constant, do the operation and verify it doesn't overflow. */
5746 if (code == MULT_EXPR
5747 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5749 op1 = const_binop (code, fold_convert (ctype, op1),
5750 fold_convert (ctype, c), 0);
5751 /* We allow the constant to overflow with wrapping semantics. */
5752 if (op1 == 0
5753 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5754 break;
5756 else
5757 break;
5759 /* If we have an unsigned type that is not a sizetype, we cannot widen
5760 the operation since it will change the result if the original
5761 computation overflowed. */
5762 if (TYPE_UNSIGNED (ctype)
5763 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5764 && ctype != type)
5765 break;
5767 /* If we were able to eliminate our operation from the first side,
5768 apply our operation to the second side and reform the PLUS. */
5769 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5770 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5772 /* The last case is if we are a multiply. In that case, we can
5773 apply the distributive law to commute the multiply and addition
5774 if the multiplication of the constants doesn't overflow. */
5775 if (code == MULT_EXPR)
5776 return fold_build2 (tcode, ctype,
5777 fold_build2 (code, ctype,
5778 fold_convert (ctype, op0),
5779 fold_convert (ctype, c)),
5780 op1);
5782 break;
5784 case MULT_EXPR:
5785 /* We have a special case here if we are doing something like
5786 (C * 8) % 4 since we know that's zero. */
5787 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5788 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5789 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5790 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5791 return omit_one_operand (type, integer_zero_node, op0);
5793 /* ... fall through ... */
5795 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5796 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5797 /* If we can extract our operation from the LHS, do so and return a
5798 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5799 do something only if the second operand is a constant. */
5800 if (same_p
5801 && (t1 = extract_muldiv (op0, c, code, wide_type,
5802 strict_overflow_p)) != 0)
5803 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5804 fold_convert (ctype, op1));
5805 else if (tcode == MULT_EXPR && code == MULT_EXPR
5806 && (t1 = extract_muldiv (op1, c, code, wide_type,
5807 strict_overflow_p)) != 0)
5808 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5809 fold_convert (ctype, t1));
5810 else if (TREE_CODE (op1) != INTEGER_CST)
5811 return 0;
5813 /* If these are the same operation types, we can associate them
5814 assuming no overflow. */
5815 if (tcode == code
5816 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5817 fold_convert (ctype, c), 0))
5818 && ! TREE_OVERFLOW (t1))
5819 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5821 /* If these operations "cancel" each other, we have the main
5822 optimizations of this pass, which occur when either constant is a
5823 multiple of the other, in which case we replace this with an
5824 operation of either CODE or TCODE.
5826 If we have an unsigned type that is not a sizetype, we cannot do
5827 this since it will change the result if the original computation
5828 overflowed. */
5829 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5830 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5831 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5832 || (tcode == MULT_EXPR
5833 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5834 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5836 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5838 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5839 *strict_overflow_p = true;
5840 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5841 fold_convert (ctype,
5842 const_binop (TRUNC_DIV_EXPR,
5843 op1, c, 0)));
5845 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5847 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5848 *strict_overflow_p = true;
5849 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5850 fold_convert (ctype,
5851 const_binop (TRUNC_DIV_EXPR,
5852 c, op1, 0)));
5855 break;
5857 default:
5858 break;
5861 return 0;
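/* A stand-alone sketch (not GCC code) of the central extract_muldiv
   transformation: (X * 8 + Y * 16) / 4 becomes X * 2 + Y * 4, which
   holds whenever the original sum does not overflow:

     #include <assert.h>

     int
     main (void)
     {
       long x, y;
       for (x = -8; x <= 8; x++)
         for (y = -8; y <= 8; y++)
           assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
       return 0;
     }
*/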
5864 /* Return a node which has the indicated constant VALUE (either 0 or
5865 1), and is of the indicated TYPE. */
5867 tree
5868 constant_boolean_node (int value, tree type)
5870 if (type == integer_type_node)
5871 return value ? integer_one_node : integer_zero_node;
5872 else if (type == boolean_type_node)
5873 return value ? boolean_true_node : boolean_false_node;
5874 else
5875 return build_int_cst (type, value);
5879 /* Return true if expr looks like an ARRAY_REF and set base and
5880 offset to the appropriate trees. If there is no offset,
5881 offset is set to NULL_TREE. Base will be canonicalized to
5882 something you can get the element type from using
5883 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5884 in bytes from the base. */
5886 static bool
5887 extract_array_ref (tree expr, tree *base, tree *offset)
5889 /* One canonical form is a PLUS_EXPR with the first
5890 argument being an ADDR_EXPR with a possible NOP_EXPR
5891 attached. */
5892 if (TREE_CODE (expr) == PLUS_EXPR)
5894 tree op0 = TREE_OPERAND (expr, 0);
5895 tree inner_base, dummy1;
5896 /* Strip NOP_EXPRs here because the C frontends and/or
5897 folders may present us with (int *)&x.a + 4B. */
5898 STRIP_NOPS (op0);
5899 if (extract_array_ref (op0, &inner_base, &dummy1))
5901 *base = inner_base;
5902 if (dummy1 == NULL_TREE)
5903 *offset = TREE_OPERAND (expr, 1);
5904 else
5905 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5906 dummy1, TREE_OPERAND (expr, 1));
5907 return true;
5910 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5911 which we transform into an ADDR_EXPR with appropriate
5912 offset. For other arguments to the ADDR_EXPR we assume
5913 zero offset and as such do not care about the ADDR_EXPR
5914 type and strip possible nops from it. */
5915 else if (TREE_CODE (expr) == ADDR_EXPR)
5917 tree op0 = TREE_OPERAND (expr, 0);
5918 if (TREE_CODE (op0) == ARRAY_REF)
5920 tree idx = TREE_OPERAND (op0, 1);
5921 *base = TREE_OPERAND (op0, 0);
5922 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5923 array_ref_element_size (op0));
5925 else
5927 /* Handle array-to-pointer decay as &a. */
5928 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5929 *base = TREE_OPERAND (expr, 0);
5930 else
5931 *base = expr;
5932 *offset = NULL_TREE;
5934 return true;
5936 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5937 else if (SSA_VAR_P (expr)
5938 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5940 *base = expr;
5941 *offset = NULL_TREE;
5942 return true;
5945 return false;
5949 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5950 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5951 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5952 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5953 COND is the first argument to CODE; otherwise (as in the example
5954 given here), it is the second argument. TYPE is the type of the
5955 original expression. Return NULL_TREE if no simplification is
5956 possible. */
5958 static tree
5959 fold_binary_op_with_conditional_arg (enum tree_code code,
5960 tree type, tree op0, tree op1,
5961 tree cond, tree arg, int cond_first_p)
5963 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5964 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5965 tree test, true_value, false_value;
5966 tree lhs = NULL_TREE;
5967 tree rhs = NULL_TREE;
5969 /* This transformation is only worthwhile if we don't have to wrap
5970 arg in a SAVE_EXPR, and the operation can be simplified on at least
5971 one of the branches once it's pushed inside the COND_EXPR. */
5972 if (!TREE_CONSTANT (arg))
5973 return NULL_TREE;
5975 if (TREE_CODE (cond) == COND_EXPR)
5977 test = TREE_OPERAND (cond, 0);
5978 true_value = TREE_OPERAND (cond, 1);
5979 false_value = TREE_OPERAND (cond, 2);
5980 /* If this operand is a void expression (such as a throw), then it does not make
5981 sense to try to perform a logical or arithmetic operation
5982 involving it. */
5983 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5984 lhs = true_value;
5985 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5986 rhs = false_value;
5988 else
5990 tree testtype = TREE_TYPE (cond);
5991 test = cond;
5992 true_value = constant_boolean_node (true, testtype);
5993 false_value = constant_boolean_node (false, testtype);
5996 arg = fold_convert (arg_type, arg);
5997 if (lhs == 0)
5999 true_value = fold_convert (cond_type, true_value);
6000 if (cond_first_p)
6001 lhs = fold_build2 (code, type, true_value, arg);
6002 else
6003 lhs = fold_build2 (code, type, arg, true_value);
6005 if (rhs == 0)
6007 false_value = fold_convert (cond_type, false_value);
6008 if (cond_first_p)
6009 rhs = fold_build2 (code, type, false_value, arg);
6010 else
6011 rhs = fold_build2 (code, type, arg, false_value);
6014 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6015 return fold_convert (type, test);
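/* A stand-alone sketch (not GCC code) of this transformation at the
   source level: a constant A distributes over both arms of the
   conditional, giving the simplifier a chance on each arm:

     #include <assert.h>

     int
     main (void)
     {
       int a = 10, x = 3, y = 7, b;
       for (b = 0; b <= 1; b++)
         assert (a + (b ? x : y) == (b ? a + x : a + y));
       return 0;
     }
*/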
6019 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6021 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6022 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6023 ADDEND is the same as X.
6025 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6026 and finite. The problematic cases are when X is zero, and its mode
6027 has signed zeros. In the case of rounding towards -infinity,
6028 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6029 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6031 static bool
6032 fold_real_zero_addition_p (tree type, tree addend, int negate)
6034 if (!real_zerop (addend))
6035 return false;
6037 /* Don't allow the fold with -fsignaling-nans. */
6038 if (HONOR_SNANS (TYPE_MODE (type)))
6039 return false;
6041 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6042 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6043 return true;
6045 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6046 if (TREE_CODE (addend) == REAL_CST
6047 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6048 negate = !negate;
6050 /* The mode has signed zeros, and we have to honor their sign.
6051 In this situation, there is only one case we can return true for.
6052 X - 0 is the same as X unless rounding towards -infinity is
6053 supported. */
6054 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
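/* A stand-alone sketch (not GCC code) of why signed zeros block the
   X + 0.0 fold but not X - 0.0: under the default rounding mode,
   -0.0 + 0.0 is +0.0, so the sign of a zero X is lost by addition
   yet preserved by subtraction (assuming IEEE arithmetic):

     #include <assert.h>
     #include <math.h>

     int
     main (void)
     {
       double x = -0.0;
       assert (signbit (x));
       assert (!signbit (x + 0.0));
       assert (signbit (x - 0.0));
       return 0;
     }
*/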
6057 /* Subroutine of fold() that checks comparisons of built-in math
6058 functions against real constants.
6060 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6061 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6062 is the type of the result and ARG0 and ARG1 are the operands of the
6063 comparison. ARG1 must be a TREE_REAL_CST.
6065 The function returns the constant folded tree if a simplification
6066 can be made, and NULL_TREE otherwise. */
6068 static tree
6069 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6070 tree type, tree arg0, tree arg1)
6072 REAL_VALUE_TYPE c;
6074 if (BUILTIN_SQRT_P (fcode))
6076 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6077 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6079 c = TREE_REAL_CST (arg1);
6080 if (REAL_VALUE_NEGATIVE (c))
6082 /* sqrt(x) < y is always false, if y is negative. */
6083 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6084 return omit_one_operand (type, integer_zero_node, arg);
6086 /* sqrt(x) > y is always true, if y is negative and we
6087 don't care about NaNs, i.e. negative values of x. */
6088 if (code == NE_EXPR || !HONOR_NANS (mode))
6089 return omit_one_operand (type, integer_one_node, arg);
6091 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6092 return fold_build2 (GE_EXPR, type, arg,
6093 build_real (TREE_TYPE (arg), dconst0));
6095 else if (code == GT_EXPR || code == GE_EXPR)
6097 REAL_VALUE_TYPE c2;
6099 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6100 real_convert (&c2, mode, &c2);
6102 if (REAL_VALUE_ISINF (c2))
6104 /* sqrt(x) > y is x == +Inf, when y is very large. */
6105 if (HONOR_INFINITIES (mode))
6106 return fold_build2 (EQ_EXPR, type, arg,
6107 build_real (TREE_TYPE (arg), c2));
6109 /* sqrt(x) > y is always false, when y is very large
6110 and we don't care about infinities. */
6111 return omit_one_operand (type, integer_zero_node, arg);
6114 /* sqrt(x) > c is the same as x > c*c. */
6115 return fold_build2 (code, type, arg,
6116 build_real (TREE_TYPE (arg), c2));
6118 else if (code == LT_EXPR || code == LE_EXPR)
6120 REAL_VALUE_TYPE c2;
6122 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6123 real_convert (&c2, mode, &c2);
6125 if (REAL_VALUE_ISINF (c2))
6127 /* sqrt(x) < y is always true, when y is a very large
6128 value and we don't care about NaNs or Infinities. */
6129 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6130 return omit_one_operand (type, integer_one_node, arg);
6132 /* sqrt(x) < y is x != +Inf when y is very large and we
6133 don't care about NaNs. */
6134 if (! HONOR_NANS (mode))
6135 return fold_build2 (NE_EXPR, type, arg,
6136 build_real (TREE_TYPE (arg), c2));
6138 /* sqrt(x) < y is x >= 0 when y is very large and we
6139 don't care about Infinities. */
6140 if (! HONOR_INFINITIES (mode))
6141 return fold_build2 (GE_EXPR, type, arg,
6142 build_real (TREE_TYPE (arg), dconst0));
6144 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6145 if (lang_hooks.decls.global_bindings_p () != 0
6146 || CONTAINS_PLACEHOLDER_P (arg))
6147 return NULL_TREE;
6149 arg = save_expr (arg);
6150 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6151 fold_build2 (GE_EXPR, type, arg,
6152 build_real (TREE_TYPE (arg),
6153 dconst0)),
6154 fold_build2 (NE_EXPR, type, arg,
6155 build_real (TREE_TYPE (arg),
6156 c2)));
6159 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6160 if (! HONOR_NANS (mode))
6161 return fold_build2 (code, type, arg,
6162 build_real (TREE_TYPE (arg), c2));
6164 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6165 if (lang_hooks.decls.global_bindings_p () == 0
6166 && ! CONTAINS_PLACEHOLDER_P (arg))
6168 arg = save_expr (arg);
6169 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6170 fold_build2 (GE_EXPR, type, arg,
6171 build_real (TREE_TYPE (arg),
6172 dconst0)),
6173 fold_build2 (code, type, arg,
6174 build_real (TREE_TYPE (arg),
6175 c2)));
6180 return NULL_TREE;
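/* A stand-alone sketch (not GCC code) of the basic sqrt fold: with a
   correctly rounded sqrt, a representable c, and an exactly
   representable c*c, sqrt(x) > c agrees with x > c*c for every
   nonnegative finite x:

     #include <assert.h>
     #include <math.h>

     int
     main (void)
     {
       double c = 3.0, x;
       for (x = 0.0; x <= 20.0; x += 0.25)
         assert ((sqrt (x) > c) == (x > c * c));
       return 0;
     }
*/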
6183 /* Subroutine of fold() that optimizes comparisons against Infinities,
6184 either +Inf or -Inf.
6186 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6187 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6188 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6190 The function returns the constant folded tree if a simplification
6191 can be made, and NULL_TREE otherwise. */
6193 static tree
6194 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6196 enum machine_mode mode;
6197 REAL_VALUE_TYPE max;
6198 tree temp;
6199 bool neg;
6201 mode = TYPE_MODE (TREE_TYPE (arg0));
6203 /* For negative infinity swap the sense of the comparison. */
6204 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6205 if (neg)
6206 code = swap_tree_comparison (code);
6208 switch (code)
6210 case GT_EXPR:
6211 /* x > +Inf is always false, if we ignore sNaNs. */
6212 if (HONOR_SNANS (mode))
6213 return NULL_TREE;
6214 return omit_one_operand (type, integer_zero_node, arg0);
6216 case LE_EXPR:
6217 /* x <= +Inf is always true, if we don't care about NaNs. */
6218 if (! HONOR_NANS (mode))
6219 return omit_one_operand (type, integer_one_node, arg0);
6221 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6222 if (lang_hooks.decls.global_bindings_p () == 0
6223 && ! CONTAINS_PLACEHOLDER_P (arg0))
6225 arg0 = save_expr (arg0);
6226 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6228 break;
6230 case EQ_EXPR:
6231 case GE_EXPR:
6232 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6233 real_maxval (&max, neg, mode);
6234 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6235 arg0, build_real (TREE_TYPE (arg0), max));
6237 case LT_EXPR:
6238 /* x < +Inf is always equal to x <= DBL_MAX. */
6239 real_maxval (&max, neg, mode);
6240 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6241 arg0, build_real (TREE_TYPE (arg0), max));
6243 case NE_EXPR:
6244 /* x != +Inf is always equal to !(x > DBL_MAX). */
6245 real_maxval (&max, neg, mode);
6246 if (! HONOR_NANS (mode))
6247 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6248 arg0, build_real (TREE_TYPE (arg0), max));
6250 /* The transformation below creates non-gimple code and thus is
6251 not appropriate if we are in gimple form. */
6252 if (in_gimple_form)
6253 return NULL_TREE;
6255 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6256 arg0, build_real (TREE_TYPE (arg0), max));
6257 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6259 default:
6260 break;
6263 return NULL_TREE;
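/* A stand-alone sketch (not GCC code) of the x < +Inf fold: for any
   double, NaN included, comparing against +Inf is the same as
   comparing against DBL_MAX with the adjacent operator:

     #include <assert.h>
     #include <float.h>
     #include <math.h>

     int
     main (void)
     {
       double xs[] = { -INFINITY, -1.0, 0.0, DBL_MAX, INFINITY };
       int i;
       for (i = 0; i < 5; i++)
         assert ((xs[i] < INFINITY) == (xs[i] <= DBL_MAX));
       return 0;
     }
*/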
6266 /* Subroutine of fold() that optimizes comparisons of a division by
6267 a nonzero integer constant against an integer constant, i.e.
6268 X/C1 op C2.
6270 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6271 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6272 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6274 The function returns the constant folded tree if a simplification
6275 can be made, and NULL_TREE otherwise. */
6277 static tree
6278 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6280 tree prod, tmp, hi, lo;
6281 tree arg00 = TREE_OPERAND (arg0, 0);
6282 tree arg01 = TREE_OPERAND (arg0, 1);
6283 unsigned HOST_WIDE_INT lpart;
6284 HOST_WIDE_INT hpart;
6285 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6286 bool neg_overflow;
6287 int overflow;
6289 /* We have to do this the hard way to detect unsigned overflow.
6290 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6291 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6292 TREE_INT_CST_HIGH (arg01),
6293 TREE_INT_CST_LOW (arg1),
6294 TREE_INT_CST_HIGH (arg1),
6295 &lpart, &hpart, unsigned_p);
6296 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6297 prod = force_fit_type (prod, -1, overflow, false);
6298 neg_overflow = false;
6300 if (unsigned_p)
6302 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6303 lo = prod;
6305 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6306 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6307 TREE_INT_CST_HIGH (prod),
6308 TREE_INT_CST_LOW (tmp),
6309 TREE_INT_CST_HIGH (tmp),
6310 &lpart, &hpart, unsigned_p);
6311 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6312 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6313 TREE_CONSTANT_OVERFLOW (prod));
6315 else if (tree_int_cst_sgn (arg01) >= 0)
6317 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6318 switch (tree_int_cst_sgn (arg1))
6320 case -1:
6321 neg_overflow = true;
6322 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6323 hi = prod;
6324 break;
6326 case 0:
6327 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6328 hi = tmp;
6329 break;
6331 case 1:
6332 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6333 lo = prod;
6334 break;
6336 default:
6337 gcc_unreachable ();
6340 else
6342 /* A negative divisor reverses the relational operators. */
6343 code = swap_tree_comparison (code);
6345 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6346 switch (tree_int_cst_sgn (arg1))
6348 case -1:
6349 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6350 lo = prod;
6351 break;
6353 case 0:
6354 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6355 lo = tmp;
6356 break;
6358 case 1:
6359 neg_overflow = true;
6360 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6361 hi = prod;
6362 break;
6364 default:
6365 gcc_unreachable ();
6369 switch (code)
6371 case EQ_EXPR:
6372 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6373 return omit_one_operand (type, integer_zero_node, arg00);
6374 if (TREE_OVERFLOW (hi))
6375 return fold_build2 (GE_EXPR, type, arg00, lo);
6376 if (TREE_OVERFLOW (lo))
6377 return fold_build2 (LE_EXPR, type, arg00, hi);
6378 return build_range_check (type, arg00, 1, lo, hi);
6380 case NE_EXPR:
6381 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6382 return omit_one_operand (type, integer_one_node, arg00);
6383 if (TREE_OVERFLOW (hi))
6384 return fold_build2 (LT_EXPR, type, arg00, lo);
6385 if (TREE_OVERFLOW (lo))
6386 return fold_build2 (GT_EXPR, type, arg00, hi);
6387 return build_range_check (type, arg00, 0, lo, hi);
6389 case LT_EXPR:
6390 if (TREE_OVERFLOW (lo))
6392 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6393 return omit_one_operand (type, tmp, arg00);
6395 return fold_build2 (LT_EXPR, type, arg00, lo);
6397 case LE_EXPR:
6398 if (TREE_OVERFLOW (hi))
6400 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6401 return omit_one_operand (type, tmp, arg00);
6403 return fold_build2 (LE_EXPR, type, arg00, hi);
6405 case GT_EXPR:
6406 if (TREE_OVERFLOW (hi))
6408 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6409 return omit_one_operand (type, tmp, arg00);
6411 return fold_build2 (GT_EXPR, type, arg00, hi);
6413 case GE_EXPR:
6414 if (TREE_OVERFLOW (lo))
6416 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6417 return omit_one_operand (type, tmp, arg00);
6419 return fold_build2 (GE_EXPR, type, arg00, lo);
6421 default:
6422 break;
6425 return NULL_TREE;
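/* A stand-alone sketch (not GCC code) of the division-compare fold:
   with C99 truncating division, X / 3 == 2 holds exactly for X in
   [6, 8], the [lo, hi] range handed to build_range_check above:

     #include <assert.h>

     int
     main (void)
     {
       int x;
       for (x = -20; x <= 20; x++)
         assert ((x / 3 == 2) == (x >= 6 && x <= 8));
       return 0;
     }
*/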
6429 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6430 equality/inequality test, then return a simplified form of the test
6431 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6432 result type. */
6434 static tree
6435 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6436 tree result_type)
6438 /* If this is testing a single bit, we can optimize the test. */
6439 if ((code == NE_EXPR || code == EQ_EXPR)
6440 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6441 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6443 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6444 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6445 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6447 if (arg00 != NULL_TREE
6448 /* This is only a win if casting to a signed type is cheap,
6449 i.e. when arg00's type is not a partial mode. */
6450 && TYPE_PRECISION (TREE_TYPE (arg00))
6451 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6453 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6454 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6455 result_type, fold_convert (stype, arg00),
6456 build_int_cst (stype, 0));
6460 return NULL_TREE;
6463 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6464 equality/inequality test, then return a simplified form of
6465 the test using shifts and logical operations. Otherwise return
6466 NULL. RESULT_TYPE is the desired result type. */
6468 tree
6469 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6470 tree result_type)
6472 /* If this is testing a single bit, we can optimize the test. */
6473 if ((code == NE_EXPR || code == EQ_EXPR)
6474 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6475 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6477 tree inner = TREE_OPERAND (arg0, 0);
6478 tree type = TREE_TYPE (arg0);
6479 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6480 enum machine_mode operand_mode = TYPE_MODE (type);
6481 int ops_unsigned;
6482 tree signed_type, unsigned_type, intermediate_type;
6483 tree tem;
6485 /* First, see if we can fold the single bit test into a sign-bit
6486 test. */
6487 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6488 result_type);
6489 if (tem)
6490 return tem;
6492 /* Otherwise we have (A & C) != 0 where C is a single bit,
6493 convert that into ((A >> C2) & 1), where C2 = log2(C).
6494 Similarly for (A & C) == 0. */
6496 /* If INNER is a right shift of a constant and it plus BITNUM does
6497 not overflow, adjust BITNUM and INNER. */
6498 if (TREE_CODE (inner) == RSHIFT_EXPR
6499 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6500 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6501 && bitnum < TYPE_PRECISION (type)
6502 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6503 bitnum - TYPE_PRECISION (type)))
6505 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6506 inner = TREE_OPERAND (inner, 0);
6509 /* If we are going to be able to omit the AND below, we must do our
6510 operations as unsigned. If we must use the AND, we have a choice.
6511 Normally unsigned is faster, but for some machines signed is. */
6512 #ifdef LOAD_EXTEND_OP
6513 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6514 && !flag_syntax_only) ? 0 : 1;
6515 #else
6516 ops_unsigned = 1;
6517 #endif
6519 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6520 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6521 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6522 inner = fold_convert (intermediate_type, inner);
6524 if (bitnum != 0)
6525 inner = build2 (RSHIFT_EXPR, intermediate_type,
6526 inner, size_int (bitnum));
6528 if (code == EQ_EXPR)
6529 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6530 inner, integer_one_node);
6532 /* Put the AND last so it can combine with more things. */
6533 inner = build2 (BIT_AND_EXPR, intermediate_type,
6534 inner, integer_one_node);
6536 /* Make sure to return the proper type. */
6537 inner = fold_convert (result_type, inner);
6539 return inner;
6541 return NULL_TREE;
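/* A stand-alone sketch (not GCC code) of both single-bit rewrites:
   (A & 8) != 0 becomes (A >> 3) & 1, and a sign-bit mask becomes a
   plain sign test (a 32-bit int is assumed below):

     #include <assert.h>

     int
     main (void)
     {
       unsigned a;
       int b;
       for (a = 0; a < 64; a++)
         assert (((a & 8) != 0) == ((a >> 3) & 1));
       for (b = -4; b <= 4; b++)
         assert (((b & (1u << 31)) != 0) == (b < 0));
       return 0;
     }
*/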
6544 /* Check whether we are allowed to reorder operands arg0 and arg1,
6545 such that the evaluation of arg1 occurs before arg0. */
6547 static bool
6548 reorder_operands_p (tree arg0, tree arg1)
6550 if (! flag_evaluation_order)
6551 return true;
6552 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6553 return true;
6554 return ! TREE_SIDE_EFFECTS (arg0)
6555 && ! TREE_SIDE_EFFECTS (arg1);
6558 /* Test whether it is preferable to swap two operands, ARG0 and
6559 ARG1, for example because ARG0 is an integer constant and ARG1
6560 isn't. If REORDER is true, only recommend swapping if we can
6561 evaluate the operands in reverse order. */
6563 bool
6564 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6566 STRIP_SIGN_NOPS (arg0);
6567 STRIP_SIGN_NOPS (arg1);
6569 if (TREE_CODE (arg1) == INTEGER_CST)
6570 return 0;
6571 if (TREE_CODE (arg0) == INTEGER_CST)
6572 return 1;
6574 if (TREE_CODE (arg1) == REAL_CST)
6575 return 0;
6576 if (TREE_CODE (arg0) == REAL_CST)
6577 return 1;
6579 if (TREE_CODE (arg1) == COMPLEX_CST)
6580 return 0;
6581 if (TREE_CODE (arg0) == COMPLEX_CST)
6582 return 1;
6584 if (TREE_CONSTANT (arg1))
6585 return 0;
6586 if (TREE_CONSTANT (arg0))
6587 return 1;
6589 if (optimize_size)
6590 return 0;
6592 if (reorder && flag_evaluation_order
6593 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6594 return 0;
6596 if (DECL_P (arg1))
6597 return 0;
6598 if (DECL_P (arg0))
6599 return 1;
6601 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6602 for commutative and comparison operators. Ensuring a canonical
6603 form allows the optimizers to find additional redundancies without
6604 having to explicitly check for both orderings. */
6605 if (TREE_CODE (arg0) == SSA_NAME
6606 && TREE_CODE (arg1) == SSA_NAME
6607 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6608 return 1;
6610 return 0;
6613 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6614 ARG0 is extended to a wider type. */
6616 static tree
6617 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6619 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6620 tree arg1_unw;
6621 tree shorter_type, outer_type;
6622 tree min, max;
6623 bool above, below;
6625 if (arg0_unw == arg0)
6626 return NULL_TREE;
6627 shorter_type = TREE_TYPE (arg0_unw);
6629 #ifdef HAVE_canonicalize_funcptr_for_compare
6630 /* Disable this optimization if we're casting a function pointer
6631 type on targets that require function pointer canonicalization. */
6632 if (HAVE_canonicalize_funcptr_for_compare
6633 && TREE_CODE (shorter_type) == POINTER_TYPE
6634 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6635 return NULL_TREE;
6636 #endif
6638 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6639 return NULL_TREE;
6641 arg1_unw = get_unwidened (arg1, shorter_type);
6643 /* If possible, express the comparison in the shorter mode. */
6644 if ((code == EQ_EXPR || code == NE_EXPR
6645 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6646 && (TREE_TYPE (arg1_unw) == shorter_type
6647 || (TREE_CODE (arg1_unw) == INTEGER_CST
6648 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6649 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6650 && int_fits_type_p (arg1_unw, shorter_type))))
6651 return fold_build2 (code, type, arg0_unw,
6652 fold_convert (shorter_type, arg1_unw));
6654 if (TREE_CODE (arg1_unw) != INTEGER_CST
6655 || TREE_CODE (shorter_type) != INTEGER_TYPE
6656 || !int_fits_type_p (arg1_unw, shorter_type))
6657 return NULL_TREE;
6659 /* If we are comparing with an integer that does not fit into the range
6660 of the shorter type, the result is known. */
6661 outer_type = TREE_TYPE (arg1_unw);
6662 min = lower_bound_in_type (outer_type, shorter_type);
6663 max = upper_bound_in_type (outer_type, shorter_type);
6665 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6666 max, arg1_unw));
6667 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6668 arg1_unw, min));
6670 switch (code)
6672 case EQ_EXPR:
6673 if (above || below)
6674 return omit_one_operand (type, integer_zero_node, arg0);
6675 break;
6677 case NE_EXPR:
6678 if (above || below)
6679 return omit_one_operand (type, integer_one_node, arg0);
6680 break;
6682 case LT_EXPR:
6683 case LE_EXPR:
6684 if (above)
6685 return omit_one_operand (type, integer_one_node, arg0);
6686 else if (below)
6687 return omit_one_operand (type, integer_zero_node, arg0);
6689 case GT_EXPR:
6690 case GE_EXPR:
6691 if (above)
6692 return omit_one_operand (type, integer_zero_node, arg0);
6693 else if (below)
6694 return omit_one_operand (type, integer_one_node, arg0);
6696 default:
6697 break;
6700 return NULL_TREE;
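/* A stand-alone sketch (not GCC code) of the known-result case: when
   the constant lies outside the narrower type's range, the widened
   comparison folds to a constant without looking at the operand (a
   16-bit short is assumed for the 70000 bound):

     #include <assert.h>

     int
     main (void)
     {
       short s;
       for (s = -10; s <= 10; s++)
         {
           assert (((int) s == 70000) == 0);
           assert (((int) s < 70000) == 1);
         }
       return 0;
     }
*/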
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6704 ARG0 just the signedness is changed. */
6706 static tree
6707 fold_sign_changed_comparison (enum tree_code code, tree type,
6708 tree arg0, tree arg1)
6710 tree arg0_inner, tmp;
6711 tree inner_type, outer_type;
6713 if (TREE_CODE (arg0) != NOP_EXPR
6714 && TREE_CODE (arg0) != CONVERT_EXPR)
6715 return NULL_TREE;
6717 outer_type = TREE_TYPE (arg0);
6718 arg0_inner = TREE_OPERAND (arg0, 0);
6719 inner_type = TREE_TYPE (arg0_inner);
6721 #ifdef HAVE_canonicalize_funcptr_for_compare
6722 /* Disable this optimization if we're casting a function pointer
6723 type on targets that require function pointer canonicalization. */
6724 if (HAVE_canonicalize_funcptr_for_compare
6725 && TREE_CODE (inner_type) == POINTER_TYPE
6726 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6727 return NULL_TREE;
6728 #endif
6730 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6731 return NULL_TREE;
6733 if (TREE_CODE (arg1) != INTEGER_CST
6734 && !((TREE_CODE (arg1) == NOP_EXPR
6735 || TREE_CODE (arg1) == CONVERT_EXPR)
6736 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6737 return NULL_TREE;
6739 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6740 && code != NE_EXPR
6741 && code != EQ_EXPR)
6742 return NULL_TREE;
6744 if (TREE_CODE (arg1) == INTEGER_CST)
6746 tmp = build_int_cst_wide (inner_type,
6747 TREE_INT_CST_LOW (arg1),
6748 TREE_INT_CST_HIGH (arg1));
6749 arg1 = force_fit_type (tmp, 0,
6750 TREE_OVERFLOW (arg1),
6751 TREE_CONSTANT_OVERFLOW (arg1));
6753 else
6754 arg1 = fold_convert (inner_type, arg1);
6756 return fold_build2 (code, type, arg0_inner, arg1);
6759 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6760 the step of the array. Reconstructs s and delta in the case of s * delta
6761 being an integer constant (and thus already folded).
6762 ADDR is the address. OP1 is the multiplicative expression.
6763 If the function succeeds, the new address expression is returned. Otherwise
6764 NULL_TREE is returned. */
6766 static tree
6767 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6769 tree s, delta, step;
6770 tree ref = TREE_OPERAND (addr, 0), pref;
6771 tree ret, pos;
6772 tree itype;
6774 /* Canonicalize op1 into a possibly non-constant delta
6775 and an INTEGER_CST s. */
6776 if (TREE_CODE (op1) == MULT_EXPR)
6778 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6780 STRIP_NOPS (arg0);
6781 STRIP_NOPS (arg1);
6783 if (TREE_CODE (arg0) == INTEGER_CST)
6785 s = arg0;
6786 delta = arg1;
6788 else if (TREE_CODE (arg1) == INTEGER_CST)
6790 s = arg1;
6791 delta = arg0;
6793 else
6794 return NULL_TREE;
6796 else if (TREE_CODE (op1) == INTEGER_CST)
6798 delta = op1;
6799 s = NULL_TREE;
6801 else
6803 /* Treat op1 as delta * 1. */
6804 delta = op1;
6805 s = integer_one_node;
6808 for (;; ref = TREE_OPERAND (ref, 0))
6810 if (TREE_CODE (ref) == ARRAY_REF)
6812 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6813 if (! itype)
6814 continue;
6816 step = array_ref_element_size (ref);
6817 if (TREE_CODE (step) != INTEGER_CST)
6818 continue;
6820 if (s)
6822 if (! tree_int_cst_equal (step, s))
6823 continue;
6825 else
6827 /* Check whether delta is a multiple of step. */
6828 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6829 if (! tmp)
6830 continue;
6831 delta = tmp;
6834 break;
6837 if (!handled_component_p (ref))
6838 return NULL_TREE;
6841 /* We found a suitable array reference. Copy everything up to it,
6842 and replace the index. */
6844 pref = TREE_OPERAND (addr, 0);
6845 ret = copy_node (pref);
6846 pos = ret;
6848 while (pref != ref)
6850 pref = TREE_OPERAND (pref, 0);
6851 TREE_OPERAND (pos, 0) = copy_node (pref);
6852 pos = TREE_OPERAND (pos, 0);
6855 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6856 fold_convert (itype,
6857 TREE_OPERAND (pos, 1)),
6858 fold_convert (itype, delta));
6860 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
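/* A stand-alone sketch (not GCC code) of the index motion: adding
   delta steps (delta * s bytes, with s the element size) to &a[i]
   lands on &a[i + delta], which in C is the ordinary scaled pointer
   addition:

     #include <assert.h>

     int
     main (void)
     {
       int a[16];
       int i = 3, delta = 5;
       assert (&a[i] + delta == &a[i + delta]);
       return 0;
     }
*/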
6864 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6865 means A >= Y && A != MAX, but in this case we know that
6866 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6868 static tree
6869 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6871 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6873 if (TREE_CODE (bound) == LT_EXPR)
6874 a = TREE_OPERAND (bound, 0);
6875 else if (TREE_CODE (bound) == GT_EXPR)
6876 a = TREE_OPERAND (bound, 1);
6877 else
6878 return NULL_TREE;
6880 typea = TREE_TYPE (a);
6881 if (!INTEGRAL_TYPE_P (typea)
6882 && !POINTER_TYPE_P (typea))
6883 return NULL_TREE;
6885 if (TREE_CODE (ineq) == LT_EXPR)
6887 a1 = TREE_OPERAND (ineq, 1);
6888 y = TREE_OPERAND (ineq, 0);
6890 else if (TREE_CODE (ineq) == GT_EXPR)
6892 a1 = TREE_OPERAND (ineq, 0);
6893 y = TREE_OPERAND (ineq, 1);
6895 else
6896 return NULL_TREE;
6898 if (TREE_TYPE (a1) != typea)
6899 return NULL_TREE;
6901 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6902 if (!integer_onep (diff))
6903 return NULL_TREE;
6905 return fold_build2 (GE_EXPR, type, a, y);
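/* A stand-alone sketch (not GCC code) of the rewrite: over integers,
   A + 1 > Y is exactly A >= Y, and the A < X conjunct guarantees
   A + 1 cannot overflow in the original expression:

     #include <assert.h>

     int
     main (void)
     {
       int a, y, x = 5;
       for (a = -10; a <= 10; a++)
         for (y = -10; y <= 10; y++)
           assert ((a < x && a + 1 > y) == (a < x && a >= y));
       return 0;
     }
*/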
6908 /* Fold a sum or difference of at least one multiplication.
6909 Returns the folded tree or NULL if no simplification could be made. */
6911 static tree
6912 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6914 tree arg00, arg01, arg10, arg11;
6915 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6917 /* (A * C) +- (B * C) -> (A+-B) * C.
6918 (A * C) +- A -> A * (C+-1).
6919 We are most concerned about the case where C is a constant,
6920 but other combinations show up during loop reduction. Since
6921 it is not difficult, try all four possibilities. */
6923 if (TREE_CODE (arg0) == MULT_EXPR)
6925 arg00 = TREE_OPERAND (arg0, 0);
6926 arg01 = TREE_OPERAND (arg0, 1);
6928 else
6930 arg00 = arg0;
6931 arg01 = build_one_cst (type);
6933 if (TREE_CODE (arg1) == MULT_EXPR)
6935 arg10 = TREE_OPERAND (arg1, 0);
6936 arg11 = TREE_OPERAND (arg1, 1);
6938 else
6940 arg10 = arg1;
6941 arg11 = build_one_cst (type);
6943 same = NULL_TREE;
6945 if (operand_equal_p (arg01, arg11, 0))
6946 same = arg01, alt0 = arg00, alt1 = arg10;
6947 else if (operand_equal_p (arg00, arg10, 0))
6948 same = arg00, alt0 = arg01, alt1 = arg11;
6949 else if (operand_equal_p (arg00, arg11, 0))
6950 same = arg00, alt0 = arg01, alt1 = arg10;
6951 else if (operand_equal_p (arg01, arg10, 0))
6952 same = arg01, alt0 = arg00, alt1 = arg11;
6954 /* No identical multiplicands; see if we can find a common
6955 power-of-two factor in non-power-of-two multiplies. This
6956 can help in multi-dimensional array access. */
6957 else if (host_integerp (arg01, 0)
6958 && host_integerp (arg11, 0))
6960 HOST_WIDE_INT int01, int11, tmp;
6961 bool swap = false;
6962 tree maybe_same;
6963 int01 = TREE_INT_CST_LOW (arg01);
6964 int11 = TREE_INT_CST_LOW (arg11);
6966 /* Move min of absolute values to int11. */
6967 if ((int01 >= 0 ? int01 : -int01)
6968 < (int11 >= 0 ? int11 : -int11))
6970 tmp = int01, int01 = int11, int11 = tmp;
6971 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6972 maybe_same = arg01;
6973 swap = true;
6975 else
6976 maybe_same = arg11;
6978 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6980 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6981 build_int_cst (TREE_TYPE (arg00),
6982 int01 / int11));
6983 alt1 = arg10;
6984 same = maybe_same;
6985 if (swap)
6986 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6990 if (same)
6991 return fold_build2 (MULT_EXPR, type,
6992 fold_build2 (code, type,
6993 fold_convert (type, alt0),
6994 fold_convert (type, alt1)),
6995 fold_convert (type, same));
6997 return NULL_TREE;
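/* A stand-alone sketch (not GCC code) of the distributive folds
   A*C + B*C -> (A + B)*C and A*C + A -> A*(C + 1), valid when the
   intermediate products do not overflow:

     #include <assert.h>

     int
     main (void)
     {
       long a, b, c = 12;
       for (a = -6; a <= 6; a++)
         for (b = -6; b <= 6; b++)
           {
             assert (a * c + b * c == (a + b) * c);
             assert (a * c + a == a * (c + 1));
           }
       return 0;
     }
*/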
7000 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7001 specified by EXPR into the buffer PTR of length LEN bytes.
7002 Return the number of bytes placed in the buffer, or zero
7003 upon failure. */
7005 static int
7006 native_encode_int (tree expr, unsigned char *ptr, int len)
7008 tree type = TREE_TYPE (expr);
7009 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7010 int byte, offset, word, words;
7011 unsigned char value;
7013 if (total_bytes > len)
7014 return 0;
7015 words = total_bytes / UNITS_PER_WORD;
7017 for (byte = 0; byte < total_bytes; byte++)
7019 int bitpos = byte * BITS_PER_UNIT;
7020 if (bitpos < HOST_BITS_PER_WIDE_INT)
7021 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7022 else
7023 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7024 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7026 if (total_bytes > UNITS_PER_WORD)
7028 word = byte / UNITS_PER_WORD;
7029 if (WORDS_BIG_ENDIAN)
7030 word = (words - 1) - word;
7031 offset = word * UNITS_PER_WORD;
7032 if (BYTES_BIG_ENDIAN)
7033 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7034 else
7035 offset += byte % UNITS_PER_WORD;
7037 else
7038 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7039 ptr[offset] = value;
7041 return total_bytes;
7045 /* Subroutine of native_encode_expr. Encode the REAL_CST
7046 specified by EXPR into the buffer PTR of length LEN bytes.
7047 Return the number of bytes placed in the buffer, or zero
7048 upon failure. */
7050 static int
7051 native_encode_real (tree expr, unsigned char *ptr, int len)
7053 tree type = TREE_TYPE (expr);
7054 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7055 int byte, offset, word, words, bitpos;
7056 unsigned char value;
7058 /* There are always 32 bits in each long, no matter the size of
7059 the host's long. We handle floating point representations with
7060 up to 192 bits. */
7061 long tmp[6];
7063 if (total_bytes > len)
7064 return 0;
7065 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7067 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7069 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7070 bitpos += BITS_PER_UNIT)
7072 byte = (bitpos / BITS_PER_UNIT) & 3;
7073 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7075 if (UNITS_PER_WORD < 4)
7077 word = byte / UNITS_PER_WORD;
7078 if (WORDS_BIG_ENDIAN)
7079 word = (words - 1) - word;
7080 offset = word * UNITS_PER_WORD;
7081 if (BYTES_BIG_ENDIAN)
7082 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7083 else
7084 offset += byte % UNITS_PER_WORD;
7086 else
7087 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7088 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7090 return total_bytes;
7093 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7094 specified by EXPR into the buffer PTR of length LEN bytes.
7095 Return the number of bytes placed in the buffer, or zero
7096 upon failure. */
7098 static int
7099 native_encode_complex (tree expr, unsigned char *ptr, int len)
7101 int rsize, isize;
7102 tree part;
7104 part = TREE_REALPART (expr);
7105 rsize = native_encode_expr (part, ptr, len);
7106 if (rsize == 0)
7107 return 0;
7108 part = TREE_IMAGPART (expr);
7109 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7110 if (isize != rsize)
7111 return 0;
7112 return rsize + isize;
7116 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7117 specified by EXPR into the buffer PTR of length LEN bytes.
7118 Return the number of bytes placed in the buffer, or zero
7119 upon failure. */
7121 static int
7122 native_encode_vector (tree expr, unsigned char *ptr, int len)
7124 int i, size, offset, count;
7125 tree itype, elem, elements;
7127 offset = 0;
7128 elements = TREE_VECTOR_CST_ELTS (expr);
7129 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7130 itype = TREE_TYPE (TREE_TYPE (expr));
7131 size = GET_MODE_SIZE (TYPE_MODE (itype));
7132 for (i = 0; i < count; i++)
7134 if (elements)
7136 elem = TREE_VALUE (elements);
7137 elements = TREE_CHAIN (elements);
7139 else
7140 elem = NULL_TREE;
7142 if (elem)
7144 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7145 return 0;
7147 else
7149 if (offset + size > len)
7150 return 0;
7151 memset (ptr+offset, 0, size);
7153 offset += size;
7155 return offset;
7159 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7160 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7161 buffer PTR of length LEN bytes. Return the number of bytes
7162 placed in the buffer, or zero upon failure. */
7164 static int
7165 native_encode_expr (tree expr, unsigned char *ptr, int len)
7167 switch (TREE_CODE (expr))
7169 case INTEGER_CST:
7170 return native_encode_int (expr, ptr, len);
7172 case REAL_CST:
7173 return native_encode_real (expr, ptr, len);
7175 case COMPLEX_CST:
7176 return native_encode_complex (expr, ptr, len);
7178 case VECTOR_CST:
7179 return native_encode_vector (expr, ptr, len);
7181 default:
7182 return 0;
7187 /* Subroutine of native_interpret_expr. Interpret the contents of
7188 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7189 If the buffer cannot be interpreted, return NULL_TREE. */
7191 static tree
7192 native_interpret_int (tree type, unsigned char *ptr, int len)
7194 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7195 int byte, offset, word, words;
7196 unsigned char value;
7197 unsigned HOST_WIDE_INT lo = 0;
7198 HOST_WIDE_INT hi = 0;
7200 if (total_bytes > len)
7201 return NULL_TREE;
7202 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7203 return NULL_TREE;
7204 words = total_bytes / UNITS_PER_WORD;
7206 for (byte = 0; byte < total_bytes; byte++)
7208 int bitpos = byte * BITS_PER_UNIT;
7209 if (total_bytes > UNITS_PER_WORD)
7211 word = byte / UNITS_PER_WORD;
7212 if (WORDS_BIG_ENDIAN)
7213 word = (words - 1) - word;
7214 offset = word * UNITS_PER_WORD;
7215 if (BYTES_BIG_ENDIAN)
7216 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7217 else
7218 offset += byte % UNITS_PER_WORD;
7220 else
7221 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7222 value = ptr[offset];
7224 if (bitpos < HOST_BITS_PER_WIDE_INT)
7225 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7226 else
7227 hi |= (unsigned HOST_WIDE_INT) value
7228 << (bitpos - HOST_BITS_PER_WIDE_INT);
7231 return force_fit_type (build_int_cst_wide (type, lo, hi),
7232 0, false, false);
7236 /* Subroutine of native_interpret_expr. Interpret the contents of
7237 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7238 If the buffer cannot be interpreted, return NULL_TREE. */
7240 static tree
7241 native_interpret_real (tree type, unsigned char *ptr, int len)
7243 enum machine_mode mode = TYPE_MODE (type);
7244 int total_bytes = GET_MODE_SIZE (mode);
7245 int byte, offset, word, words, bitpos;
7246 unsigned char value;
7247 /* There are always 32 bits in each long, no matter the size of
7248 the host's long. We handle floating point representations with
7249 up to 192 bits. */
7250 REAL_VALUE_TYPE r;
7251 long tmp[6];
7254 if (total_bytes > len || total_bytes > 24)
7255 return NULL_TREE;
7256 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7258 memset (tmp, 0, sizeof (tmp));
7259 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7260 bitpos += BITS_PER_UNIT)
7262 byte = (bitpos / BITS_PER_UNIT) & 3;
7263 if (UNITS_PER_WORD < 4)
7265 word = byte / UNITS_PER_WORD;
7266 if (WORDS_BIG_ENDIAN)
7267 word = (words - 1) - word;
7268 offset = word * UNITS_PER_WORD;
7269 if (BYTES_BIG_ENDIAN)
7270 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7271 else
7272 offset += byte % UNITS_PER_WORD;
7274 else
7275 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7276 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7278 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7281 real_from_target (&r, tmp, mode);
7282 return build_real (type, r);
7286 /* Subroutine of native_interpret_expr. Interpret the contents of
7287 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7288 If the buffer cannot be interpreted, return NULL_TREE. */
7290 static tree
7291 native_interpret_complex (tree type, unsigned char *ptr, int len)
7293 tree etype, rpart, ipart;
7294 int size;
7296 etype = TREE_TYPE (type);
7297 size = GET_MODE_SIZE (TYPE_MODE (etype));
7298 if (size * 2 > len)
7299 return NULL_TREE;
7300 rpart = native_interpret_expr (etype, ptr, size);
7301 if (!rpart)
7302 return NULL_TREE;
7303 ipart = native_interpret_expr (etype, ptr+size, size);
7304 if (!ipart)
7305 return NULL_TREE;
7306 return build_complex (type, rpart, ipart);
7310 /* Subroutine of native_interpret_expr. Interpret the contents of
7311 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7312 If the buffer cannot be interpreted, return NULL_TREE. */
7314 static tree
7315 native_interpret_vector (tree type, unsigned char *ptr, int len)
7317 tree etype, elem, elements;
7318 int i, size, count;
7320 etype = TREE_TYPE (type);
7321 size = GET_MODE_SIZE (TYPE_MODE (etype));
7322 count = TYPE_VECTOR_SUBPARTS (type);
7323 if (size * count > len)
7324 return NULL_TREE;
7326 elements = NULL_TREE;
7327 for (i = count - 1; i >= 0; i--)
7329 elem = native_interpret_expr (etype, ptr+(i*size), size);
7330 if (!elem)
7331 return NULL_TREE;
7332 elements = tree_cons (NULL_TREE, elem, elements);
7334 return build_vector (type, elements);
7338 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7339 the buffer PTR of length LEN as a constant of type TYPE. For
7340 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7341 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7342 return NULL_TREE. */
7344 static tree
7345 native_interpret_expr (tree type, unsigned char *ptr, int len)
7347 switch (TREE_CODE (type))
7349 case INTEGER_TYPE:
7350 case ENUMERAL_TYPE:
7351 case BOOLEAN_TYPE:
7352 return native_interpret_int (type, ptr, len);
7354 case REAL_TYPE:
7355 return native_interpret_real (type, ptr, len);
7357 case COMPLEX_TYPE:
7358 return native_interpret_complex (type, ptr, len);
7360 case VECTOR_TYPE:
7361 return native_interpret_vector (type, ptr, len);
7363 default:
7364 return NULL_TREE;
7369 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7370 TYPE at compile-time. If we're unable to perform the conversion
7371 return NULL_TREE. */
7373 static tree
7374 fold_view_convert_expr (tree type, tree expr)
7376 /* We support up to 512-bit values (for V8DFmode). */
7377 unsigned char buffer[64];
7378 int len;
7380 /* Check that the host and target are sane. */
7381 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7382 return NULL_TREE;
7384 len = native_encode_expr (expr, buffer, sizeof (buffer));
7385 if (len == 0)
7386 return NULL_TREE;
7388 return native_interpret_expr (type, buffer, len);
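/* Editorial sketch (not compiler code): fold_view_convert_expr
   computes at compile time what the reinterpretation would do at run
   time.  For example, viewing the IEEE single-precision 1.0f as a
   32-bit unsigned integer yields 0x3f800000:  */

#include <string.h>

static unsigned int
example_view_float_as_uint (float f)
{
  unsigned int u;
  memcpy (&u, &f, sizeof u);  /* run-time analogue of the constant fold */
  return u;                   /* example_view_float_as_uint (1.0f)
                                 == 0x3f800000 on IEEE hosts */
}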
7392 /* Fold a unary expression of code CODE and type TYPE with operand
7393 OP0. Return the folded expression if folding is successful.
7394 Otherwise, return NULL_TREE. */
7396 tree
7397 fold_unary (enum tree_code code, tree type, tree op0)
7399 tree tem;
7400 tree arg0;
7401 enum tree_code_class kind = TREE_CODE_CLASS (code);
7403 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7404 && TREE_CODE_LENGTH (code) == 1);
7406 arg0 = op0;
7407 if (arg0)
7409 if (code == NOP_EXPR || code == CONVERT_EXPR
7410 || code == FLOAT_EXPR || code == ABS_EXPR)
7412 /* Don't use STRIP_NOPS, because signedness of argument type
7413 matters. */
7414 STRIP_SIGN_NOPS (arg0);
7416 else
7418 /* Strip any conversions that don't change the mode. This
7419 is safe for every expression, except for a comparison
7420 expression because its signedness is derived from its
7421 operands.
7423 Note that this is done as an internal manipulation within
7424 the constant folder, in order to find the simplest
7425 representation of the arguments so that their form can be
7426 studied. In any case, the appropriate type conversions
7427 should be put back in the tree that will get out of the
7428 constant folder. */
7429 STRIP_NOPS (arg0);
7433 if (TREE_CODE_CLASS (code) == tcc_unary)
7435 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7436 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7437 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7438 else if (TREE_CODE (arg0) == COND_EXPR)
7440 tree arg01 = TREE_OPERAND (arg0, 1);
7441 tree arg02 = TREE_OPERAND (arg0, 2);
7442 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7443 arg01 = fold_build1 (code, type, arg01);
7444 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7445 arg02 = fold_build1 (code, type, arg02);
7446 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7447 arg01, arg02);
7449 /* If this was a conversion, and all we did was to move it
7450 inside the COND_EXPR, bring it back out. But leave it if
7451 it is a conversion from integer to integer and the
7452 result precision is no wider than a word since such a
7453 conversion is cheap and may be optimized away by combine,
7454 while it couldn't if it were outside the COND_EXPR. Then return
7455 so we don't get into an infinite recursion loop taking the
7456 conversion out and then back in. */
7458 if ((code == NOP_EXPR || code == CONVERT_EXPR
7459 || code == NON_LVALUE_EXPR)
7460 && TREE_CODE (tem) == COND_EXPR
7461 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7462 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7463 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7464 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7465 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7466 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7467 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7468 && (INTEGRAL_TYPE_P
7469 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7470 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7471 || flag_syntax_only))
7472 tem = build1 (code, type,
7473 build3 (COND_EXPR,
7474 TREE_TYPE (TREE_OPERAND
7475 (TREE_OPERAND (tem, 1), 0)),
7476 TREE_OPERAND (tem, 0),
7477 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7478 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7479 return tem;
7481 else if (COMPARISON_CLASS_P (arg0))
7483 if (TREE_CODE (type) == BOOLEAN_TYPE)
7485 arg0 = copy_node (arg0);
7486 TREE_TYPE (arg0) = type;
7487 return arg0;
7489 else if (TREE_CODE (type) != INTEGER_TYPE)
7490 return fold_build3 (COND_EXPR, type, arg0,
7491 fold_build1 (code, type,
7492 integer_one_node),
7493 fold_build1 (code, type,
7494 integer_zero_node));
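/* Editorial examples of the distributions above:

     - (a, b)             becomes   a, -b                      (COMPOUND_EXPR)
     (float) (p ? i : j)  becomes   p ? (float) i : (float) j  (COND_EXPR)
     (float) (x < y)      becomes   x < y ? (float) 1 : (float) 0

   where the COND_EXPR case may hoist a cheap integer conversion back
   out again, as described in the comment above.  */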
7498 switch (code)
7500 case NOP_EXPR:
7501 case FLOAT_EXPR:
7502 case CONVERT_EXPR:
7503 case FIX_TRUNC_EXPR:
7504 case FIX_CEIL_EXPR:
7505 case FIX_FLOOR_EXPR:
7506 case FIX_ROUND_EXPR:
7507 if (TREE_TYPE (op0) == type)
7508 return op0;
7510 /* If we have (type) (a CMP b) and type is an integral type, return
7511 a new expression involving the new type. */
7512 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7513 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7514 TREE_OPERAND (op0, 1));
7516 /* Handle cases of two conversions in a row. */
7517 if (TREE_CODE (op0) == NOP_EXPR
7518 || TREE_CODE (op0) == CONVERT_EXPR)
7520 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7521 tree inter_type = TREE_TYPE (op0);
7522 int inside_int = INTEGRAL_TYPE_P (inside_type);
7523 int inside_ptr = POINTER_TYPE_P (inside_type);
7524 int inside_float = FLOAT_TYPE_P (inside_type);
7525 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7526 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7527 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7528 int inter_int = INTEGRAL_TYPE_P (inter_type);
7529 int inter_ptr = POINTER_TYPE_P (inter_type);
7530 int inter_float = FLOAT_TYPE_P (inter_type);
7531 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7532 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7533 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7534 int final_int = INTEGRAL_TYPE_P (type);
7535 int final_ptr = POINTER_TYPE_P (type);
7536 int final_float = FLOAT_TYPE_P (type);
7537 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7538 unsigned int final_prec = TYPE_PRECISION (type);
7539 int final_unsignedp = TYPE_UNSIGNED (type);
7541 /* In addition to the cases of two conversions in a row
7542 handled below, if we are converting something to its own
7543 type via an object of identical or wider precision, neither
7544 conversion is needed. */
7545 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7546 && (((inter_int || inter_ptr) && final_int)
7547 || (inter_float && final_float))
7548 && inter_prec >= final_prec)
7549 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7551 /* Likewise, if the intermediate and final types are either both
7552 float or both integer, we don't need the middle conversion if
7553 it is wider than the final type and doesn't change the signedness
7554 (for integers). Avoid this if the final type is a pointer
7555 since then we sometimes need the inner conversion. Likewise if
7556 the outer type has a precision not equal to the size of its mode. */
7557 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7558 || (inter_float && inside_float)
7559 || (inter_vec && inside_vec))
7560 && inter_prec >= inside_prec
7561 && (inter_float || inter_vec
7562 || inter_unsignedp == inside_unsignedp)
7563 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7564 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7565 && ! final_ptr
7566 && (! final_vec || inter_prec == inside_prec))
7567 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7569 /* If we have a sign-extension of a zero-extended value, we can
7570 replace that by a single zero-extension. */
7571 if (inside_int && inter_int && final_int
7572 && inside_prec < inter_prec && inter_prec < final_prec
7573 && inside_unsignedp && !inter_unsignedp)
7574 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7576 /* Two conversions in a row are not needed unless:
7577 - some conversion is floating-point (overstrict for now), or
7578 - some conversion is a vector (overstrict for now), or
7579 - the intermediate type is narrower than both initial and
7580 final, or
7581 - the intermediate type and innermost type differ in signedness,
7582 and the outermost type is wider than the intermediate, or
7583 - the initial type is a pointer type and the precisions of the
7584 intermediate and final types differ, or
7585 - the final type is a pointer type and the precisions of the
7586 initial and intermediate types differ, or
7587 - the final type is a pointer type and the initial type is not, or
7588 - the initial type is a pointer to an array and the final type
7589 is not. */
7590 /* Java pointer type conversions generate checks in some
7591 cases, so we explicitly disallow this optimization. */
7592 if (! inside_float && ! inter_float && ! final_float
7593 && ! inside_vec && ! inter_vec && ! final_vec
7594 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7595 && ! (inside_int && inter_int
7596 && inter_unsignedp != inside_unsignedp
7597 && inter_prec < final_prec)
7598 && ((inter_unsignedp && inter_prec > inside_prec)
7599 == (final_unsignedp && final_prec > inter_prec))
7600 && ! (inside_ptr && inter_prec != final_prec)
7601 && ! (final_ptr && inside_prec != inter_prec)
7602 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7603 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7604 && final_ptr == inside_ptr
7605 && ! (inside_ptr
7606 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7607 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7608 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7609 && final_ptr))
7610 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
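/* Editorial examples of the rules above, assuming the usual 8/32/64
   bit widths:

     (int) (long) i   -> i          for int i: converting a value to its
                                    own type via a wider one is a no-op;
     (long) (int) c   -> (long) c   for unsigned char c: sign extension
                                    of a zero-extended value is a single
                                    zero extension.  */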
7613 /* Handle (T *)&A.B.C for A being of type T and B and C
7614 living at offset zero. This occurs frequently in
7615 C++ upcasting and then accessing the base. */
7616 if (TREE_CODE (op0) == ADDR_EXPR
7617 && POINTER_TYPE_P (type)
7618 && handled_component_p (TREE_OPERAND (op0, 0)))
7620 HOST_WIDE_INT bitsize, bitpos;
7621 tree offset;
7622 enum machine_mode mode;
7623 int unsignedp, volatilep;
7624 tree base = TREE_OPERAND (op0, 0);
7625 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7626 &mode, &unsignedp, &volatilep, false);
7627 /* If the reference was to a (constant) zero offset, we can use
7628 the address of the base if it has the same base type
7629 as the result type. */
7630 if (! offset && bitpos == 0
7631 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7632 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7633 return fold_convert (type, build_fold_addr_expr (base));
7636 if (TREE_CODE (op0) == MODIFY_EXPR
7637 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7638 /* Detect assigning a bitfield. */
7639 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7640 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7642 /* Don't leave an assignment inside a conversion
7643 unless assigning a bitfield. */
7644 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7645 /* First do the assignment, then return converted constant. */
7646 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7647 TREE_NO_WARNING (tem) = 1;
7648 TREE_USED (tem) = 1;
7649 return tem;
7652 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7653 constant (if x has a signed type, the sign bit cannot be set
7654 in c). This folds extension into the BIT_AND_EXPR. */
7655 if (INTEGRAL_TYPE_P (type)
7656 && TREE_CODE (type) != BOOLEAN_TYPE
7657 && TREE_CODE (op0) == BIT_AND_EXPR
7658 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7660 tree and = op0;
7661 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7662 int change = 0;
7664 if (TYPE_UNSIGNED (TREE_TYPE (and))
7665 || (TYPE_PRECISION (type)
7666 <= TYPE_PRECISION (TREE_TYPE (and))))
7667 change = 1;
7668 else if (TYPE_PRECISION (TREE_TYPE (and1))
7669 <= HOST_BITS_PER_WIDE_INT
7670 && host_integerp (and1, 1))
7672 unsigned HOST_WIDE_INT cst;
7674 cst = tree_low_cst (and1, 1);
7675 cst &= (HOST_WIDE_INT) -1
7676 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7677 change = (cst == 0);
7678 #ifdef LOAD_EXTEND_OP
7679 if (change
7680 && !flag_syntax_only
7681 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7682 == ZERO_EXTEND))
7684 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7685 and0 = fold_convert (uns, and0);
7686 and1 = fold_convert (uns, and1);
7688 #endif
7690 if (change)
7692 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7693 TREE_INT_CST_HIGH (and1));
7694 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7695 TREE_CONSTANT_OVERFLOW (and1));
7696 return fold_build2 (BIT_AND_EXPR, type,
7697 fold_convert (type, and0), tem);
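/* Editorial example: for signed char s,

     (unsigned int) (s & 0x7f)   ->   (unsigned int) s & 0x7fU

   is safe because the mask clears the sign bit of S, so masking before
   or after the widening conversion yields the same value.  */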
7701 /* Convert (T1)((T2)X op Y) into (T1)X op Y, where T1 and T2 are
7702 pointers to types of the same size. */
7703 if (POINTER_TYPE_P (type)
7704 && BINARY_CLASS_P (arg0)
7705 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7706 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7708 tree arg00 = TREE_OPERAND (arg0, 0);
7709 tree t0 = type;
7710 tree t1 = TREE_TYPE (arg00);
7711 tree tt0 = TREE_TYPE (t0);
7712 tree tt1 = TREE_TYPE (t1);
7713 tree s0 = TYPE_SIZE (tt0);
7714 tree s1 = TYPE_SIZE (tt1);
7716 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7717 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7718 TREE_OPERAND (arg0, 1));
7721 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7722 of the same precision, and X is an integer type not narrower than
7723 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7724 if (INTEGRAL_TYPE_P (type)
7725 && TREE_CODE (op0) == BIT_NOT_EXPR
7726 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7727 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7728 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7729 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7731 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7732 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7733 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7734 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7737 tem = fold_convert_const (code, type, op0);
7738 return tem ? tem : NULL_TREE;
7740 case VIEW_CONVERT_EXPR:
7741 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7742 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7743 return fold_view_convert_expr (type, op0);
7745 case NEGATE_EXPR:
7746 tem = fold_negate_expr (arg0);
7747 if (tem)
7748 return fold_convert (type, tem);
7749 return NULL_TREE;
7751 case ABS_EXPR:
7752 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7753 return fold_abs_const (arg0, type);
7754 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7755 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7756 /* Convert fabs((double)float) into (double)fabsf(float). */
7757 else if (TREE_CODE (arg0) == NOP_EXPR
7758 && TREE_CODE (type) == REAL_TYPE)
7760 tree targ0 = strip_float_extensions (arg0);
7761 if (targ0 != arg0)
7762 return fold_convert (type, fold_build1 (ABS_EXPR,
7763 TREE_TYPE (targ0),
7764 targ0));
7766 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7767 else if (TREE_CODE (arg0) == ABS_EXPR)
7768 return arg0;
7769 else if (tree_expr_nonnegative_p (arg0))
7770 return arg0;
7772 /* Strip sign ops from argument. */
7773 if (TREE_CODE (type) == REAL_TYPE)
7775 tem = fold_strip_sign_ops (arg0);
7776 if (tem)
7777 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7779 return NULL_TREE;
7781 case CONJ_EXPR:
7782 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7783 return fold_convert (type, arg0);
7784 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7786 tree itype = TREE_TYPE (type);
7787 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7788 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7789 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7791 if (TREE_CODE (arg0) == COMPLEX_CST)
7793 tree itype = TREE_TYPE (type);
7794 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7795 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7796 return build_complex (type, rpart, negate_expr (ipart));
7798 if (TREE_CODE (arg0) == CONJ_EXPR)
7799 return fold_convert (type, TREE_OPERAND (arg0, 0));
7800 return NULL_TREE;
7802 case BIT_NOT_EXPR:
7803 if (TREE_CODE (arg0) == INTEGER_CST)
7804 return fold_not_const (arg0, type);
7805 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7806 return TREE_OPERAND (arg0, 0);
7807 /* Convert ~ (-A) to A - 1. */
7808 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7809 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7810 build_int_cst (type, 1));
7811 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7812 else if (INTEGRAL_TYPE_P (type)
7813 && ((TREE_CODE (arg0) == MINUS_EXPR
7814 && integer_onep (TREE_OPERAND (arg0, 1)))
7815 || (TREE_CODE (arg0) == PLUS_EXPR
7816 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7817 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7818 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7819 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7820 && (tem = fold_unary (BIT_NOT_EXPR, type,
7821 fold_convert (type,
7822 TREE_OPERAND (arg0, 0)))))
7823 return fold_build2 (BIT_XOR_EXPR, type, tem,
7824 fold_convert (type, TREE_OPERAND (arg0, 1)));
7825 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7826 && (tem = fold_unary (BIT_NOT_EXPR, type,
7827 fold_convert (type,
7828 TREE_OPERAND (arg0, 1)))))
7829 return fold_build2 (BIT_XOR_EXPR, type,
7830 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7832 return NULL_TREE;
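/* Editorial check of the identities above, using the two's-complement
   rule ~Z == -Z - 1:

     ~(-A)    == -(-A) - 1    == A - 1
     ~(A - 1) == -(A - 1) - 1 == -A
     ~(X ^ Y) == (X ^ Y) ^ ~0 == (X ^ ~0) ^ Y == ~X ^ Y.  */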
7834 case TRUTH_NOT_EXPR:
7835 /* The argument to invert_truthvalue must have Boolean type. */
7836 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7837 arg0 = fold_convert (boolean_type_node, arg0);
7839 /* Note that the operand of this must be an int
7840 and its values must be 0 or 1.
7841 ("true" is a fixed value perhaps depending on the language,
7842 but we don't handle values other than 1 correctly yet.) */
7843 tem = fold_truth_not_expr (arg0);
7844 if (!tem)
7845 return NULL_TREE;
7846 return fold_convert (type, tem);
7848 case REALPART_EXPR:
7849 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7850 return fold_convert (type, arg0);
7851 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7852 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7853 TREE_OPERAND (arg0, 1));
7854 if (TREE_CODE (arg0) == COMPLEX_CST)
7855 return fold_convert (type, TREE_REALPART (arg0));
7856 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7858 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7859 tem = fold_build2 (TREE_CODE (arg0), itype,
7860 fold_build1 (REALPART_EXPR, itype,
7861 TREE_OPERAND (arg0, 0)),
7862 fold_build1 (REALPART_EXPR, itype,
7863 TREE_OPERAND (arg0, 1)));
7864 return fold_convert (type, tem);
7866 if (TREE_CODE (arg0) == CONJ_EXPR)
7868 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7869 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7870 return fold_convert (type, tem);
7872 return NULL_TREE;
7874 case IMAGPART_EXPR:
7875 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7876 return fold_convert (type, integer_zero_node);
7877 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7878 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7879 TREE_OPERAND (arg0, 0));
7880 if (TREE_CODE (arg0) == COMPLEX_CST)
7881 return fold_convert (type, TREE_IMAGPART (arg0));
7882 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7884 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7885 tem = fold_build2 (TREE_CODE (arg0), itype,
7886 fold_build1 (IMAGPART_EXPR, itype,
7887 TREE_OPERAND (arg0, 0)),
7888 fold_build1 (IMAGPART_EXPR, itype,
7889 TREE_OPERAND (arg0, 1)));
7890 return fold_convert (type, tem);
7892 if (TREE_CODE (arg0) == CONJ_EXPR)
7894 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7895 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7896 return fold_convert (type, negate_expr (tem));
7898 return NULL_TREE;
7900 default:
7901 return NULL_TREE;
7902 } /* switch (code) */
7905 /* Fold a binary expression of code CODE and type TYPE with operands
7906 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7907 Return the folded expression if folding is successful. Otherwise,
7908 return NULL_TREE. */
7910 static tree
7911 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7913 enum tree_code compl_code;
7915 if (code == MIN_EXPR)
7916 compl_code = MAX_EXPR;
7917 else if (code == MAX_EXPR)
7918 compl_code = MIN_EXPR;
7919 else
7920 gcc_unreachable ();
7922 /* MIN (MAX (a, b), b) == b. */
7923 if (TREE_CODE (op0) == compl_code
7924 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7925 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7927 /* MIN (MAX (b, a), b) == b. */
7928 if (TREE_CODE (op0) == compl_code
7929 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7930 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7931 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7933 /* MIN (a, MAX (a, b)) == a. */
7934 if (TREE_CODE (op1) == compl_code
7935 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7936 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7937 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7939 /* MIN (a, MAX (b, a)) == a. */
7940 if (TREE_CODE (op1) == compl_code
7941 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7942 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7943 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7945 return NULL_TREE;
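/* Editorial sketch (not compiler code) spelling out the four
   absorption laws matched above, with the obvious ternary MIN/MAX:  */

#define EXAMPLE_MIN(a, b) ((a) < (b) ? (a) : (b))
#define EXAMPLE_MAX(a, b) ((a) > (b) ? (a) : (b))

static int
example_minmax_absorption (int a, int b)
{
  return EXAMPLE_MIN (EXAMPLE_MAX (a, b), b) == b
      && EXAMPLE_MIN (EXAMPLE_MAX (b, a), b) == b
      && EXAMPLE_MIN (a, EXAMPLE_MAX (a, b)) == a
      && EXAMPLE_MIN (a, EXAMPLE_MAX (b, a)) == a;  /* always 1 */
}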
7948 /* Subroutine of fold_binary. This routine performs all of the
7949 transformations that are common to the equality/inequality
7950 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7951 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7952 fold_binary should go through fold_binary instead. Fold a comparison with
7953 tree code CODE and type TYPE with operands OP0 and OP1. Return
7954 the folded comparison or NULL_TREE. */
7956 static tree
7957 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7959 tree arg0, arg1, tem;
7961 arg0 = op0;
7962 arg1 = op1;
7964 STRIP_SIGN_NOPS (arg0);
7965 STRIP_SIGN_NOPS (arg1);
7967 tem = fold_relational_const (code, type, arg0, arg1);
7968 if (tem != NULL_TREE)
7969 return tem;
7971 /* If one arg is a real or integer constant, put it last. */
7972 if (tree_swap_operands_p (arg0, arg1, true))
7973 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7975 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
7976 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7977 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7978 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7979 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7980 && (TREE_CODE (arg1) == INTEGER_CST
7981 && !TREE_OVERFLOW (arg1)))
7983 tree const1 = TREE_OPERAND (arg0, 1);
7984 tree const2 = arg1;
7985 tree variable = TREE_OPERAND (arg0, 0);
7986 tree lhs;
7987 int lhs_add;
7988 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7990 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7991 TREE_TYPE (arg1), const2, const1);
7992 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7993 && (TREE_CODE (lhs) != INTEGER_CST
7994 || !TREE_OVERFLOW (lhs)))
7996 fold_overflow_warning (("assuming signed overflow does not occur "
7997 "when changing X +- C1 cmp C2 to "
7998 "X cmp C1 +- C2"),
7999 WARN_STRICT_OVERFLOW_COMPARISON);
8000 return fold_build2 (code, type, variable, lhs);
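/* Editorial example: for signed int x, where overflow is undefined,

     x + 3 < 10   becomes   x < 7

   which is why the warning above fires: the rewrite is only valid on
   the assumption that x + 3 does not wrap.  */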
8004 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8005 same object, then we can fold this to a comparison of the two offsets in
8006 signed size type. This is possible because pointer arithmetic is
8007 restricted to remain within an object and overflow on pointer differences
8008 is undefined as of C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8010 We check flag_wrapv directly because pointer types are unsigned,
8011 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8012 normally what we want, to avoid certain odd overflow cases, but
8013 not here. */
8014 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8015 && !flag_wrapv
8016 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8018 tree base0, offset0, base1, offset1;
8020 if (extract_array_ref (arg0, &base0, &offset0)
8021 && extract_array_ref (arg1, &base1, &offset1)
8022 && operand_equal_p (base0, base1, 0))
8024 tree signed_size_type_node;
8025 signed_size_type_node = signed_type_for (size_type_node);
8027 /* By converting to signed size type we cover middle-end pointer
8028 arithmetic, which operates on unsigned pointer types of size-type
8029 width, and ARRAY_REF offsets, which are properly sign- or
8030 zero-extended from their type in case it is narrower than
8031 size type. */
8032 if (offset0 == NULL_TREE)
8033 offset0 = build_int_cst (signed_size_type_node, 0);
8034 else
8035 offset0 = fold_convert (signed_size_type_node, offset0);
8036 if (offset1 == NULL_TREE)
8037 offset1 = build_int_cst (signed_size_type_node, 0);
8038 else
8039 offset1 = fold_convert (signed_size_type_node, offset1);
8041 return fold_build2 (code, type, offset0, offset1);
8045 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8047 tree targ0 = strip_float_extensions (arg0);
8048 tree targ1 = strip_float_extensions (arg1);
8049 tree newtype = TREE_TYPE (targ0);
8051 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8052 newtype = TREE_TYPE (targ1);
8054 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8055 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8056 return fold_build2 (code, type, fold_convert (newtype, targ0),
8057 fold_convert (newtype, targ1));
8059 /* (-a) CMP (-b) -> b CMP a */
8060 if (TREE_CODE (arg0) == NEGATE_EXPR
8061 && TREE_CODE (arg1) == NEGATE_EXPR)
8062 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8063 TREE_OPERAND (arg0, 0));
8065 if (TREE_CODE (arg1) == REAL_CST)
8067 REAL_VALUE_TYPE cst;
8068 cst = TREE_REAL_CST (arg1);
8070 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8071 if (TREE_CODE (arg0) == NEGATE_EXPR)
8072 return fold_build2 (swap_tree_comparison (code), type,
8073 TREE_OPERAND (arg0, 0),
8074 build_real (TREE_TYPE (arg1),
8075 REAL_VALUE_NEGATE (cst)));
8077 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8078 /* a CMP (-0) -> a CMP 0 */
8079 if (REAL_VALUE_MINUS_ZERO (cst))
8080 return fold_build2 (code, type, arg0,
8081 build_real (TREE_TYPE (arg1), dconst0));
8083 /* x != NaN is always true, other ops are always false. */
8084 if (REAL_VALUE_ISNAN (cst)
8085 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8087 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8088 return omit_one_operand (type, tem, arg0);
8091 /* Fold comparisons against infinity. */
8092 if (REAL_VALUE_ISINF (cst))
8094 tem = fold_inf_compare (code, type, arg0, arg1);
8095 if (tem != NULL_TREE)
8096 return tem;
8100 /* If this is a comparison of a real constant with a PLUS_EXPR
8101 or a MINUS_EXPR of a real constant, we can convert it into a
8102 comparison with a revised real constant as long as no overflow
8103 occurs when unsafe_math_optimizations are enabled. */
8104 if (flag_unsafe_math_optimizations
8105 && TREE_CODE (arg1) == REAL_CST
8106 && (TREE_CODE (arg0) == PLUS_EXPR
8107 || TREE_CODE (arg0) == MINUS_EXPR)
8108 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8109 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8110 ? MINUS_EXPR : PLUS_EXPR,
8111 arg1, TREE_OPERAND (arg0, 1), 0))
8112 && ! TREE_CONSTANT_OVERFLOW (tem))
8113 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8115 /* Likewise, we can simplify a comparison of a real constant with
8116 a MINUS_EXPR whose first operand is also a real constant, i.e.
8117 (c1 - x) < c2 becomes x > c1-c2. */
8118 if (flag_unsafe_math_optimizations
8119 && TREE_CODE (arg1) == REAL_CST
8120 && TREE_CODE (arg0) == MINUS_EXPR
8121 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8122 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8123 arg1, 0))
8124 && ! TREE_CONSTANT_OVERFLOW (tem))
8125 return fold_build2 (swap_tree_comparison (code), type,
8126 TREE_OPERAND (arg0, 1), tem);
8128 /* Fold comparisons against built-in math functions. */
8129 if (TREE_CODE (arg1) == REAL_CST
8130 && flag_unsafe_math_optimizations
8131 && ! flag_errno_math)
8133 enum built_in_function fcode = builtin_mathfn_code (arg0);
8135 if (fcode != END_BUILTINS)
8137 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8138 if (tem != NULL_TREE)
8139 return tem;
8144 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8145 if (TREE_CONSTANT (arg1)
8146 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8147 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8148 /* This optimization is invalid for ordered comparisons
8149 if CONST+INCR overflows or if foo+incr might overflow.
8150 This optimization is invalid for floating point due to rounding.
8151 For pointer types we assume overflow doesn't happen. */
8152 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8153 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8154 && (code == EQ_EXPR || code == NE_EXPR))))
8156 tree varop, newconst;
8158 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8160 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8161 arg1, TREE_OPERAND (arg0, 1));
8162 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8163 TREE_OPERAND (arg0, 0),
8164 TREE_OPERAND (arg0, 1));
8166 else
8168 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8169 arg1, TREE_OPERAND (arg0, 1));
8170 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8171 TREE_OPERAND (arg0, 0),
8172 TREE_OPERAND (arg0, 1));
8176 /* If VAROP is a reference to a bitfield, we must mask
8177 the constant by the width of the field. */
8178 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8179 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8180 && host_integerp (DECL_SIZE (TREE_OPERAND
8181 (TREE_OPERAND (varop, 0), 1)), 1))
8183 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8184 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8185 tree folded_compare, shift;
8187 /* First check whether the comparison would come out
8188 always the same. If we don't do that we would
8189 change the meaning with the masking. */
8190 folded_compare = fold_build2 (code, type,
8191 TREE_OPERAND (varop, 0), arg1);
8192 if (TREE_CODE (folded_compare) == INTEGER_CST)
8193 return omit_one_operand (type, folded_compare, varop);
8195 shift = build_int_cst (NULL_TREE,
8196 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8197 shift = fold_convert (TREE_TYPE (varop), shift);
8198 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8199 newconst, shift);
8200 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8201 newconst, shift);
8204 return fold_build2 (code, type, varop, newconst);
8207 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8208 && (TREE_CODE (arg0) == NOP_EXPR
8209 || TREE_CODE (arg0) == CONVERT_EXPR))
8211 /* If we are widening one operand of an integer comparison,
8212 see if the other operand is similarly being widened. Perhaps we
8213 can do the comparison in the narrower type. */
8214 tem = fold_widened_comparison (code, type, arg0, arg1);
8215 if (tem)
8216 return tem;
8218 /* Or if we are changing signedness. */
8219 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8220 if (tem)
8221 return tem;
8224 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8225 constant, we can simplify it. */
8226 if (TREE_CODE (arg1) == INTEGER_CST
8227 && (TREE_CODE (arg0) == MIN_EXPR
8228 || TREE_CODE (arg0) == MAX_EXPR)
8229 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8231 tem = optimize_minmax_comparison (code, type, op0, op1);
8232 if (tem)
8233 return tem;
8236 /* Simplify comparison of something with itself. (For IEEE
8237 floating-point, we can only do some of these simplifications.) */
8238 if (operand_equal_p (arg0, arg1, 0))
8240 switch (code)
8242 case EQ_EXPR:
8243 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8244 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8245 return constant_boolean_node (1, type);
8246 break;
8248 case GE_EXPR:
8249 case LE_EXPR:
8250 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8251 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8252 return constant_boolean_node (1, type);
8253 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8255 case NE_EXPR:
8256 /* For NE, we can only do this simplification if integer
8257 or we don't honor IEEE floating point NaNs. */
8258 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8259 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8260 break;
8261 /* ... fall through ... */
8262 case GT_EXPR:
8263 case LT_EXPR:
8264 return constant_boolean_node (0, type);
8265 default:
8266 gcc_unreachable ();
8270 /* If we are comparing an expression that just has comparisons
8271 of two integer values, arithmetic expressions of those comparisons,
8272 and constants, we can simplify it. There are only three cases
8273 to check: the two values can either be equal, the first can be
8274 greater, or the second can be greater. Fold the expression for
8275 those three values. Since each value must be 0 or 1, we have
8276 eight possibilities, each of which corresponds to the constant 0
8277 or 1 or one of the six possible comparisons.
8279 This handles common cases like (a > b) == 0 but also handles
8280 expressions like ((x > y) - (y > x)) > 0, which supposedly
8281 occur in macroized code. */
8283 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8285 tree cval1 = 0, cval2 = 0;
8286 int save_p = 0;
8288 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8289 /* Don't handle degenerate cases here; they should already
8290 have been handled anyway. */
8291 && cval1 != 0 && cval2 != 0
8292 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8293 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8294 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8295 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8296 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8297 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8298 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8300 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8301 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8303 /* We can't just pass T to eval_subst in case cval1 or cval2
8304 was the same as ARG1. */
8306 tree high_result
8307 = fold_build2 (code, type,
8308 eval_subst (arg0, cval1, maxval,
8309 cval2, minval),
8310 arg1);
8311 tree equal_result
8312 = fold_build2 (code, type,
8313 eval_subst (arg0, cval1, maxval,
8314 cval2, maxval),
8315 arg1);
8316 tree low_result
8317 = fold_build2 (code, type,
8318 eval_subst (arg0, cval1, minval,
8319 cval2, maxval),
8320 arg1);
8322 /* All three of these results should be 0 or 1. Confirm they are.
8323 Then use those values to select the proper code to use. */
8325 if (TREE_CODE (high_result) == INTEGER_CST
8326 && TREE_CODE (equal_result) == INTEGER_CST
8327 && TREE_CODE (low_result) == INTEGER_CST)
8329 /* Make a 3-bit mask with the high-order bit being the
8330 value for `>', the next for '=', and the low for '<'. */
8331 switch ((integer_onep (high_result) * 4)
8332 + (integer_onep (equal_result) * 2)
8333 + integer_onep (low_result))
8335 case 0:
8336 /* Always false. */
8337 return omit_one_operand (type, integer_zero_node, arg0);
8338 case 1:
8339 code = LT_EXPR;
8340 break;
8341 case 2:
8342 code = EQ_EXPR;
8343 break;
8344 case 3:
8345 code = LE_EXPR;
8346 break;
8347 case 4:
8348 code = GT_EXPR;
8349 break;
8350 case 5:
8351 code = NE_EXPR;
8352 break;
8353 case 6:
8354 code = GE_EXPR;
8355 break;
8356 case 7:
8357 /* Always true. */
8358 return omit_one_operand (type, integer_one_node, arg0);
8361 if (save_p)
8362 return save_expr (build2 (code, type, cval1, cval2));
8363 return fold_build2 (code, type, cval1, cval2);
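/* Editorial note: the 3-bit mask built above decodes as
   (GT << 2) | (EQ << 1) | LT, so its eight values map to

     0 always false   1 <    2 ==   3 <=
     4 >              5 !=   6 >=   7 always true

   exactly as the switch selects.  */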
8368 /* Fold a comparison of the address of COMPONENT_REFs with the same
8369 type and component to a comparison of the address of the base
8370 object. In short, fold &x->a OP &y->a to x OP y and
8371 &x->a OP &y.a to x OP &y */
8372 if (TREE_CODE (arg0) == ADDR_EXPR
8373 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8374 && TREE_CODE (arg1) == ADDR_EXPR
8375 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8377 tree cref0 = TREE_OPERAND (arg0, 0);
8378 tree cref1 = TREE_OPERAND (arg1, 0);
8379 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8381 tree op0 = TREE_OPERAND (cref0, 0);
8382 tree op1 = TREE_OPERAND (cref1, 0);
8383 return fold_build2 (code, type,
8384 build_fold_addr_expr (op0),
8385 build_fold_addr_expr (op1));
8389 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8390 into a single range test. */
8391 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8392 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8393 && TREE_CODE (arg1) == INTEGER_CST
8394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8395 && !integer_zerop (TREE_OPERAND (arg0, 1))
8396 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8397 && !TREE_OVERFLOW (arg1))
8399 tem = fold_div_compare (code, type, arg0, arg1);
8400 if (tem != NULL_TREE)
8401 return tem;
8404 return NULL_TREE;
8408 /* Subroutine of fold_binary. Optimize complex multiplications of the
8409 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8410 argument EXPR represents the expression "z" of type TYPE. */
8412 static tree
8413 fold_mult_zconjz (tree type, tree expr)
8415 tree itype = TREE_TYPE (type);
8416 tree rpart, ipart, tem;
8418 if (TREE_CODE (expr) == COMPLEX_EXPR)
8420 rpart = TREE_OPERAND (expr, 0);
8421 ipart = TREE_OPERAND (expr, 1);
8423 else if (TREE_CODE (expr) == COMPLEX_CST)
8425 rpart = TREE_REALPART (expr);
8426 ipart = TREE_IMAGPART (expr);
8428 else
8430 expr = save_expr (expr);
8431 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8432 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8435 rpart = save_expr (rpart);
8436 ipart = save_expr (ipart);
8437 tem = fold_build2 (PLUS_EXPR, itype,
8438 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8439 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8440 return fold_build2 (COMPLEX_EXPR, type, tem,
8441 fold_convert (itype, integer_zero_node));
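/* Editorial sketch (not compiler code) of the identity used above,
   written with C99 complex arithmetic:  */

#include <complex.h>

static float complex
example_mult_zconjz (float complex z)
{
  float re = crealf (z), im = cimagf (z);
  /* z * conjf (z) == (re*re + im*im) + 0i for finite values.  */
  return (re * re + im * im) + 0.0f * I;
}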
8445 /* Fold a binary expression of code CODE and type TYPE with operands
8446 OP0 and OP1. Return the folded expression if folding is
8447 successful. Otherwise, return NULL_TREE. */
8449 tree
8450 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8452 enum tree_code_class kind = TREE_CODE_CLASS (code);
8453 tree arg0, arg1, tem;
8454 tree t1 = NULL_TREE;
8455 bool strict_overflow_p;
8457 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8458 && TREE_CODE_LENGTH (code) == 2
8459 && op0 != NULL_TREE
8460 && op1 != NULL_TREE);
8462 arg0 = op0;
8463 arg1 = op1;
8465 /* Strip any conversions that don't change the mode. This is
8466 safe for every expression, except for a comparison expression
8467 because its signedness is derived from its operands. So, in
8468 the latter case, only strip conversions that don't change the
8469 signedness.
8471 Note that this is done as an internal manipulation within the
8472 constant folder, in order to find the simplest representation
8473 of the arguments so that their form can be studied. In any
8474 case, the appropriate type conversions should be put back in
8475 the tree that will get out of the constant folder. */
8477 if (kind == tcc_comparison)
8479 STRIP_SIGN_NOPS (arg0);
8480 STRIP_SIGN_NOPS (arg1);
8482 else
8484 STRIP_NOPS (arg0);
8485 STRIP_NOPS (arg1);
8488 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8489 constant but we can't do arithmetic on them. */
8490 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8491 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8492 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8493 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8495 if (kind == tcc_binary)
8496 tem = const_binop (code, arg0, arg1, 0);
8497 else if (kind == tcc_comparison)
8498 tem = fold_relational_const (code, type, arg0, arg1);
8499 else
8500 tem = NULL_TREE;
8502 if (tem != NULL_TREE)
8504 if (TREE_TYPE (tem) != type)
8505 tem = fold_convert (type, tem);
8506 return tem;
8510 /* If this is a commutative operation, and ARG0 is a constant, move it
8511 to ARG1 to reduce the number of tests below. */
8512 if (commutative_tree_code (code)
8513 && tree_swap_operands_p (arg0, arg1, true))
8514 return fold_build2 (code, type, op1, op0);
8516 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8518 First check for cases where an arithmetic operation is applied to a
8519 compound, conditional, or comparison operation. Push the arithmetic
8520 operation inside the compound or conditional to see if any folding
8521 can then be done. Convert comparison to conditional for this purpose.
8522 This also optimizes non-constant cases that used to be done in
8523 expand_expr.
8525 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8526 one of the operands is a comparison and the other is a comparison, a
8527 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8528 code below would make the expression more complex. Change it to a
8529 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8530 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8532 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8533 || code == EQ_EXPR || code == NE_EXPR)
8534 && ((truth_value_p (TREE_CODE (arg0))
8535 && (truth_value_p (TREE_CODE (arg1))
8536 || (TREE_CODE (arg1) == BIT_AND_EXPR
8537 && integer_onep (TREE_OPERAND (arg1, 1)))))
8538 || (truth_value_p (TREE_CODE (arg1))
8539 && (truth_value_p (TREE_CODE (arg0))
8540 || (TREE_CODE (arg0) == BIT_AND_EXPR
8541 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8543 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8544 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8545 : TRUTH_XOR_EXPR,
8546 boolean_type_node,
8547 fold_convert (boolean_type_node, arg0),
8548 fold_convert (boolean_type_node, arg1));
8550 if (code == EQ_EXPR)
8551 tem = invert_truthvalue (tem);
8553 return fold_convert (type, tem);
8556 if (TREE_CODE_CLASS (code) == tcc_binary
8557 || TREE_CODE_CLASS (code) == tcc_comparison)
8559 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8560 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8561 fold_build2 (code, type,
8562 TREE_OPERAND (arg0, 1), op1));
8563 if (TREE_CODE (arg1) == COMPOUND_EXPR
8564 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8565 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8566 fold_build2 (code, type,
8567 op0, TREE_OPERAND (arg1, 1)));
8569 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8571 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8572 arg0, arg1,
8573 /*cond_first_p=*/1);
8574 if (tem != NULL_TREE)
8575 return tem;
8578 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8580 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8581 arg1, arg0,
8582 /*cond_first_p=*/0);
8583 if (tem != NULL_TREE)
8584 return tem;
8588 switch (code)
8590 case PLUS_EXPR:
8591 /* A + (-B) -> A - B */
8592 if (TREE_CODE (arg1) == NEGATE_EXPR)
8593 return fold_build2 (MINUS_EXPR, type,
8594 fold_convert (type, arg0),
8595 fold_convert (type, TREE_OPERAND (arg1, 0)));
8596 /* (-A) + B -> B - A */
8597 if (TREE_CODE (arg0) == NEGATE_EXPR
8598 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8599 return fold_build2 (MINUS_EXPR, type,
8600 fold_convert (type, arg1),
8601 fold_convert (type, TREE_OPERAND (arg0, 0)));
8602 /* Convert ~A + 1 to -A. */
8603 if (INTEGRAL_TYPE_P (type)
8604 && TREE_CODE (arg0) == BIT_NOT_EXPR
8605 && integer_onep (arg1))
8606 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8608 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8609 same or one. */
8610 if ((TREE_CODE (arg0) == MULT_EXPR
8611 || TREE_CODE (arg1) == MULT_EXPR)
8612 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8614 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8615 if (tem)
8616 return tem;
8619 if (! FLOAT_TYPE_P (type))
8621 if (integer_zerop (arg1))
8622 return non_lvalue (fold_convert (type, arg0));
8624 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8625 with a constant, and the two constants have no bits in common,
8626 we should treat this as a BIT_IOR_EXPR since this may produce more
8627 simplifications. */
8628 if (TREE_CODE (arg0) == BIT_AND_EXPR
8629 && TREE_CODE (arg1) == BIT_AND_EXPR
8630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8631 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8632 && integer_zerop (const_binop (BIT_AND_EXPR,
8633 TREE_OPERAND (arg0, 1),
8634 TREE_OPERAND (arg1, 1), 0)))
8636 code = BIT_IOR_EXPR;
8637 goto bit_ior;
8640 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8641 (plus (plus (mult) (mult)) (foo)) so that we can
8642 take advantage of the factoring cases below. */
8643 if (((TREE_CODE (arg0) == PLUS_EXPR
8644 || TREE_CODE (arg0) == MINUS_EXPR)
8645 && TREE_CODE (arg1) == MULT_EXPR)
8646 || ((TREE_CODE (arg1) == PLUS_EXPR
8647 || TREE_CODE (arg1) == MINUS_EXPR)
8648 && TREE_CODE (arg0) == MULT_EXPR))
8650 tree parg0, parg1, parg, marg;
8651 enum tree_code pcode;
8653 if (TREE_CODE (arg1) == MULT_EXPR)
8654 parg = arg0, marg = arg1;
8655 else
8656 parg = arg1, marg = arg0;
8657 pcode = TREE_CODE (parg);
8658 parg0 = TREE_OPERAND (parg, 0);
8659 parg1 = TREE_OPERAND (parg, 1);
8660 STRIP_NOPS (parg0);
8661 STRIP_NOPS (parg1);
8663 if (TREE_CODE (parg0) == MULT_EXPR
8664 && TREE_CODE (parg1) != MULT_EXPR)
8665 return fold_build2 (pcode, type,
8666 fold_build2 (PLUS_EXPR, type,
8667 fold_convert (type, parg0),
8668 fold_convert (type, marg)),
8669 fold_convert (type, parg1));
8670 if (TREE_CODE (parg0) != MULT_EXPR
8671 && TREE_CODE (parg1) == MULT_EXPR)
8672 return fold_build2 (PLUS_EXPR, type,
8673 fold_convert (type, parg0),
8674 fold_build2 (pcode, type,
8675 fold_convert (type, marg),
8676 fold_convert (type,
8677 parg1)));
8680 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8681 of the array. The loop optimizer sometimes produces this type
8682 of expression. */
8683 if (TREE_CODE (arg0) == ADDR_EXPR)
8685 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8686 if (tem)
8687 return fold_convert (type, tem);
8689 else if (TREE_CODE (arg1) == ADDR_EXPR)
8691 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8692 if (tem)
8693 return fold_convert (type, tem);
8696 else
8698 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8699 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8700 return non_lvalue (fold_convert (type, arg0));
8702 /* Likewise if the operands are reversed. */
8703 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8704 return non_lvalue (fold_convert (type, arg1));
8706 /* Convert X + -C into X - C. */
8707 if (TREE_CODE (arg1) == REAL_CST
8708 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8710 tem = fold_negate_const (arg1, type);
8711 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8712 return fold_build2 (MINUS_EXPR, type,
8713 fold_convert (type, arg0),
8714 fold_convert (type, tem));
8717 if (flag_unsafe_math_optimizations
8718 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8719 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8720 && (tem = distribute_real_division (code, type, arg0, arg1)))
8721 return tem;
8723 /* Convert x+x into x*2.0. */
8724 if (operand_equal_p (arg0, arg1, 0)
8725 && SCALAR_FLOAT_TYPE_P (type))
8726 return fold_build2 (MULT_EXPR, type, arg0,
8727 build_real (type, dconst2));
8729 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8730 if (flag_unsafe_math_optimizations
8731 && TREE_CODE (arg1) == PLUS_EXPR
8732 && TREE_CODE (arg0) != MULT_EXPR)
8734 tree tree10 = TREE_OPERAND (arg1, 0);
8735 tree tree11 = TREE_OPERAND (arg1, 1);
8736 if (TREE_CODE (tree11) == MULT_EXPR
8737 && TREE_CODE (tree10) == MULT_EXPR)
8739 tree tree0;
8740 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8741 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8744 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8745 if (flag_unsafe_math_optimizations
8746 && TREE_CODE (arg0) == PLUS_EXPR
8747 && TREE_CODE (arg1) != MULT_EXPR)
8749 tree tree00 = TREE_OPERAND (arg0, 0);
8750 tree tree01 = TREE_OPERAND (arg0, 1);
8751 if (TREE_CODE (tree01) == MULT_EXPR
8752 && TREE_CODE (tree00) == MULT_EXPR)
8754 tree tree0;
8755 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8756 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8761 bit_rotate:
8762 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
8763 is a rotate of A by C1 bits. */
8764 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
8765 is a rotate of A by B bits. */
8767 enum tree_code code0, code1;
8768 code0 = TREE_CODE (arg0);
8769 code1 = TREE_CODE (arg1);
8770 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8771 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8772 && operand_equal_p (TREE_OPERAND (arg0, 0),
8773 TREE_OPERAND (arg1, 0), 0)
8774 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8776 tree tree01, tree11;
8777 enum tree_code code01, code11;
8779 tree01 = TREE_OPERAND (arg0, 1);
8780 tree11 = TREE_OPERAND (arg1, 1);
8781 STRIP_NOPS (tree01);
8782 STRIP_NOPS (tree11);
8783 code01 = TREE_CODE (tree01);
8784 code11 = TREE_CODE (tree11);
8785 if (code01 == INTEGER_CST
8786 && code11 == INTEGER_CST
8787 && TREE_INT_CST_HIGH (tree01) == 0
8788 && TREE_INT_CST_HIGH (tree11) == 0
8789 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8790 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8791 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8792 code0 == LSHIFT_EXPR ? tree01 : tree11);
8793 else if (code11 == MINUS_EXPR)
8795 tree tree110, tree111;
8796 tree110 = TREE_OPERAND (tree11, 0);
8797 tree111 = TREE_OPERAND (tree11, 1);
8798 STRIP_NOPS (tree110);
8799 STRIP_NOPS (tree111);
8800 if (TREE_CODE (tree110) == INTEGER_CST
8801 && 0 == compare_tree_int (tree110,
8802 TYPE_PRECISION
8803 (TREE_TYPE (TREE_OPERAND
8804 (arg0, 0))))
8805 && operand_equal_p (tree01, tree111, 0))
8806 return build2 ((code0 == LSHIFT_EXPR
8807 ? LROTATE_EXPR
8808 : RROTATE_EXPR),
8809 type, TREE_OPERAND (arg0, 0), tree01);
8811 else if (code01 == MINUS_EXPR)
8813 tree tree010, tree011;
8814 tree010 = TREE_OPERAND (tree01, 0);
8815 tree011 = TREE_OPERAND (tree01, 1);
8816 STRIP_NOPS (tree010);
8817 STRIP_NOPS (tree011);
8818 if (TREE_CODE (tree010) == INTEGER_CST
8819 && 0 == compare_tree_int (tree010,
8820 TYPE_PRECISION
8821 (TREE_TYPE (TREE_OPERAND
8822 (arg0, 0))))
8823 && operand_equal_p (tree11, tree011, 0))
8824 return build2 ((code0 != LSHIFT_EXPR
8825 ? LROTATE_EXPR
8826 : RROTATE_EXPR),
8827 type, TREE_OPERAND (arg0, 0), tree11);
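/* Editorial example: for a 32-bit unsigned A,

     (A << 3) + (A >> 29)         is A rotated left by 3, and
     (A << B) + (A >> (32 - B))   is A rotated left by B (B nonzero),

   which are exactly the two shift patterns matched above.  */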
8832 associate:
8833 /* In most languages, we can't associate operations on floats through
8834 parentheses. Rather than remember where the parentheses were, we
8835 don't associate floats at all, unless the user has specified
8836 -funsafe-math-optimizations. */
8838 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8840 tree var0, con0, lit0, minus_lit0;
8841 tree var1, con1, lit1, minus_lit1;
8842 bool ok = true;
8844 /* Split both trees into variables, constants, and literals. Then
8845 associate each group together, the constants with literals,
8846 then the result with variables. This increases the chances of
8847 literals being recombined later and of generating relocatable
8848 expressions for the sum of a constant and literal. */
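/* Editorial example: (x + 3) + (y + 5) splits into variables x, y
   and literals 3, 5, and reassociates to (x + y) + 8, letting the
   literal parts fold together.  */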
8849 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8850 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8851 code == MINUS_EXPR);
8853 /* With undefined overflow we can only associate constants
8854 with one variable. */
8855 if ((POINTER_TYPE_P (type)
8856 || (INTEGRAL_TYPE_P (type)
8857 && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8858 && var0 && var1)
8860 tree tmp0 = var0;
8861 tree tmp1 = var1;
8863 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8864 tmp0 = TREE_OPERAND (tmp0, 0);
8865 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8866 tmp1 = TREE_OPERAND (tmp1, 0);
8867 /* The only case we can still associate with two variables
8868 is if they are the same, modulo negation. */
8869 if (!operand_equal_p (tmp0, tmp1, 0))
8870 ok = false;
8873 /* Only do something if we found more than two objects. Otherwise,
8874 nothing has changed and we risk infinite recursion. */
8875 if (ok
8876 && (2 < ((var0 != 0) + (var1 != 0)
8877 + (con0 != 0) + (con1 != 0)
8878 + (lit0 != 0) + (lit1 != 0)
8879 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8881 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8882 if (code == MINUS_EXPR)
8883 code = PLUS_EXPR;
8885 var0 = associate_trees (var0, var1, code, type);
8886 con0 = associate_trees (con0, con1, code, type);
8887 lit0 = associate_trees (lit0, lit1, code, type);
8888 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8890 /* Preserve the MINUS_EXPR if the negative part of the literal is
8891 greater than the positive part. Otherwise, the multiplicative
8892 folding code (i.e. extract_muldiv) may be fooled when
8893 unsigned constants are subtracted, as in the following
8894 example: ((X*2 + 4) - 8U)/2. */
8895 if (minus_lit0 && lit0)
8897 if (TREE_CODE (lit0) == INTEGER_CST
8898 && TREE_CODE (minus_lit0) == INTEGER_CST
8899 && tree_int_cst_lt (lit0, minus_lit0))
8901 minus_lit0 = associate_trees (minus_lit0, lit0,
8902 MINUS_EXPR, type);
8903 lit0 = 0;
8905 else
8907 lit0 = associate_trees (lit0, minus_lit0,
8908 MINUS_EXPR, type);
8909 minus_lit0 = 0;
8912 if (minus_lit0)
8914 if (con0 == 0)
8915 return fold_convert (type,
8916 associate_trees (var0, minus_lit0,
8917 MINUS_EXPR, type));
8918 else
8920 con0 = associate_trees (con0, minus_lit0,
8921 MINUS_EXPR, type);
8922 return fold_convert (type,
8923 associate_trees (var0, con0,
8924 PLUS_EXPR, type));
8928 con0 = associate_trees (con0, lit0, code, type);
8929 return fold_convert (type, associate_trees (var0, con0,
8930 code, type));
8934 return NULL_TREE;
8936 case MINUS_EXPR:
8937 /* A - (-B) -> A + B */
8938 if (TREE_CODE (arg1) == NEGATE_EXPR)
8939 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8940 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8941 if (TREE_CODE (arg0) == NEGATE_EXPR
8942 && (FLOAT_TYPE_P (type)
8943 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8944 && negate_expr_p (arg1)
8945 && reorder_operands_p (arg0, arg1))
8946 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8947 TREE_OPERAND (arg0, 0));
8948 /* Convert -A - 1 to ~A. */
8949 if (INTEGRAL_TYPE_P (type)
8950 && TREE_CODE (arg0) == NEGATE_EXPR
8951 && integer_onep (arg1))
8952 return fold_build1 (BIT_NOT_EXPR, type,
8953 fold_convert (type, TREE_OPERAND (arg0, 0)));
8955 /* Convert -1 - A to ~A. */
8956 if (INTEGRAL_TYPE_P (type)
8957 && integer_all_onesp (arg0))
8958 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8960 if (! FLOAT_TYPE_P (type))
8962 if (integer_zerop (arg0))
8963 return negate_expr (fold_convert (type, arg1));
8964 if (integer_zerop (arg1))
8965 return non_lvalue (fold_convert (type, arg0));
8967 /* Fold A - (A & B) into ~B & A. */
8968 if (!TREE_SIDE_EFFECTS (arg0)
8969 && TREE_CODE (arg1) == BIT_AND_EXPR)
8971 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8972 return fold_build2 (BIT_AND_EXPR, type,
8973 fold_build1 (BIT_NOT_EXPR, type,
8974 TREE_OPERAND (arg1, 0)),
8975 arg0);
8976 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8977 return fold_build2 (BIT_AND_EXPR, type,
8978 fold_build1 (BIT_NOT_EXPR, type,
8979 TREE_OPERAND (arg1, 1)),
8980 arg0);
8983 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8984 any power of 2 minus 1. */
8985 if (TREE_CODE (arg0) == BIT_AND_EXPR
8986 && TREE_CODE (arg1) == BIT_AND_EXPR
8987 && operand_equal_p (TREE_OPERAND (arg0, 0),
8988 TREE_OPERAND (arg1, 0), 0))
8990 tree mask0 = TREE_OPERAND (arg0, 1);
8991 tree mask1 = TREE_OPERAND (arg1, 1);
8992 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8994 if (operand_equal_p (tem, mask1, 0))
8996 tem = fold_build2 (BIT_XOR_EXPR, type,
8997 TREE_OPERAND (arg0, 0), mask1);
8998 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9003 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9004 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9005 return non_lvalue (fold_convert (type, arg0));
9007 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9008 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9009 (-ARG1 + ARG0) reduces to -ARG1. */
9010 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9011 return negate_expr (fold_convert (type, arg1));
9013 /* Fold &x - &x. This can happen from &x.foo - &x.
9014 This is unsafe for certain floats even in non-IEEE formats.
9015 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9016 Also note that operand_equal_p is always false if an operand
9017 is volatile. */
9019 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9020 && operand_equal_p (arg0, arg1, 0))
9021 return fold_convert (type, integer_zero_node);
9023 /* A - B -> A + (-B) if B is easily negatable. */
9024 if (negate_expr_p (arg1)
9025 && ((FLOAT_TYPE_P (type)
9026 /* Avoid this transformation if B is a positive REAL_CST. */
9027 && (TREE_CODE (arg1) != REAL_CST
9028 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9029 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9030 return fold_build2 (PLUS_EXPR, type,
9031 fold_convert (type, arg0),
9032 fold_convert (type, negate_expr (arg1)));
9034 /* Try folding difference of addresses. */
9036 HOST_WIDE_INT diff;
9038 if ((TREE_CODE (arg0) == ADDR_EXPR
9039 || TREE_CODE (arg1) == ADDR_EXPR)
9040 && ptr_difference_const (arg0, arg1, &diff))
9041 return build_int_cst_type (type, diff);
9044 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size. */
9045 if (TREE_CODE (arg0) == ADDR_EXPR
9046 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9047 && TREE_CODE (arg1) == ADDR_EXPR
9048 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9050 tree aref0 = TREE_OPERAND (arg0, 0);
9051 tree aref1 = TREE_OPERAND (arg1, 0);
9052 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9053 TREE_OPERAND (aref1, 0), 0))
9055 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9056 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9057 tree esz = array_ref_element_size (aref0);
9058 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9059 return fold_build2 (MULT_EXPR, type, diff,
9060 fold_convert (type, esz));
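/* The difference of the two addresses is measured in bytes, so the
   index difference is scaled by the element size: for example,
   &a[5] - &a[2] becomes (5 - 2) * sizeof (a[0]).  */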
9065 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9066 of the array. The loop optimizer sometimes produces this type of
9067 expression. */
9068 if (TREE_CODE (arg0) == ADDR_EXPR)
9070 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9071 if (tem)
9072 return fold_convert (type, tem);
9075 if (flag_unsafe_math_optimizations
9076 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9077 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9078 && (tem = distribute_real_division (code, type, arg0, arg1)))
9079 return tem;
9081 /* Handle (A1 * C1) - (A2 * C2) where A1 and A2, or C1 and C2, are
9082 either the same or the constant 1. */
9083 if ((TREE_CODE (arg0) == MULT_EXPR
9084 || TREE_CODE (arg1) == MULT_EXPR)
9085 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9087 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9088 if (tem)
9089 return tem;
9092 goto associate;
9094 case MULT_EXPR:
9095 /* (-A) * (-B) -> A * B */
9096 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9097 return fold_build2 (MULT_EXPR, type,
9098 fold_convert (type, TREE_OPERAND (arg0, 0)),
9099 fold_convert (type, negate_expr (arg1)));
9100 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9101 return fold_build2 (MULT_EXPR, type,
9102 fold_convert (type, negate_expr (arg0)),
9103 fold_convert (type, TREE_OPERAND (arg1, 0)));
9105 if (! FLOAT_TYPE_P (type))
9107 if (integer_zerop (arg1))
9108 return omit_one_operand (type, arg1, arg0);
9109 if (integer_onep (arg1))
9110 return non_lvalue (fold_convert (type, arg0));
9111 /* Transform x * -1 into -x. */
9112 if (integer_all_onesp (arg1))
9113 return fold_convert (type, negate_expr (arg0));
9115 /* (a * (1 << b)) is (a << b) */
9116 if (TREE_CODE (arg1) == LSHIFT_EXPR
9117 && integer_onep (TREE_OPERAND (arg1, 0)))
9118 return fold_build2 (LSHIFT_EXPR, type, arg0,
9119 TREE_OPERAND (arg1, 1));
9120 if (TREE_CODE (arg0) == LSHIFT_EXPR
9121 && integer_onep (TREE_OPERAND (arg0, 0)))
9122 return fold_build2 (LSHIFT_EXPR, type, arg1,
9123 TREE_OPERAND (arg0, 1));
9125 strict_overflow_p = false;
9126 if (TREE_CODE (arg1) == INTEGER_CST
9127 && 0 != (tem = extract_muldiv (op0,
9128 fold_convert (type, arg1),
9129 code, NULL_TREE,
9130 &strict_overflow_p)))
9132 if (strict_overflow_p)
9133 fold_overflow_warning (("assuming signed overflow does not "
9134 "occur when simplifying "
9135 "multiplication"),
9136 WARN_STRICT_OVERFLOW_MISC);
9137 return fold_convert (type, tem);
9140 /* Optimize z * conj(z) for integer complex numbers. */
9141 if (TREE_CODE (arg0) == CONJ_EXPR
9142 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9143 return fold_mult_zconjz (type, arg1);
9144 if (TREE_CODE (arg1) == CONJ_EXPR
9145 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9146 return fold_mult_zconjz (type, arg0);
9148 else
9150 /* Maybe fold x * 0 to 0. The expressions aren't the same
9151 when x is NaN, since x * 0 is also NaN. Nor are they the
9152 same in modes with signed zeros, since multiplying a
9153 negative value by 0 gives -0, not +0. */
9154 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9155 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9156 && real_zerop (arg1))
9157 return omit_one_operand (type, arg1, arg0);
9158 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9159 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9160 && real_onep (arg1))
9161 return non_lvalue (fold_convert (type, arg0));
9163 /* Transform x * -1.0 into -x. */
9164 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9165 && real_minus_onep (arg1))
9166 return fold_convert (type, negate_expr (arg0));
9168 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9169 if (flag_unsafe_math_optimizations
9170 && TREE_CODE (arg0) == RDIV_EXPR
9171 && TREE_CODE (arg1) == REAL_CST
9172 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9174 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9175 arg1, 0);
9176 if (tem)
9177 return fold_build2 (RDIV_EXPR, type, tem,
9178 TREE_OPERAND (arg0, 1));
9181 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9182 if (operand_equal_p (arg0, arg1, 0))
9184 tree tem = fold_strip_sign_ops (arg0);
9185 if (tem != NULL_TREE)
9187 tem = fold_convert (type, tem);
9188 return fold_build2 (MULT_EXPR, type, tem, tem);
9192 /* Optimize z * conj(z) for floating point complex numbers.
9193 Guarded by flag_unsafe_math_optimizations as non-finite
9194 imaginary components don't produce scalar results. */
9195 if (flag_unsafe_math_optimizations
9196 && TREE_CODE (arg0) == CONJ_EXPR
9197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9198 return fold_mult_zconjz (type, arg1);
9199 if (flag_unsafe_math_optimizations
9200 && TREE_CODE (arg1) == CONJ_EXPR
9201 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9202 return fold_mult_zconjz (type, arg0);
9204 if (flag_unsafe_math_optimizations)
9206 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9207 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9209 /* Optimizations of root(...)*root(...). */
9210 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9212 tree rootfn, arg, arglist;
9213 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9214 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9216 /* Optimize sqrt(x)*sqrt(x) as x. */
9217 if (BUILTIN_SQRT_P (fcode0)
9218 && operand_equal_p (arg00, arg10, 0)
9219 && ! HONOR_SNANS (TYPE_MODE (type)))
9220 return arg00;
9222 /* Optimize root(x)*root(y) as root(x*y). */
9223 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9224 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9225 arglist = build_tree_list (NULL_TREE, arg);
9226 return build_function_call_expr (rootfn, arglist);
9229 /* Optimize expN(x)*expN(y) as expN(x+y). */
9230 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9232 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9233 tree arg = fold_build2 (PLUS_EXPR, type,
9234 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9235 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9236 tree arglist = build_tree_list (NULL_TREE, arg);
9237 return build_function_call_expr (expfn, arglist);
9240 /* Optimizations of pow(...)*pow(...). */
9241 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9242 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9243 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9245 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9246 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9247 1)));
9248 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9249 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9250 1)));
9252 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9253 if (operand_equal_p (arg01, arg11, 0))
9255 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9256 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9257 tree arglist = tree_cons (NULL_TREE, arg,
9258 build_tree_list (NULL_TREE,
9259 arg01));
9260 return build_function_call_expr (powfn, arglist);
9263 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9264 if (operand_equal_p (arg00, arg10, 0))
9266 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9267 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9268 tree arglist = tree_cons (NULL_TREE, arg00,
9269 build_tree_list (NULL_TREE,
9270 arg));
9271 return build_function_call_expr (powfn, arglist);
9275 /* Optimize tan(x)*cos(x) as sin(x). */
9276 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9277 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9278 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9279 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9280 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9281 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9282 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9283 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9285 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9287 if (sinfn != NULL_TREE)
9288 return build_function_call_expr (sinfn,
9289 TREE_OPERAND (arg0, 1));
9292 /* Optimize x*pow(x,c) as pow(x,c+1). */
9293 if (fcode1 == BUILT_IN_POW
9294 || fcode1 == BUILT_IN_POWF
9295 || fcode1 == BUILT_IN_POWL)
9297 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9298 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9299 1)));
9300 if (TREE_CODE (arg11) == REAL_CST
9301 && ! TREE_CONSTANT_OVERFLOW (arg11)
9302 && operand_equal_p (arg0, arg10, 0))
9304 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9305 REAL_VALUE_TYPE c;
9306 tree arg, arglist;
9308 c = TREE_REAL_CST (arg11);
9309 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9310 arg = build_real (type, c);
9311 arglist = build_tree_list (NULL_TREE, arg);
9312 arglist = tree_cons (NULL_TREE, arg0, arglist);
9313 return build_function_call_expr (powfn, arglist);
9317 /* Optimize pow(x,c)*x as pow(x,c+1). */
9318 if (fcode0 == BUILT_IN_POW
9319 || fcode0 == BUILT_IN_POWF
9320 || fcode0 == BUILT_IN_POWL)
9322 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9323 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9324 1)));
9325 if (TREE_CODE (arg01) == REAL_CST
9326 && ! TREE_CONSTANT_OVERFLOW (arg01)
9327 && operand_equal_p (arg1, arg00, 0))
9329 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9330 REAL_VALUE_TYPE c;
9331 tree arg, arglist;
9333 c = TREE_REAL_CST (arg01);
9334 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9335 arg = build_real (type, c);
9336 arglist = build_tree_list (NULL_TREE, arg);
9337 arglist = tree_cons (NULL_TREE, arg1, arglist);
9338 return build_function_call_expr (powfn, arglist);
9342 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9343 if (! optimize_size
9344 && operand_equal_p (arg0, arg1, 0))
9346 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9348 if (powfn)
9350 tree arg = build_real (type, dconst2);
9351 tree arglist = build_tree_list (NULL_TREE, arg);
9352 arglist = tree_cons (NULL_TREE, arg0, arglist);
9353 return build_function_call_expr (powfn, arglist);
9358 goto associate;
9360 case BIT_IOR_EXPR:
9361 bit_ior:
9362 if (integer_all_onesp (arg1))
9363 return omit_one_operand (type, arg1, arg0);
9364 if (integer_zerop (arg1))
9365 return non_lvalue (fold_convert (type, arg0));
9366 if (operand_equal_p (arg0, arg1, 0))
9367 return non_lvalue (fold_convert (type, arg0));
9369 /* ~X | X is -1. */
9370 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9371 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9373 t1 = build_int_cst (type, -1);
9374 t1 = force_fit_type (t1, 0, false, false);
9375 return omit_one_operand (type, t1, arg1);
9378 /* X | ~X is -1. */
9379 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9380 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9382 t1 = build_int_cst (type, -1);
9383 t1 = force_fit_type (t1, 0, false, false);
9384 return omit_one_operand (type, t1, arg0);
9387 /* Canonicalize (X & C1) | C2. */
9388 if (TREE_CODE (arg0) == BIT_AND_EXPR
9389 && TREE_CODE (arg1) == INTEGER_CST
9390 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9392 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9393 int width = TYPE_PRECISION (type);
9394 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9395 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9396 hi2 = TREE_INT_CST_HIGH (arg1);
9397 lo2 = TREE_INT_CST_LOW (arg1);
9399 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9400 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9401 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
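/* For example, (X & 3) | 7 folds to 7: since 3 & 7 == 3, every bit
   that survives the AND is forced on by the OR anyway.  */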
9403 if (width > HOST_BITS_PER_WIDE_INT)
9405 mhi = (unsigned HOST_WIDE_INT) -1
9406 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9407 mlo = -1;
9409 else
9411 mhi = 0;
9412 mlo = (unsigned HOST_WIDE_INT) -1
9413 >> (HOST_BITS_PER_WIDE_INT - width);
9416 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9417 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9418 return fold_build2 (BIT_IOR_EXPR, type,
9419 TREE_OPERAND (arg0, 0), arg1);
9421 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9422 hi1 &= mhi;
9423 lo1 &= mlo;
9424 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9425 return fold_build2 (BIT_IOR_EXPR, type,
9426 fold_build2 (BIT_AND_EXPR, type,
9427 TREE_OPERAND (arg0, 0),
9428 build_int_cst_wide (type,
9429 lo1 & ~lo2,
9430 hi1 & ~hi2)),
9431 arg1);
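/* For example, (X & 0xF) | 0x3 becomes (X & 0xC) | 0x3: bits 0 and 1
   are forced on by the OR, so keeping them in the AND mask is
   redundant (0xF & ~0x3 == 0xC).  */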
9434 /* (X & Y) | Y is (X, Y). */
9435 if (TREE_CODE (arg0) == BIT_AND_EXPR
9436 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9437 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9438 /* (X & Y) | X is (Y, X). */
9439 if (TREE_CODE (arg0) == BIT_AND_EXPR
9440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9441 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9442 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9443 /* X | (X & Y) is (Y, X). */
9444 if (TREE_CODE (arg1) == BIT_AND_EXPR
9445 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9446 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9447 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9448 /* X | (Y & X) is (Y, X). */
9449 if (TREE_CODE (arg1) == BIT_AND_EXPR
9450 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9451 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9452 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9454 t1 = distribute_bit_expr (code, type, arg0, arg1);
9455 if (t1 != NULL_TREE)
9456 return t1;
9458 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9460 This results in more efficient code for machines without a NAND
9461 instruction. Combine will canonicalize to the first form
9462 which will allow use of NAND instructions provided by the
9463 backend if they exist. */
9464 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9465 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9467 return fold_build1 (BIT_NOT_EXPR, type,
9468 build2 (BIT_AND_EXPR, type,
9469 TREE_OPERAND (arg0, 0),
9470 TREE_OPERAND (arg1, 0)));
9473 /* See if this can be simplified into a rotate first. If that
9474 is unsuccessful, continue in the association code. */
9475 goto bit_rotate;
9477 case BIT_XOR_EXPR:
9478 if (integer_zerop (arg1))
9479 return non_lvalue (fold_convert (type, arg0));
9480 if (integer_all_onesp (arg1))
9481 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9482 if (operand_equal_p (arg0, arg1, 0))
9483 return omit_one_operand (type, integer_zero_node, arg0);
9485 /* ~X ^ X is -1. */
9486 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9487 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9489 t1 = build_int_cst (type, -1);
9490 t1 = force_fit_type (t1, 0, false, false);
9491 return omit_one_operand (type, t1, arg1);
9494 /* X ^ ~X is -1. */
9495 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9496 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9498 t1 = build_int_cst (type, -1);
9499 t1 = force_fit_type (t1, 0, false, false);
9500 return omit_one_operand (type, t1, arg0);
9503 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9504 with a constant, and the two constants have no bits in common,
9505 we should treat this as a BIT_IOR_EXPR since this may produce more
9506 simplifications. */
9507 if (TREE_CODE (arg0) == BIT_AND_EXPR
9508 && TREE_CODE (arg1) == BIT_AND_EXPR
9509 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9510 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9511 && integer_zerop (const_binop (BIT_AND_EXPR,
9512 TREE_OPERAND (arg0, 1),
9513 TREE_OPERAND (arg1, 1), 0)))
9515 code = BIT_IOR_EXPR;
9516 goto bit_ior;
9519 /* (X | Y) ^ X -> Y & ~X. */
9520 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9521 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9523 tree t2 = TREE_OPERAND (arg0, 1);
9524 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9525 arg1);
9526 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9527 fold_convert (type, t1));
9528 return t1;
9531 /* (Y | X) ^ X -> Y & ~X. */
9532 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9533 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9535 tree t2 = TREE_OPERAND (arg0, 0);
9536 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9537 arg1);
9538 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9539 fold_convert (type, t1));
9540 return t1;
9543 /* X ^ (X | Y) -> Y & ~X. */
9544 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9545 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9547 tree t2 = TREE_OPERAND (arg1, 1);
9548 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9549 arg0);
9550 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9551 fold_convert (type, t1));
9552 return t1;
9555 /* X ^ (Y | X) -> Y & ~X. */
9556 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9557 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9559 tree t2 = TREE_OPERAND (arg1, 0);
9560 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9561 arg0);
9562 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9563 fold_convert (type, t1));
9564 return t1;
9567 /* Convert ~X ^ ~Y to X ^ Y. */
9568 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9569 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9570 return fold_build2 (code, type,
9571 fold_convert (type, TREE_OPERAND (arg0, 0)),
9572 fold_convert (type, TREE_OPERAND (arg1, 0)));
9574 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9575 if (TREE_CODE (arg0) == BIT_AND_EXPR
9576 && integer_onep (TREE_OPERAND (arg0, 1))
9577 && integer_onep (arg1))
9578 return fold_build2 (EQ_EXPR, type, arg0,
9579 build_int_cst (TREE_TYPE (arg0), 0));
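/* Both sides test the low bit of X: if X is even, (X & 1) ^ 1 == 1 and
   (X & 1) == 0 is true; if X is odd, both are 0/false.  */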
9581 /* Fold (X & Y) ^ Y as ~X & Y. */
9582 if (TREE_CODE (arg0) == BIT_AND_EXPR
9583 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9585 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9586 return fold_build2 (BIT_AND_EXPR, type,
9587 fold_build1 (BIT_NOT_EXPR, type, tem),
9588 fold_convert (type, arg1));
9590 /* Fold (X & Y) ^ X as ~Y & X. */
9591 if (TREE_CODE (arg0) == BIT_AND_EXPR
9592 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9593 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9595 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9596 return fold_build2 (BIT_AND_EXPR, type,
9597 fold_build1 (BIT_NOT_EXPR, type, tem),
9598 fold_convert (type, arg1));
9600 /* Fold X ^ (X & Y) as X & ~Y. */
9601 if (TREE_CODE (arg1) == BIT_AND_EXPR
9602 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9604 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9605 return fold_build2 (BIT_AND_EXPR, type,
9606 fold_convert (type, arg0),
9607 fold_build1 (BIT_NOT_EXPR, type, tem));
9609 /* Fold X ^ (Y & X) as ~Y & X. */
9610 if (TREE_CODE (arg1) == BIT_AND_EXPR
9611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9612 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9614 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9615 return fold_build2 (BIT_AND_EXPR, type,
9616 fold_build1 (BIT_NOT_EXPR, type, tem),
9617 fold_convert (type, arg0));
9620 /* See if this can be simplified into a rotate first. If that
9621 is unsuccessful, continue in the association code. */
9622 goto bit_rotate;
9624 case BIT_AND_EXPR:
9625 if (integer_all_onesp (arg1))
9626 return non_lvalue (fold_convert (type, arg0));
9627 if (integer_zerop (arg1))
9628 return omit_one_operand (type, arg1, arg0);
9629 if (operand_equal_p (arg0, arg1, 0))
9630 return non_lvalue (fold_convert (type, arg0));
9632 /* ~X & X is always zero. */
9633 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9634 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9635 return omit_one_operand (type, integer_zero_node, arg1);
9637 /* X & ~X is always zero. */
9638 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9639 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9640 return omit_one_operand (type, integer_zero_node, arg0);
9642 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9643 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9644 && TREE_CODE (arg1) == INTEGER_CST
9645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9646 return fold_build2 (BIT_IOR_EXPR, type,
9647 fold_build2 (BIT_AND_EXPR, type,
9648 TREE_OPERAND (arg0, 0), arg1),
9649 fold_build2 (BIT_AND_EXPR, type,
9650 TREE_OPERAND (arg0, 1), arg1));
9652 /* (X | Y) & Y is (X, Y). */
9653 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9654 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9655 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9656 /* (X | Y) & X is (Y, X). */
9657 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9658 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9659 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9660 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9661 /* X & (X | Y) is (Y, X). */
9662 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9663 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9664 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9665 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9666 /* X & (Y | X) is (Y, X). */
9667 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9668 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9669 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9670 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9672 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9673 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9674 && integer_onep (TREE_OPERAND (arg0, 1))
9675 && integer_onep (arg1))
9677 tem = TREE_OPERAND (arg0, 0);
9678 return fold_build2 (EQ_EXPR, type,
9679 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9680 build_int_cst (TREE_TYPE (tem), 1)),
9681 build_int_cst (TREE_TYPE (tem), 0));
9683 /* Fold ~X & 1 as (X & 1) == 0. */
9684 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9685 && integer_onep (arg1))
9687 tem = TREE_OPERAND (arg0, 0);
9688 return fold_build2 (EQ_EXPR, type,
9689 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9690 build_int_cst (TREE_TYPE (tem), 1)),
9691 build_int_cst (TREE_TYPE (tem), 0));
9694 /* Fold (X ^ Y) & Y as ~X & Y. */
9695 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9696 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9698 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9699 return fold_build2 (BIT_AND_EXPR, type,
9700 fold_build1 (BIT_NOT_EXPR, type, tem),
9701 fold_convert (type, arg1));
9703 /* Fold (X ^ Y) & X as ~Y & X. */
9704 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9706 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9708 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9709 return fold_build2 (BIT_AND_EXPR, type,
9710 fold_build1 (BIT_NOT_EXPR, type, tem),
9711 fold_convert (type, arg1));
9713 /* Fold X & (X ^ Y) as X & ~Y. */
9714 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9717 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9718 return fold_build2 (BIT_AND_EXPR, type,
9719 fold_convert (type, arg0),
9720 fold_build1 (BIT_NOT_EXPR, type, tem));
9722 /* Fold X & (Y ^ X) as ~Y & X. */
9723 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9724 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9725 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9727 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9728 return fold_build2 (BIT_AND_EXPR, type,
9729 fold_build1 (BIT_NOT_EXPR, type, tem),
9730 fold_convert (type, arg0));
9733 t1 = distribute_bit_expr (code, type, arg0, arg1);
9734 if (t1 != NULL_TREE)
9735 return t1;
9736 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9737 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9738 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9740 unsigned int prec
9741 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9743 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9744 && (~TREE_INT_CST_LOW (arg1)
9745 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9746 return fold_convert (type, TREE_OPERAND (arg0, 0));
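/* For example, if c has type unsigned char, ((int) c & 0377) keeps all
   eight bits that c can contribute, so the mask is a no-op.  */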
9749 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9751 This results in more efficient code for machines without a NOR
9752 instruction. Combine will canonicalize to the first form
9753 which will allow use of NOR instructions provided by the
9754 backend if they exist. */
9755 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9756 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9758 return fold_build1 (BIT_NOT_EXPR, type,
9759 build2 (BIT_IOR_EXPR, type,
9760 TREE_OPERAND (arg0, 0),
9761 TREE_OPERAND (arg1, 0)));
9764 goto associate;
9766 case RDIV_EXPR:
9767 /* Don't touch a floating-point divide by zero unless the mode
9768 of the constant can represent infinity. */
9769 if (TREE_CODE (arg1) == REAL_CST
9770 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9771 && real_zerop (arg1))
9772 return NULL_TREE;
9774 /* Optimize A / A to 1.0 if we don't care about
9775 NaNs or Infinities. Skip the transformation
9776 for non-real operands. */
9777 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9778 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9779 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9780 && operand_equal_p (arg0, arg1, 0))
9782 tree r = build_real (TREE_TYPE (arg0), dconst1);
9784 return omit_two_operands (type, r, arg0, arg1);
9787 /* The complex version of the above A / A optimization. */
9788 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9789 && operand_equal_p (arg0, arg1, 0))
9791 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9792 if (! HONOR_NANS (TYPE_MODE (elem_type))
9793 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9795 tree r = build_real (elem_type, dconst1);
9796 /* omit_two_operands will call fold_convert for us. */
9797 return omit_two_operands (type, r, arg0, arg1);
9801 /* (-A) / (-B) -> A / B */
9802 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9803 return fold_build2 (RDIV_EXPR, type,
9804 TREE_OPERAND (arg0, 0),
9805 negate_expr (arg1));
9806 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9807 return fold_build2 (RDIV_EXPR, type,
9808 negate_expr (arg0),
9809 TREE_OPERAND (arg1, 0));
9811 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9812 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9813 && real_onep (arg1))
9814 return non_lvalue (fold_convert (type, arg0));
9816 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9817 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9818 && real_minus_onep (arg1))
9819 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9821 /* If ARG1 is a constant, we can convert this to a multiply by the
9822 reciprocal. This does not have the same rounding properties,
9823 so only do this if -funsafe-math-optimizations. We can actually
9824 always safely do it if ARG1 is a power of two, but it's hard to
9825 tell if it is or not in a portable manner. */
9826 if (TREE_CODE (arg1) == REAL_CST)
9828 if (flag_unsafe_math_optimizations
9829 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9830 arg1, 0)))
9831 return fold_build2 (MULT_EXPR, type, arg0, tem);
9832 /* Find the reciprocal if optimizing and the result is exact. */
9833 if (optimize)
9835 REAL_VALUE_TYPE r;
9836 r = TREE_REAL_CST (arg1);
9837 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9839 tem = build_real (type, r);
9840 return fold_build2 (MULT_EXPR, type,
9841 fold_convert (type, arg0), tem);
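/* For example, x / 2.0 becomes x * 0.5 here even without
   -funsafe-math-optimizations, because 0.5 is the exact reciprocal of
   2.0; x / 3.0 has no exact binary reciprocal and is only converted by
   the -funsafe-math-optimizations path above.  */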
9845 /* Convert A/B/C to A/(B*C). */
9846 if (flag_unsafe_math_optimizations
9847 && TREE_CODE (arg0) == RDIV_EXPR)
9848 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9849 fold_build2 (MULT_EXPR, type,
9850 TREE_OPERAND (arg0, 1), arg1));
9852 /* Convert A/(B/C) to (A/B)*C. */
9853 if (flag_unsafe_math_optimizations
9854 && TREE_CODE (arg1) == RDIV_EXPR)
9855 return fold_build2 (MULT_EXPR, type,
9856 fold_build2 (RDIV_EXPR, type, arg0,
9857 TREE_OPERAND (arg1, 0)),
9858 TREE_OPERAND (arg1, 1));
9860 /* Convert C1/(X*C2) into (C1/C2)/X. */
9861 if (flag_unsafe_math_optimizations
9862 && TREE_CODE (arg1) == MULT_EXPR
9863 && TREE_CODE (arg0) == REAL_CST
9864 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9866 tree tem = const_binop (RDIV_EXPR, arg0,
9867 TREE_OPERAND (arg1, 1), 0);
9868 if (tem)
9869 return fold_build2 (RDIV_EXPR, type, tem,
9870 TREE_OPERAND (arg1, 0));
9873 if (flag_unsafe_math_optimizations)
9875 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9876 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9878 /* Optimize sin(x)/cos(x) as tan(x). */
9879 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9880 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9881 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9882 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9883 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9885 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9887 if (tanfn != NULL_TREE)
9888 return build_function_call_expr (tanfn,
9889 TREE_OPERAND (arg0, 1));
9892 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9893 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9894 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9895 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9896 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9897 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9899 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9901 if (tanfn != NULL_TREE)
9903 tree tmp = TREE_OPERAND (arg0, 1);
9904 tmp = build_function_call_expr (tanfn, tmp);
9905 return fold_build2 (RDIV_EXPR, type,
9906 build_real (type, dconst1), tmp);
9910 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9911 NaNs or Infinities. */
9912 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9913 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9914 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9916 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9917 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9919 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9920 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9921 && operand_equal_p (arg00, arg01, 0))
9923 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9925 if (cosfn != NULL_TREE)
9926 return build_function_call_expr (cosfn,
9927 TREE_OPERAND (arg0, 1));
9931 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9932 NaNs or Infinities. */
9933 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9934 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9935 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9937 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9938 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9940 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9941 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9942 && operand_equal_p (arg00, arg01, 0))
9944 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9946 if (cosfn != NULL_TREE)
9948 tree tmp = TREE_OPERAND (arg0, 1);
9949 tmp = build_function_call_expr (cosfn, tmp);
9950 return fold_build2 (RDIV_EXPR, type,
9951 build_real (type, dconst1),
9952 tmp);
9957 /* Optimize pow(x,c)/x as pow(x,c-1). */
9958 if (fcode0 == BUILT_IN_POW
9959 || fcode0 == BUILT_IN_POWF
9960 || fcode0 == BUILT_IN_POWL)
9962 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9963 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9964 if (TREE_CODE (arg01) == REAL_CST
9965 && ! TREE_CONSTANT_OVERFLOW (arg01)
9966 && operand_equal_p (arg1, arg00, 0))
9968 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9969 REAL_VALUE_TYPE c;
9970 tree arg, arglist;
9972 c = TREE_REAL_CST (arg01);
9973 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9974 arg = build_real (type, c);
9975 arglist = build_tree_list (NULL_TREE, arg);
9976 arglist = tree_cons (NULL_TREE, arg1, arglist);
9977 return build_function_call_expr (powfn, arglist);
9981 /* Optimize x/expN(y) into x*expN(-y). */
9982 if (BUILTIN_EXPONENT_P (fcode1))
9984 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9985 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9986 tree arglist = build_tree_list (NULL_TREE,
9987 fold_convert (type, arg));
9988 arg1 = build_function_call_expr (expfn, arglist);
9989 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9992 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9993 if (fcode1 == BUILT_IN_POW
9994 || fcode1 == BUILT_IN_POWF
9995 || fcode1 == BUILT_IN_POWL)
9997 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9998 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9999 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10000 tree neg11 = fold_convert (type, negate_expr (arg11));
10001 tree arglist = tree_cons(NULL_TREE, arg10,
10002 build_tree_list (NULL_TREE, neg11));
10003 arg1 = build_function_call_expr (powfn, arglist);
10004 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10007 return NULL_TREE;
10009 case TRUNC_DIV_EXPR:
10010 case FLOOR_DIV_EXPR:
10011 /* Simplify A / (B << N) where A and B are positive and B is
10012 a power of 2, to A >> (N + log2(B)). */
10013 strict_overflow_p = false;
10014 if (TREE_CODE (arg1) == LSHIFT_EXPR
10015 && (TYPE_UNSIGNED (type)
10016 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10018 tree sval = TREE_OPERAND (arg1, 0);
10019 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10021 tree sh_cnt = TREE_OPERAND (arg1, 1);
10022 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10024 if (strict_overflow_p)
10025 fold_overflow_warning (("assuming signed overflow does not "
10026 "occur when simplifying A / (B << N)"),
10027 WARN_STRICT_OVERFLOW_MISC);
10029 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10030 sh_cnt, build_int_cst (NULL_TREE, pow2));
10031 return fold_build2 (RSHIFT_EXPR, type,
10032 fold_convert (type, arg0), sh_cnt);
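/* For example, with B == 4 (log2(B) == 2) and A known non-negative,
   A / (4 << N) becomes A >> (N + 2).  */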
10035 /* Fall thru */
10037 case ROUND_DIV_EXPR:
10038 case CEIL_DIV_EXPR:
10039 case EXACT_DIV_EXPR:
10040 if (integer_onep (arg1))
10041 return non_lvalue (fold_convert (type, arg0));
10042 if (integer_zerop (arg1))
10043 return NULL_TREE;
10044 /* X / -1 is -X. */
10045 if (!TYPE_UNSIGNED (type)
10046 && TREE_CODE (arg1) == INTEGER_CST
10047 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10048 && TREE_INT_CST_HIGH (arg1) == -1)
10049 return fold_convert (type, negate_expr (arg0));
10051 /* Convert -A / -B to A / B when the type is signed and overflow is
10052 undefined. */
10053 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10054 && TREE_CODE (arg0) == NEGATE_EXPR
10055 && negate_expr_p (arg1))
10057 if (INTEGRAL_TYPE_P (type))
10058 fold_overflow_warning (("assuming signed overflow does not occur "
10059 "when distributing negation across "
10060 "division"),
10061 WARN_STRICT_OVERFLOW_MISC);
10062 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10063 negate_expr (arg1));
10065 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10066 && TREE_CODE (arg1) == NEGATE_EXPR
10067 && negate_expr_p (arg0))
10069 if (INTEGRAL_TYPE_P (type))
10070 fold_overflow_warning (("assuming signed overflow does not occur "
10071 "when distributing negation across "
10072 "division"),
10073 WARN_STRICT_OVERFLOW_MISC);
10074 return fold_build2 (code, type, negate_expr (arg0),
10075 TREE_OPERAND (arg1, 0));
10078 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10079 operation, EXACT_DIV_EXPR.
10081 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10082 At one time others generated faster code; it's not clear whether they
10083 do after the last round of changes to the DIV code in expmed.c. */
10084 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10085 && multiple_of_p (type, arg0, arg1))
10086 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10088 strict_overflow_p = false;
10089 if (TREE_CODE (arg1) == INTEGER_CST
10090 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10091 &strict_overflow_p)))
10093 if (strict_overflow_p)
10094 fold_overflow_warning (("assuming signed overflow does not occur "
10095 "when simplifying division"),
10096 WARN_STRICT_OVERFLOW_MISC);
10097 return fold_convert (type, tem);
10100 return NULL_TREE;
10102 case CEIL_MOD_EXPR:
10103 case FLOOR_MOD_EXPR:
10104 case ROUND_MOD_EXPR:
10105 case TRUNC_MOD_EXPR:
10106 /* X % 1 is always zero, but be sure to preserve any side
10107 effects in X. */
10108 if (integer_onep (arg1))
10109 return omit_one_operand (type, integer_zero_node, arg0);
10111 /* For X % 0, return X % 0 unchanged so that we get the
10112 proper warnings and errors. */
10113 if (integer_zerop (arg1))
10114 return NULL_TREE;
10116 /* 0 % X is always zero, but be sure to preserve any side
10117 effects in X. Place this after checking for X == 0. */
10118 if (integer_zerop (arg0))
10119 return omit_one_operand (type, integer_zero_node, arg1);
10121 /* X % -1 is zero. */
10122 if (!TYPE_UNSIGNED (type)
10123 && TREE_CODE (arg1) == INTEGER_CST
10124 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10125 && TREE_INT_CST_HIGH (arg1) == -1)
10126 return omit_one_operand (type, integer_zero_node, arg0);
10128 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10129 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10130 strict_overflow_p = false;
10131 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10132 && (TYPE_UNSIGNED (type)
10133 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10135 tree c = arg1;
10136 /* Also optimize A % (C << N) where C is a power of 2,
10137 to A & ((C << N) - 1). */
10138 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10139 c = TREE_OPERAND (arg1, 0);
10141 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10143 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10144 arg1, integer_one_node);
10145 if (strict_overflow_p)
10146 fold_overflow_warning (("assuming signed overflow does not "
10147 "occur when simplifying "
10148 "X % (power of two)"),
10149 WARN_STRICT_OVERFLOW_MISC);
10150 return fold_build2 (BIT_AND_EXPR, type,
10151 fold_convert (type, arg0),
10152 fold_convert (type, mask));
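/* For example, for non-negative X, X % 8 becomes X & 7, and
   X % (4 << N) becomes X & ((4 << N) - 1).  */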
10156 /* X % -C is the same as X % C. */
10157 if (code == TRUNC_MOD_EXPR
10158 && !TYPE_UNSIGNED (type)
10159 && TREE_CODE (arg1) == INTEGER_CST
10160 && !TREE_CONSTANT_OVERFLOW (arg1)
10161 && TREE_INT_CST_HIGH (arg1) < 0
10162 && !TYPE_OVERFLOW_TRAPS (type)
10163 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10164 && !sign_bit_p (arg1, arg1))
10165 return fold_build2 (code, type, fold_convert (type, arg0),
10166 fold_convert (type, negate_expr (arg1)));
10168 /* X % -Y is the same as X % Y. */
10169 if (code == TRUNC_MOD_EXPR
10170 && !TYPE_UNSIGNED (type)
10171 && TREE_CODE (arg1) == NEGATE_EXPR
10172 && !TYPE_OVERFLOW_TRAPS (type))
10173 return fold_build2 (code, type, fold_convert (type, arg0),
10174 fold_convert (type, TREE_OPERAND (arg1, 0)));
10176 if (TREE_CODE (arg1) == INTEGER_CST
10177 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10178 &strict_overflow_p)))
10180 if (strict_overflow_p)
10181 fold_overflow_warning (("assuming signed overflow does not occur "
10182 "when simplifying modulos"),
10183 WARN_STRICT_OVERFLOW_MISC);
10184 return fold_convert (type, tem);
10187 return NULL_TREE;
10189 case LROTATE_EXPR:
10190 case RROTATE_EXPR:
10191 if (integer_all_onesp (arg0))
10192 return omit_one_operand (type, arg0, arg1);
10193 goto shift;
10195 case RSHIFT_EXPR:
10196 /* Optimize -1 >> x for arithmetic right shifts. */
10197 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10198 return omit_one_operand (type, arg0, arg1);
10199 /* ... fall through ... */
10201 case LSHIFT_EXPR:
10202 shift:
10203 if (integer_zerop (arg1))
10204 return non_lvalue (fold_convert (type, arg0));
10205 if (integer_zerop (arg0))
10206 return omit_one_operand (type, arg0, arg1);
10208 /* Since a negative shift count is not well-defined,
10209 don't try to compute it in the compiler. */
10210 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10211 return NULL_TREE;
10213 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10214 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10215 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10216 && host_integerp (TREE_OPERAND (arg0, 1), false)
10217 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10219 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10220 + TREE_INT_CST_LOW (arg1));
10222 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10223 being well defined. */
10224 if (low >= TYPE_PRECISION (type))
10226 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10227 low = low % TYPE_PRECISION (type);
10228 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10229 return build_int_cst (type, 0);
10230 else
10231 low = TYPE_PRECISION (type) - 1;
10234 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10235 build_int_cst (type, low));
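/* For example, (x >> 3) >> 5 becomes x >> 8.  If the combined count
   reaches the precision, say (x >> 20) >> 20 on a 32-bit type, the
   result is 0 for unsigned x and x >> 31 for signed x, while rotate
   counts simply wrap around modulo the precision.  */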
10238 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10239 into x & ((unsigned)-1 >> c) for unsigned types. */
10240 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10241 || (TYPE_UNSIGNED (type)
10242 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10243 && host_integerp (arg1, false)
10244 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10245 && host_integerp (TREE_OPERAND (arg0, 1), false)
10246 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10248 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10249 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10250 tree lshift;
10251 tree arg00;
10253 if (low0 == low1)
10255 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10257 lshift = build_int_cst (type, -1);
10258 lshift = int_const_binop (code, lshift, arg1, 0);
10260 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
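/* For example, on a 32-bit type with c == 4, (x >> 4) << 4 becomes
   x & 0xFFFFFFF0, and for unsigned x, (x << 4) >> 4 becomes
   x & 0x0FFFFFFF.  */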
10264 /* Rewrite an LROTATE_EXPR by a constant into an
10265 RROTATE_EXPR by a new constant. */
10266 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10268 tree tem = build_int_cst (NULL_TREE,
10269 GET_MODE_BITSIZE (TYPE_MODE (type)));
10270 tem = fold_convert (TREE_TYPE (arg1), tem);
10271 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10272 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
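/* For example, rotating a 32-bit value left by 3 is rewritten as
   rotating it right by 32 - 3 == 29.  */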
10275 /* If we have a rotate of a bit operation with the rotate count and
10276 the second operand of the bit operation both constant,
10277 permute the two operations. */
10278 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10279 && (TREE_CODE (arg0) == BIT_AND_EXPR
10280 || TREE_CODE (arg0) == BIT_IOR_EXPR
10281 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10282 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10283 return fold_build2 (TREE_CODE (arg0), type,
10284 fold_build2 (code, type,
10285 TREE_OPERAND (arg0, 0), arg1),
10286 fold_build2 (code, type,
10287 TREE_OPERAND (arg0, 1), arg1));
10289 /* Two consecutive rotates adding up to the width of the mode can
10290 be ignored. */
10291 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10292 && TREE_CODE (arg0) == RROTATE_EXPR
10293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10294 && TREE_INT_CST_HIGH (arg1) == 0
10295 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10296 && ((TREE_INT_CST_LOW (arg1)
10297 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10298 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10299 return TREE_OPERAND (arg0, 0);
10301 return NULL_TREE;
10303 case MIN_EXPR:
10304 if (operand_equal_p (arg0, arg1, 0))
10305 return omit_one_operand (type, arg0, arg1);
10306 if (INTEGRAL_TYPE_P (type)
10307 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10308 return omit_one_operand (type, arg1, arg0);
10309 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10310 if (tem)
10311 return tem;
10312 goto associate;
10314 case MAX_EXPR:
10315 if (operand_equal_p (arg0, arg1, 0))
10316 return omit_one_operand (type, arg0, arg1);
10317 if (INTEGRAL_TYPE_P (type)
10318 && TYPE_MAX_VALUE (type)
10319 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10320 return omit_one_operand (type, arg1, arg0);
10321 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10322 if (tem)
10323 return tem;
10324 goto associate;
10326 case TRUTH_ANDIF_EXPR:
10327 /* Note that the operands of this must be ints
10328 and their values must be 0 or 1.
10329 ("true" is a fixed value perhaps depending on the language.) */
10330 /* If first arg is constant zero, return it. */
10331 if (integer_zerop (arg0))
10332 return fold_convert (type, arg0);
10333 case TRUTH_AND_EXPR:
10334 /* If either arg is constant true, drop it. */
10335 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10336 return non_lvalue (fold_convert (type, arg1));
10337 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10338 /* Preserve sequence points. */
10339 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10340 return non_lvalue (fold_convert (type, arg0));
10341 /* If second arg is constant zero, result is zero, but first arg
10342 must be evaluated. */
10343 if (integer_zerop (arg1))
10344 return omit_one_operand (type, arg1, arg0);
10345 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10346 case will be handled here. */
10347 if (integer_zerop (arg0))
10348 return omit_one_operand (type, arg0, arg1);
10350 /* !X && X is always false. */
10351 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10352 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10353 return omit_one_operand (type, integer_zero_node, arg1);
10354 /* X && !X is always false. */
10355 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10356 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10357 return omit_one_operand (type, integer_zero_node, arg0);
10359 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10360 means A >= Y && A != MAX, but in this case we know that
10361 A < X <= MAX. */
10363 if (!TREE_SIDE_EFFECTS (arg0)
10364 && !TREE_SIDE_EFFECTS (arg1))
10366 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10367 if (tem && !operand_equal_p (tem, arg0, 0))
10368 return fold_build2 (code, type, tem, arg1);
10370 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10371 if (tem && !operand_equal_p (tem, arg1, 0))
10372 return fold_build2 (code, type, arg0, tem);
10375 truth_andor:
10376 /* We only do these simplifications if we are optimizing. */
10377 if (!optimize)
10378 return NULL_TREE;
10380 /* Check for things like (A || B) && (A || C). We can convert this
10381 to A || (B && C). Note that either operator can be any of the four
10382 truth and/or operations and the transformation will still be
10383 valid. Also note that we only care about order for the
10384 ANDIF and ORIF operators. If B contains side effects, this
10385 might change the truth-value of A. */
10386 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10387 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10388 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10389 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10390 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10391 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10393 tree a00 = TREE_OPERAND (arg0, 0);
10394 tree a01 = TREE_OPERAND (arg0, 1);
10395 tree a10 = TREE_OPERAND (arg1, 0);
10396 tree a11 = TREE_OPERAND (arg1, 1);
10397 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10398 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10399 && (code == TRUTH_AND_EXPR
10400 || code == TRUTH_OR_EXPR));
10402 if (operand_equal_p (a00, a10, 0))
10403 return fold_build2 (TREE_CODE (arg0), type, a00,
10404 fold_build2 (code, type, a01, a11));
10405 else if (commutative && operand_equal_p (a00, a11, 0))
10406 return fold_build2 (TREE_CODE (arg0), type, a00,
10407 fold_build2 (code, type, a01, a10));
10408 else if (commutative && operand_equal_p (a01, a10, 0))
10409 return fold_build2 (TREE_CODE (arg0), type, a01,
10410 fold_build2 (code, type, a00, a11));
10412 /* This case is tricky because we must either have commutative
10413 operators or else A10 must not have side-effects. */
10415 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10416 && operand_equal_p (a01, a11, 0))
10417 return fold_build2 (TREE_CODE (arg0), type,
10418 fold_build2 (code, type, a00, a10),
10419 a01);
10422 /* See if we can build a range comparison. */
10423 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10424 return tem;
10426 /* Check for the possibility of merging component references. If our
10427 lhs is another similar operation, try to merge its rhs with our
10428 rhs. Then try to merge our lhs and rhs. */
10429 if (TREE_CODE (arg0) == code
10430 && 0 != (tem = fold_truthop (code, type,
10431 TREE_OPERAND (arg0, 1), arg1)))
10432 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10434 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10435 return tem;
10437 return NULL_TREE;
10439 case TRUTH_ORIF_EXPR:
10440 /* Note that the operands of this must be ints
10441 and their values must be 0 or true.
10442 ("true" is a fixed value perhaps depending on the language.) */
10443 /* If first arg is constant true, return it. */
10444 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10445 return fold_convert (type, arg0);
10446 case TRUTH_OR_EXPR:
10447 /* If either arg is constant zero, drop it. */
10448 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10449 return non_lvalue (fold_convert (type, arg1));
10450 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10451 /* Preserve sequence points. */
10452 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10453 return non_lvalue (fold_convert (type, arg0));
10454 /* If second arg is constant true, result is true, but we must
10455 evaluate first arg. */
10456 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10457 return omit_one_operand (type, arg1, arg0);
10458 /* Likewise for first arg, but note this only occurs here for
10459 TRUTH_OR_EXPR. */
10460 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10461 return omit_one_operand (type, arg0, arg1);
10463 /* !X || X is always true. */
10464 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10465 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10466 return omit_one_operand (type, integer_one_node, arg1);
10467 /* X || !X is always true. */
10468 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10469 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10470 return omit_one_operand (type, integer_one_node, arg0);
10472 goto truth_andor;
10474 case TRUTH_XOR_EXPR:
10475 /* If the second arg is constant zero, drop it. */
10476 if (integer_zerop (arg1))
10477 return non_lvalue (fold_convert (type, arg0));
10478 /* If the second arg is constant true, this is a logical inversion. */
10479 if (integer_onep (arg1))
10481 /* Only call invert_truthvalue if operand is a truth value. */
10482 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10483 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10484 else
10485 tem = invert_truthvalue (arg0);
10486 return non_lvalue (fold_convert (type, tem));
10488 /* Identical arguments cancel to zero. */
10489 if (operand_equal_p (arg0, arg1, 0))
10490 return omit_one_operand (type, integer_zero_node, arg0);
10492 /* !X ^ X is always true. */
10493 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10494 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10495 return omit_one_operand (type, integer_one_node, arg1);
10497 /* X ^ !X is always true. */
10498 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10499 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10500 return omit_one_operand (type, integer_one_node, arg0);
10502 return NULL_TREE;
10504 case EQ_EXPR:
10505 case NE_EXPR:
10506 tem = fold_comparison (code, type, op0, op1);
10507 if (tem != NULL_TREE)
10508 return tem;
10510 /* bool_var != 0 becomes bool_var. */
10511 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10512 && code == NE_EXPR)
10513 return non_lvalue (fold_convert (type, arg0));
10515 /* bool_var == 1 becomes bool_var. */
10516 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10517 && code == EQ_EXPR)
10518 return non_lvalue (fold_convert (type, arg0));
10520 /* bool_var != 1 becomes !bool_var. */
10521 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10522 && code == NE_EXPR)
10523 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10525 /* bool_var == 0 becomes !bool_var. */
10526 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10527 && code == EQ_EXPR)
10528 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10530 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10531 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10532 && TREE_CODE (arg1) == INTEGER_CST)
10534 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10535 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10536 fold_build1 (BIT_NOT_EXPR, cmp_type,
10537 fold_convert (cmp_type, arg1)));
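/* Bitwise NOT is an involution, so for example ~a == 0 folds to
   a == -1, and ~a != 5 folds to a != ~5.  */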
10540 /* If this is an equality comparison of the address of a non-weak
10541 object against zero, then we know the result. */
10542 if (TREE_CODE (arg0) == ADDR_EXPR
10543 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10544 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10545 && integer_zerop (arg1))
10546 return constant_boolean_node (code != EQ_EXPR, type);
10548 /* If this is an equality comparison of the address of two non-weak,
10549 unaliased symbols neither of which are extern (since we do not
10550 have access to attributes for externs), then we know the result. */
10551 if (TREE_CODE (arg0) == ADDR_EXPR
10552 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10553 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10554 && ! lookup_attribute ("alias",
10555 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10556 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10557 && TREE_CODE (arg1) == ADDR_EXPR
10558 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10559 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10560 && ! lookup_attribute ("alias",
10561 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10562 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10564 /* We know that we're looking at the address of two
10565 non-weak, unaliased, static _DECL nodes.
10567 It is both wasteful and incorrect to call operand_equal_p
10568 to compare the two ADDR_EXPR nodes. It is wasteful in that
10569 all we need to do is test pointer equality for the arguments
10570 to the two ADDR_EXPR nodes. It is incorrect to use
10571 operand_equal_p as that function is NOT equivalent to a
10572 C equality test. It can in fact return false for two
10573 objects which would test as equal using the C equality
10574 operator. */
10575 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10576 return constant_boolean_node (equal
10577 ? code == EQ_EXPR : code != EQ_EXPR,
10578 type);
10581 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10582 a MINUS_EXPR of a constant, we can convert it into a comparison with
10583 a revised constant as long as no overflow occurs. */
10584 if (TREE_CODE (arg1) == INTEGER_CST
10585 && (TREE_CODE (arg0) == PLUS_EXPR
10586 || TREE_CODE (arg0) == MINUS_EXPR)
10587 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10588 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10589 ? MINUS_EXPR : PLUS_EXPR,
10590 fold_convert (TREE_TYPE (arg0), arg1),
10591 TREE_OPERAND (arg0, 1), 0))
10592 && ! TREE_CONSTANT_OVERFLOW (tem))
10593 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10595 /* Similarly for a NEGATE_EXPR. */
10596 if (TREE_CODE (arg0) == NEGATE_EXPR
10597 && TREE_CODE (arg1) == INTEGER_CST
10598 && 0 != (tem = negate_expr (arg1))
10599 && TREE_CODE (tem) == INTEGER_CST
10600 && ! TREE_CONSTANT_OVERFLOW (tem))
10601 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10603 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10604 for !=. Don't do this for ordered comparisons due to overflow. */
10605 if (TREE_CODE (arg0) == MINUS_EXPR
10606 && integer_zerop (arg1))
10607 return fold_build2 (code, type,
10608 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10610 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10611 if (TREE_CODE (arg0) == ABS_EXPR
10612 && (integer_zerop (arg1) || real_zerop (arg1)))
10613 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10615 /* If this is an EQ or NE comparison with zero and ARG0 is
10616 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10617 two operations, but the latter can be done in one less insn
10618 on machines that have only two-operand insns or on which a
10619 constant cannot be the first operand. */
10620 if (TREE_CODE (arg0) == BIT_AND_EXPR
10621 && integer_zerop (arg1))
10623 tree arg00 = TREE_OPERAND (arg0, 0);
10624 tree arg01 = TREE_OPERAND (arg0, 1);
10625 if (TREE_CODE (arg00) == LSHIFT_EXPR
10626 && integer_onep (TREE_OPERAND (arg00, 0)))
10627 return
10628 fold_build2 (code, type,
10629 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10630 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10631 arg01, TREE_OPERAND (arg00, 1)),
10632 fold_convert (TREE_TYPE (arg0),
10633 integer_one_node)),
10634 arg1);
10635 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10636 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10637 return
10638 fold_build2 (code, type,
10639 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10640 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10641 arg00, TREE_OPERAND (arg01, 1)),
10642 fold_convert (TREE_TYPE (arg0),
10643 integer_one_node)),
10644 arg1);
10647 /* If this is an NE or EQ comparison of zero against the result of a
10648 signed MOD operation whose second operand is a power of 2, make
10649 the MOD operation unsigned since it is simpler and equivalent. */
10650 if (integer_zerop (arg1)
10651 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10652 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10653 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10654 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10655 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10656 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10658 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10659 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10660 fold_convert (newtype,
10661 TREE_OPERAND (arg0, 0)),
10662 fold_convert (newtype,
10663 TREE_OPERAND (arg0, 1)));
10665 return fold_build2 (code, type, newmod,
10666 fold_convert (newtype, arg1));
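	  /* For illustration (editor's sketch, not from the original
	     source): on a two's complement target,

	       int f (int x) { return x % 8 == 0; }

	     is folded as if it were "return (unsigned) x % 8u == 0;",
	     which needs no sign correction on the remainder.  */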
10669 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10670 C1 is a valid shift constant, and C2 is a power of two, i.e.
10671 a single bit. */
10672 if (TREE_CODE (arg0) == BIT_AND_EXPR
10673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10674 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10675 == INTEGER_CST
10676 && integer_pow2p (TREE_OPERAND (arg0, 1))
10677 && integer_zerop (arg1))
10679 tree itype = TREE_TYPE (arg0);
10680 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10681 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10683 /* Check for a valid shift count. */
10684 if (TREE_INT_CST_HIGH (arg001) == 0
10685 && TREE_INT_CST_LOW (arg001) < prec)
10687 tree arg01 = TREE_OPERAND (arg0, 1);
10688 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10689 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10690 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10691 can be rewritten as (X & (C2 << C1)) != 0. */
10692 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10694 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10695 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10696 return fold_build2 (code, type, tem, arg1);
10698 /* Otherwise, for signed (arithmetic) shifts,
10699 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10700 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10701 else if (!TYPE_UNSIGNED (itype))
10702 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10703 arg000, build_int_cst (itype, 0));
10704 /* Otherwise, for unsigned (logical) shifts,
10705 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10706 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10707 else
10708 return omit_one_operand (type,
10709 code == EQ_EXPR ? integer_one_node
10710 : integer_zero_node,
10711 arg000);
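	  /* For illustration (editor's sketch, not from the original
	     source):

	       int f (int x) { return ((x >> 3) & 4) != 0; }

	     becomes "return (x & 32) != 0;" since (4 << 3) == 32 does not
	     overflow, while "((x >> 31) & 2) != 0" (32-bit int, arithmetic
	     shift) becomes the sign test "x < 0".  */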
10715 /* If this is an NE comparison of zero with an AND of one, remove the
10716 comparison since the AND will give the correct value. */
10717 if (code == NE_EXPR
10718 && integer_zerop (arg1)
10719 && TREE_CODE (arg0) == BIT_AND_EXPR
10720 && integer_onep (TREE_OPERAND (arg0, 1)))
10721 return fold_convert (type, arg0);
10723 /* If we have (A & C) == C where C is a power of 2, convert this into
10724 (A & C) != 0. Similarly for NE_EXPR. */
10725 if (TREE_CODE (arg0) == BIT_AND_EXPR
10726 && integer_pow2p (TREE_OPERAND (arg0, 1))
10727 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10728 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10729 arg0, fold_convert (TREE_TYPE (arg0),
10730 integer_zero_node));
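	  /* For illustration (editor's sketch, not from the original
	     source):

	       int f (int x) { return (x & 16) == 16; }

	     is folded as if it were "return (x & 16) != 0;".  */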
10732 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10733 bit, then fold the expression into A < 0 or A >= 0. */
10734 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10735 if (tem)
10736 return tem;
10738 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10739 Similarly for NE_EXPR. */
10740 if (TREE_CODE (arg0) == BIT_AND_EXPR
10741 && TREE_CODE (arg1) == INTEGER_CST
10742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10744 tree notc = fold_build1 (BIT_NOT_EXPR,
10745 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10746 TREE_OPERAND (arg0, 1));
10747 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10748 arg1, notc);
10749 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10750 if (integer_nonzerop (dandnotc))
10751 return omit_one_operand (type, rslt, arg0);
10754 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10755 Similarly for NE_EXPR. */
10756 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10757 && TREE_CODE (arg1) == INTEGER_CST
10758 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10760 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10761 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10762 TREE_OPERAND (arg0, 1), notd);
10763 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10764 if (integer_nonzerop (candnotd))
10765 return omit_one_operand (type, rslt, arg0);
10768 /* If this is a comparison of a field, we may be able to simplify it. */
10769 if (((TREE_CODE (arg0) == COMPONENT_REF
10770 && lang_hooks.can_use_bit_fields_p ())
10771 || TREE_CODE (arg0) == BIT_FIELD_REF)
10772 /* Handle the constant case even without -O
10773 to make sure the warnings are given. */
10774 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10776 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10777 if (t1)
10778 return t1;
10781 /* Optimize comparisons of strlen vs zero to a compare of the
10782 first character of the string vs zero. To wit,
10783 strlen(ptr) == 0 => *ptr == 0
10784 strlen(ptr) != 0 => *ptr != 0
10785 Other cases should reduce to one of these two (or a constant)
10786 due to the return value of strlen being unsigned. */
10787 if (TREE_CODE (arg0) == CALL_EXPR
10788 && integer_zerop (arg1))
10790 tree fndecl = get_callee_fndecl (arg0);
10791 tree arglist;
10793 if (fndecl
10794 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10795 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10796 && (arglist = TREE_OPERAND (arg0, 1))
10797 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10798 && ! TREE_CHAIN (arglist))
10800 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10801 return fold_build2 (code, type, iref,
10802 build_int_cst (TREE_TYPE (iref), 0));
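	      /* For illustration (editor's sketch, not from the original
		 source):

		   #include <string.h>
		   int empty (const char *p) { return strlen (p) == 0; }

		 is folded as if it were "return *p == 0;".  */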
10806 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10807 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10808 if (TREE_CODE (arg0) == RSHIFT_EXPR
10809 && integer_zerop (arg1)
10810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10812 tree arg00 = TREE_OPERAND (arg0, 0);
10813 tree arg01 = TREE_OPERAND (arg0, 1);
10814 tree itype = TREE_TYPE (arg00);
10815 if (TREE_INT_CST_HIGH (arg01) == 0
10816 && TREE_INT_CST_LOW (arg01)
10817 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10819 if (TYPE_UNSIGNED (itype))
10821 itype = lang_hooks.types.signed_type (itype);
10822 arg00 = fold_convert (itype, arg00);
10824 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10825 type, arg00, build_int_cst (itype, 0));
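	  /* For illustration (editor's sketch, not from the original
	     source), with 32-bit int:

	       int f (int x) { return (x >> 31) != 0; }

	     is folded to the sign test "x < 0", and the "== 0" form
	     likewise becomes "x >= 0".  */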
10829 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10830 if (integer_zerop (arg1)
10831 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10832 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10833 TREE_OPERAND (arg0, 1));
10835 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10836 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10837 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10838 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10839 build_int_cst (TREE_TYPE (arg1), 0));
10840 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10841 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10843 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10844 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10845 build_int_cst (TREE_TYPE (arg1), 0));
10847 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10848 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10849 && TREE_CODE (arg1) == INTEGER_CST
10850 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10851 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10852 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10853 TREE_OPERAND (arg0, 1), arg1));
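	  /* For illustration (editor's sketch, not from the original
	     source):

	       int f (int x) { return (x ^ 5) == 3; }

	     is folded as if it were "return x == 6;", since 5 ^ 3 == 6.  */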
10855 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10856 (X & C) == 0 when C is a single bit. */
10857 if (TREE_CODE (arg0) == BIT_AND_EXPR
10858 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10859 && integer_zerop (arg1)
10860 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10862 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10863 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10864 TREE_OPERAND (arg0, 1));
10865 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10866 type, tem, arg1);
10869 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10870 constant C is a power of two, i.e. a single bit. */
10871 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10872 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10873 && integer_zerop (arg1)
10874 && integer_pow2p (TREE_OPERAND (arg0, 1))
10875 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10876 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10878 tree arg00 = TREE_OPERAND (arg0, 0);
10879 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10880 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10883 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10884 when C is a power of two, i.e. a single bit. */
10885 if (TREE_CODE (arg0) == BIT_AND_EXPR
10886 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10887 && integer_zerop (arg1)
10888 && integer_pow2p (TREE_OPERAND (arg0, 1))
10889 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10890 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10892 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10893 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10894 arg000, TREE_OPERAND (arg0, 1));
10895 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10896 tem, build_int_cst (TREE_TYPE (tem), 0));
10899 if (integer_zerop (arg1)
10900 && tree_expr_nonzero_p (arg0))
10902 tree res = constant_boolean_node (code == NE_EXPR, type);
10903 return omit_one_operand (type, res, arg0);
10905 return NULL_TREE;
10907 case LT_EXPR:
10908 case GT_EXPR:
10909 case LE_EXPR:
10910 case GE_EXPR:
10911 tem = fold_comparison (code, type, op0, op1);
10912 if (tem != NULL_TREE)
10913 return tem;
10915 /* Transform comparisons of the form X +- C CMP X. */
10916 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10917 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10918 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10919 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10920 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10921 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10923 tree arg01 = TREE_OPERAND (arg0, 1);
10924 enum tree_code code0 = TREE_CODE (arg0);
10925 int is_positive;
10927 if (TREE_CODE (arg01) == REAL_CST)
10928 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10929 else
10930 is_positive = tree_int_cst_sgn (arg01);
10932 /* (X - c) > X becomes false. */
10933 if (code == GT_EXPR
10934 && ((code0 == MINUS_EXPR && is_positive >= 0)
10935 || (code0 == PLUS_EXPR && is_positive <= 0)))
10937 if (TREE_CODE (arg01) == INTEGER_CST
10938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10939 fold_overflow_warning (("assuming signed overflow does not "
10940 "occur when assuming that (X - c) > X "
10941 "is always false"),
10942 WARN_STRICT_OVERFLOW_ALL);
10943 return constant_boolean_node (0, type);
10946 /* Likewise (X + c) < X becomes false. */
10947 if (code == LT_EXPR
10948 && ((code0 == PLUS_EXPR && is_positive >= 0)
10949 || (code0 == MINUS_EXPR && is_positive <= 0)))
10951 if (TREE_CODE (arg01) == INTEGER_CST
10952 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10953 fold_overflow_warning (("assuming signed overflow does not "
10954 "occur when assuming that "
10955 "(X + c) < X is always false"),
10956 WARN_STRICT_OVERFLOW_ALL);
10957 return constant_boolean_node (0, type);
10960 /* Convert (X - c) <= X to true. */
10961 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10962 && code == LE_EXPR
10963 && ((code0 == MINUS_EXPR && is_positive >= 0)
10964 || (code0 == PLUS_EXPR && is_positive <= 0)))
10966 if (TREE_CODE (arg01) == INTEGER_CST
10967 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10968 fold_overflow_warning (("assuming signed overflow does not "
10969 "occur when assuming that "
10970 "(X - c) <= X is always true"),
10971 WARN_STRICT_OVERFLOW_ALL);
10972 return constant_boolean_node (1, type);
10975 /* Convert (X + c) >= X to true. */
10976 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10977 && code == GE_EXPR
10978 && ((code0 == PLUS_EXPR && is_positive >= 0)
10979 || (code0 == MINUS_EXPR && is_positive <= 0)))
10981 if (TREE_CODE (arg01) == INTEGER_CST
10982 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10983 fold_overflow_warning (("assuming signed overflow does not "
10984 "occur when assuming that "
10985 "(X + c) >= X is always true"),
10986 WARN_STRICT_OVERFLOW_ALL);
10987 return constant_boolean_node (1, type);
10990 if (TREE_CODE (arg01) == INTEGER_CST)
10992 /* Convert X + c > X and X - c < X to true for integers. */
10993 if (code == GT_EXPR
10994 && ((code0 == PLUS_EXPR && is_positive > 0)
10995 || (code0 == MINUS_EXPR && is_positive < 0)))
10997 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10998 fold_overflow_warning (("assuming signed overflow does "
10999 "not occur when assuming that "
11000 "(X + c) > X is always true"),
11001 WARN_STRICT_OVERFLOW_ALL);
11002 return constant_boolean_node (1, type);
11005 if (code == LT_EXPR
11006 && ((code0 == MINUS_EXPR && is_positive > 0)
11007 || (code0 == PLUS_EXPR && is_positive < 0)))
11009 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11010 fold_overflow_warning (("assuming signed overflow does "
11011 "not occur when assuming that "
11012 "(X - c) < X is always true"),
11013 WARN_STRICT_OVERFLOW_ALL);
11014 return constant_boolean_node (1, type);
11017 /* Convert X + c <= X and X - c >= X to false for integers. */
11018 if (code == LE_EXPR
11019 && ((code0 == PLUS_EXPR && is_positive > 0)
11020 || (code0 == MINUS_EXPR && is_positive < 0)))
11022 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11023 fold_overflow_warning (("assuming signed overflow does "
11024 "not occur when assuming that "
11025 "(X + c) <= X is always false"),
11026 WARN_STRICT_OVERFLOW_ALL);
11027 return constant_boolean_node (0, type);
11030 if (code == GE_EXPR
11031 && ((code0 == MINUS_EXPR && is_positive > 0)
11032 || (code0 == PLUS_EXPR && is_positive < 0)))
11034 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11035 fold_overflow_warning (("assuming signed overflow does "
11036 "not occur when assuming that "
11037 "(X - c) >= X is always true"),
11038 WARN_STRICT_OVERFLOW_ALL);
11039 return constant_boolean_node (0, type);
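	  /* For illustration (editor's sketch, not from the original
	     source): with signed overflow undefined (e.g. without
	     -fwrapv),

	       int f (int x) { return x + 1 > x; }

	     folds to the constant 1, and -Wstrict-overflow can warn that
	     the simplification assumes no overflow occurs.  */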
11044 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11045 This transformation affects the cases which are handled in later
11046 optimizations involving comparisons with non-negative constants. */
11047 if (TREE_CODE (arg1) == INTEGER_CST
11048 && TREE_CODE (arg0) != INTEGER_CST
11049 && tree_int_cst_sgn (arg1) > 0)
11051 if (code == GE_EXPR)
11053 arg1 = const_binop (MINUS_EXPR, arg1,
11054 build_int_cst (TREE_TYPE (arg1), 1), 0);
11055 return fold_build2 (GT_EXPR, type, arg0,
11056 fold_convert (TREE_TYPE (arg0), arg1));
11058 if (code == LT_EXPR)
11060 arg1 = const_binop (MINUS_EXPR, arg1,
11061 build_int_cst (TREE_TYPE (arg1), 1), 0);
11062 return fold_build2 (LE_EXPR, type, arg0,
11063 fold_convert (TREE_TYPE (arg0), arg1));
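	  /* For illustration (editor's sketch, not from the original
	     source):

	       int f (int x) { return x >= 5; }

	     is canonicalized to "return x > 4;", feeding the
	     constant-range folds below.  */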
11067 /* Comparisons with the highest or lowest possible integer of
11068 the specified size will have known values. */
11070 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11072 if (TREE_CODE (arg1) == INTEGER_CST
11073 && ! TREE_CONSTANT_OVERFLOW (arg1)
11074 && width <= 2 * HOST_BITS_PER_WIDE_INT
11075 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11076 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11078 HOST_WIDE_INT signed_max_hi;
11079 unsigned HOST_WIDE_INT signed_max_lo;
11080 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11082 if (width <= HOST_BITS_PER_WIDE_INT)
11084 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11085 - 1;
11086 signed_max_hi = 0;
11087 max_hi = 0;
11089 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11091 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11092 min_lo = 0;
11093 min_hi = 0;
11095 else
11097 max_lo = signed_max_lo;
11098 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11099 min_hi = -1;
11102 else
11104 width -= HOST_BITS_PER_WIDE_INT;
11105 signed_max_lo = -1;
11106 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11107 - 1;
11108 max_lo = -1;
11109 min_lo = 0;
11111 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11113 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11114 min_hi = 0;
11116 else
11118 max_hi = signed_max_hi;
11119 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11123 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11124 && TREE_INT_CST_LOW (arg1) == max_lo)
11125 switch (code)
11127 case GT_EXPR:
11128 return omit_one_operand (type, integer_zero_node, arg0);
11130 case GE_EXPR:
11131 return fold_build2 (EQ_EXPR, type, op0, op1);
11133 case LE_EXPR:
11134 return omit_one_operand (type, integer_one_node, arg0);
11136 case LT_EXPR:
11137 return fold_build2 (NE_EXPR, type, op0, op1);
11139 /* The GE_EXPR and LT_EXPR cases above are not normally
11140 reached because of previous transformations. */
11142 default:
11143 break;
11145 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11146 == max_hi
11147 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11148 switch (code)
11150 case GT_EXPR:
11151 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11152 return fold_build2 (EQ_EXPR, type,
11153 fold_convert (TREE_TYPE (arg1), arg0),
11154 arg1);
11155 case LE_EXPR:
11156 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11157 return fold_build2 (NE_EXPR, type,
11158 fold_convert (TREE_TYPE (arg1), arg0),
11159 arg1);
11160 default:
11161 break;
11163 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11164 == min_hi
11165 && TREE_INT_CST_LOW (arg1) == min_lo)
11166 switch (code)
11168 case LT_EXPR:
11169 return omit_one_operand (type, integer_zero_node, arg0);
11171 case LE_EXPR:
11172 return fold_build2 (EQ_EXPR, type, op0, op1);
11174 case GE_EXPR:
11175 return omit_one_operand (type, integer_one_node, arg0);
11177 case GT_EXPR:
11178 return fold_build2 (NE_EXPR, type, op0, op1);
11180 default:
11181 break;
11183 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11184 == min_hi
11185 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11186 switch (code)
11188 case GE_EXPR:
11189 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11190 return fold_build2 (NE_EXPR, type,
11191 fold_convert (TREE_TYPE (arg1), arg0),
11192 arg1);
11193 case LT_EXPR:
11194 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11195 return fold_build2 (EQ_EXPR, type,
11196 fold_convert (TREE_TYPE (arg1), arg0),
11197 arg1);
11198 default:
11199 break;
11202 else if (!in_gimple_form
11203 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11204 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11205 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11206 /* signed_type does not work on pointer types. */
11207 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11209 /* The following case also applies to X < signed_max+1
11210 and X >= signed_max+1 because of previous transformations. */
11211 if (code == LE_EXPR || code == GT_EXPR)
11213 tree st;
11214 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11215 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11216 type, fold_convert (st, arg0),
11217 build_int_cst (st, 0));
11223 /* If we are comparing an ABS_EXPR with a constant, we can
11224 convert all the cases into explicit comparisons, but they may
11225 well not be faster than doing the ABS and one comparison.
11226 But ABS (X) <= C is a range comparison, which becomes a subtraction
11227 and a comparison, and is probably faster. */
11228 if (code == LE_EXPR
11229 && TREE_CODE (arg1) == INTEGER_CST
11230 && TREE_CODE (arg0) == ABS_EXPR
11231 && ! TREE_SIDE_EFFECTS (arg0)
11232 && (0 != (tem = negate_expr (arg1)))
11233 && TREE_CODE (tem) == INTEGER_CST
11234 && ! TREE_CONSTANT_OVERFLOW (tem))
11235 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11236 build2 (GE_EXPR, type,
11237 TREE_OPERAND (arg0, 0), tem),
11238 build2 (LE_EXPR, type,
11239 TREE_OPERAND (arg0, 0), arg1));
11241 /* Convert ABS_EXPR<x> >= 0 to true. */
11242 strict_overflow_p = false;
11243 if (code == GE_EXPR
11244 && (integer_zerop (arg1)
11245 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11246 && real_zerop (arg1)))
11247 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11249 if (strict_overflow_p)
11250 fold_overflow_warning (("assuming signed overflow does not occur "
11251 "when simplifying comparison of "
11252 "absolute value and zero"),
11253 WARN_STRICT_OVERFLOW_CONDITIONAL);
11254 return omit_one_operand (type, integer_one_node, arg0);
11257 /* Convert ABS_EXPR<x> < 0 to false. */
11258 strict_overflow_p = false;
11259 if (code == LT_EXPR
11260 && (integer_zerop (arg1) || real_zerop (arg1))
11261 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11263 if (strict_overflow_p)
11264 fold_overflow_warning (("assuming signed overflow does not occur "
11265 "when simplifying comparison of "
11266 "absolute value and zero"),
11267 WARN_STRICT_OVERFLOW_CONDITIONAL);
11268 return omit_one_operand (type, integer_zero_node, arg0);
11271 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11272 and similarly for >= into !=. */
11273 if ((code == LT_EXPR || code == GE_EXPR)
11274 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11275 && TREE_CODE (arg1) == LSHIFT_EXPR
11276 && integer_onep (TREE_OPERAND (arg1, 0)))
11277 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11278 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11279 TREE_OPERAND (arg1, 1)),
11280 build_int_cst (TREE_TYPE (arg0), 0));
11282 if ((code == LT_EXPR || code == GE_EXPR)
11283 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11284 && (TREE_CODE (arg1) == NOP_EXPR
11285 || TREE_CODE (arg1) == CONVERT_EXPR)
11286 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11287 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11288 return
11289 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11290 fold_convert (TREE_TYPE (arg0),
11291 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11292 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11293 1))),
11294 build_int_cst (TREE_TYPE (arg0), 0));
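	  /* For illustration (editor's sketch, not from the original
	     source), assuming an in-range shift count:

	       int f (unsigned x, int y) { return x < (1u << y); }

	     is folded as if it were "return (x >> y) == 0;".  */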
11296 return NULL_TREE;
11298 case UNORDERED_EXPR:
11299 case ORDERED_EXPR:
11300 case UNLT_EXPR:
11301 case UNLE_EXPR:
11302 case UNGT_EXPR:
11303 case UNGE_EXPR:
11304 case UNEQ_EXPR:
11305 case LTGT_EXPR:
11306 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11308 t1 = fold_relational_const (code, type, arg0, arg1);
11309 if (t1 != NULL_TREE)
11310 return t1;
11313 /* If the first operand is NaN, the result is constant. */
11314 if (TREE_CODE (arg0) == REAL_CST
11315 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11316 && (code != LTGT_EXPR || ! flag_trapping_math))
11318 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11319 ? integer_zero_node
11320 : integer_one_node;
11321 return omit_one_operand (type, t1, arg1);
11324 /* If the second operand is NaN, the result is constant. */
11325 if (TREE_CODE (arg1) == REAL_CST
11326 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11327 && (code != LTGT_EXPR || ! flag_trapping_math))
11329 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11330 ? integer_zero_node
11331 : integer_one_node;
11332 return omit_one_operand (type, t1, arg0);
11335 /* Simplify unordered comparison of something with itself. */
11336 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11337 && operand_equal_p (arg0, arg1, 0))
11338 return constant_boolean_node (1, type);
11340 if (code == LTGT_EXPR
11341 && !flag_trapping_math
11342 && operand_equal_p (arg0, arg1, 0))
11343 return constant_boolean_node (0, type);
11345 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11347 tree targ0 = strip_float_extensions (arg0);
11348 tree targ1 = strip_float_extensions (arg1);
11349 tree newtype = TREE_TYPE (targ0);
11351 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11352 newtype = TREE_TYPE (targ1);
11354 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11355 return fold_build2 (code, type, fold_convert (newtype, targ0),
11356 fold_convert (newtype, targ1));
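	  /* For illustration (editor's sketch, not from the original
	     source):

	       int f (float a, float b) { return (double) a < (double) b; }

	     is folded as if it were "return a < b;", since widening both
	     operands cannot change the outcome.  */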
11359 return NULL_TREE;
11361 case COMPOUND_EXPR:
11362 /* When pedantic, a compound expression can be neither an lvalue
11363 nor an integer constant expression. */
11364 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11365 return NULL_TREE;
11366 /* Don't let (0, 0) be null pointer constant. */
11367 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11368 : fold_convert (type, arg1);
11369 return pedantic_non_lvalue (tem);
11371 case COMPLEX_EXPR:
11372 if ((TREE_CODE (arg0) == REAL_CST
11373 && TREE_CODE (arg1) == REAL_CST)
11374 || (TREE_CODE (arg0) == INTEGER_CST
11375 && TREE_CODE (arg1) == INTEGER_CST))
11376 return build_complex (type, arg0, arg1);
11377 return NULL_TREE;
11379 case ASSERT_EXPR:
11380 /* An ASSERT_EXPR should never be passed to fold_binary. */
11381 gcc_unreachable ();
11383 default:
11384 return NULL_TREE;
11385 } /* switch (code) */
11388 /* Callback for walk_tree, looking for a LABEL_EXPR.
11389 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
11390 Does not descend into the sub-tree of a GOTO_EXPR. */
11392 static tree
11393 contains_label_1 (tree *tp,
11394 int *walk_subtrees,
11395 void *data ATTRIBUTE_UNUSED)
11397 switch (TREE_CODE (*tp))
11399 case LABEL_EXPR:
11400 return *tp;
11401 case GOTO_EXPR:
11402 *walk_subtrees = 0;
11403 /* no break */
11404 default:
11405 return NULL_TREE;
11409 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11410 accessible from outside the sub-tree. Returns true if such a
11411 label is found, false otherwise. */
11413 static bool
11414 contains_label_p (tree st)
11416 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11419 /* Fold a ternary expression of code CODE and type TYPE with operands
11420 OP0, OP1, and OP2. Return the folded expression if folding is
11421 successful. Otherwise, return NULL_TREE. */
11423 tree
11424 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11426 tree tem;
11427 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11428 enum tree_code_class kind = TREE_CODE_CLASS (code);
11430 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11431 && TREE_CODE_LENGTH (code) == 3);
11433 /* Strip any conversions that don't change the mode. This is safe
11434 for every expression, except for a comparison expression because
11435 its signedness is derived from its operands. So, in the latter
11436 case, only strip conversions that don't change the signedness.
11438 Note that this is done as an internal manipulation within the
11439 constant folder, in order to find the simplest representation of
11440 the arguments so that their form can be studied. In any cases,
11441 the appropriate type conversions should be put back in the tree
11442 that will get out of the constant folder. */
11443 if (op0)
11445 arg0 = op0;
11446 STRIP_NOPS (arg0);
11449 if (op1)
11451 arg1 = op1;
11452 STRIP_NOPS (arg1);
11455 switch (code)
11457 case COMPONENT_REF:
11458 if (TREE_CODE (arg0) == CONSTRUCTOR
11459 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11461 unsigned HOST_WIDE_INT idx;
11462 tree field, value;
11463 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11464 if (field == arg1)
11465 return value;
11467 return NULL_TREE;
11469 case COND_EXPR:
11470 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11471 so all simple results must be passed through pedantic_non_lvalue. */
11472 if (TREE_CODE (arg0) == INTEGER_CST)
11474 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11475 tem = integer_zerop (arg0) ? op2 : op1;
11476 /* Only optimize constant conditions when the selected branch
11477 has the same type as the COND_EXPR. This avoids optimizing
11478 away "c ? x : throw", where the throw has a void type.
11479 Also avoid throwing away an operand that contains a label. */
11480 if ((!TREE_SIDE_EFFECTS (unused_op)
11481 || !contains_label_p (unused_op))
11482 && (! VOID_TYPE_P (TREE_TYPE (tem))
11483 || VOID_TYPE_P (type)))
11484 return pedantic_non_lvalue (tem);
11485 return NULL_TREE;
11487 if (operand_equal_p (arg1, op2, 0))
11488 return pedantic_omit_one_operand (type, arg1, arg0);
11490 /* If we have A op B ? A : C, we may be able to convert this to a
11491 simpler expression, depending on the operation and the values
11492 of B and C. Signed zeros prevent all of these transformations,
11493 for reasons given above each one.
11495 Also try swapping the arguments and inverting the conditional. */
11496 if (COMPARISON_CLASS_P (arg0)
11497 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11498 arg1, TREE_OPERAND (arg0, 1))
11499 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11501 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11502 if (tem)
11503 return tem;
11506 if (COMPARISON_CLASS_P (arg0)
11507 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11508 op2,
11509 TREE_OPERAND (arg0, 1))
11510 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11512 tem = fold_truth_not_expr (arg0);
11513 if (tem && COMPARISON_CLASS_P (tem))
11515 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11516 if (tem)
11517 return tem;
11521 /* If the second operand is simpler than the third, swap them
11522 since that produces better jump optimization results. */
11523 if (truth_value_p (TREE_CODE (arg0))
11524 && tree_swap_operands_p (op1, op2, false))
11526 /* See if this can be inverted. If it can't, possibly because
11527 it was a floating-point inequality comparison, don't do
11528 anything. */
11529 tem = fold_truth_not_expr (arg0);
11530 if (tem)
11531 return fold_build3 (code, type, tem, op2, op1);
11534 /* Convert A ? 1 : 0 to simply A. */
11535 if (integer_onep (op1)
11536 && integer_zerop (op2)
11537 /* If we try to convert OP0 to our type, the
11538 call to fold will try to move the conversion inside
11539 a COND, which will recurse. In that case, the COND_EXPR
11540 is probably the best choice, so leave it alone. */
11541 && type == TREE_TYPE (arg0))
11542 return pedantic_non_lvalue (arg0);
11544 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11545 over COND_EXPR in cases such as floating point comparisons. */
11546 if (integer_zerop (op1)
11547 && integer_onep (op2)
11548 && truth_value_p (TREE_CODE (arg0)))
11549 return pedantic_non_lvalue (fold_convert (type,
11550 invert_truthvalue (arg0)));
11552 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11553 if (TREE_CODE (arg0) == LT_EXPR
11554 && integer_zerop (TREE_OPERAND (arg0, 1))
11555 && integer_zerop (op2)
11556 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11558 /* sign_bit_p only checks ARG1 bits within A's precision.
11559 If <sign bit of A> has wider type than A, bits outside
11560 of A's precision in <sign bit of A> need to be checked.
11561 If they are all 0, this optimization needs to be done
11562 in unsigned A's type; if they are all 1, in signed A's type;
11563 otherwise this can't be done. */
11564 if (TYPE_PRECISION (TREE_TYPE (tem))
11565 < TYPE_PRECISION (TREE_TYPE (arg1))
11566 && TYPE_PRECISION (TREE_TYPE (tem))
11567 < TYPE_PRECISION (type))
11569 unsigned HOST_WIDE_INT mask_lo;
11570 HOST_WIDE_INT mask_hi;
11571 int inner_width, outer_width;
11572 tree tem_type;
11574 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11575 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11576 if (outer_width > TYPE_PRECISION (type))
11577 outer_width = TYPE_PRECISION (type);
11579 if (outer_width > HOST_BITS_PER_WIDE_INT)
11581 mask_hi = ((unsigned HOST_WIDE_INT) -1
11582 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11583 mask_lo = -1;
11585 else
11587 mask_hi = 0;
11588 mask_lo = ((unsigned HOST_WIDE_INT) -1
11589 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11591 if (inner_width > HOST_BITS_PER_WIDE_INT)
11593 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11594 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11595 mask_lo = 0;
11597 else
11598 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11599 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11601 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11602 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11604 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11605 tem = fold_convert (tem_type, tem);
11607 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11608 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11610 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11611 tem = fold_convert (tem_type, tem);
11613 else
11614 tem = NULL;
11617 if (tem)
11618 return fold_convert (type,
11619 fold_build2 (BIT_AND_EXPR,
11620 TREE_TYPE (tem), tem,
11621 fold_convert (TREE_TYPE (tem),
11622 arg1)));
11625 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11626 already handled above. */
11627 if (TREE_CODE (arg0) == BIT_AND_EXPR
11628 && integer_onep (TREE_OPERAND (arg0, 1))
11629 && integer_zerop (op2)
11630 && integer_pow2p (arg1))
11632 tree tem = TREE_OPERAND (arg0, 0);
11633 STRIP_NOPS (tem);
11634 if (TREE_CODE (tem) == RSHIFT_EXPR
11635 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11636 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11637 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11638 return fold_build2 (BIT_AND_EXPR, type,
11639 TREE_OPERAND (tem, 0), arg1);
11642 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11643 is probably obsolete because the first operand should be a
11644 truth value (that's why we have the two cases above), but let's
11645 leave it in until we can confirm this for all front-ends. */
11646 if (integer_zerop (op2)
11647 && TREE_CODE (arg0) == NE_EXPR
11648 && integer_zerop (TREE_OPERAND (arg0, 1))
11649 && integer_pow2p (arg1)
11650 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11651 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11652 arg1, OEP_ONLY_CONST))
11653 return pedantic_non_lvalue (fold_convert (type,
11654 TREE_OPERAND (arg0, 0)));
11656 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11657 if (integer_zerop (op2)
11658 && truth_value_p (TREE_CODE (arg0))
11659 && truth_value_p (TREE_CODE (arg1)))
11660 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11661 fold_convert (type, arg0),
11662 arg1);
11664 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11665 if (integer_onep (op2)
11666 && truth_value_p (TREE_CODE (arg0))
11667 && truth_value_p (TREE_CODE (arg1)))
11669 /* Only perform transformation if ARG0 is easily inverted. */
11670 tem = fold_truth_not_expr (arg0);
11671 if (tem)
11672 return fold_build2 (TRUTH_ORIF_EXPR, type,
11673 fold_convert (type, tem),
11674 arg1);
11677 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11678 if (integer_zerop (arg1)
11679 && truth_value_p (TREE_CODE (arg0))
11680 && truth_value_p (TREE_CODE (op2)))
11682 /* Only perform transformation if ARG0 is easily inverted. */
11683 tem = fold_truth_not_expr (arg0);
11684 if (tem)
11685 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11686 fold_convert (type, tem),
11687 op2);
11690 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11691 if (integer_onep (arg1)
11692 && truth_value_p (TREE_CODE (arg0))
11693 && truth_value_p (TREE_CODE (op2)))
11694 return fold_build2 (TRUTH_ORIF_EXPR, type,
11695 fold_convert (type, arg0),
11696 op2);
11698 return NULL_TREE;
11700 case CALL_EXPR:
11701 /* Check for a built-in function. */
11702 if (TREE_CODE (op0) == ADDR_EXPR
11703 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11704 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11705 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11706 return NULL_TREE;
11708 case BIT_FIELD_REF:
11709 if (TREE_CODE (arg0) == VECTOR_CST
11710 && type == TREE_TYPE (TREE_TYPE (arg0))
11711 && host_integerp (arg1, 1)
11712 && host_integerp (op2, 1))
11714 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11715 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11717 if (width != 0
11718 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11719 && (idx % width) == 0
11720 && (idx = idx / width)
11721 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11723 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11724 while (idx-- > 0 && elements)
11725 elements = TREE_CHAIN (elements);
11726 if (elements)
11727 return TREE_VALUE (elements);
11728 else
11729 return fold_convert (type, integer_zero_node);
11732 return NULL_TREE;
11734 default:
11735 return NULL_TREE;
11736 } /* switch (code) */
11739 /* Perform constant folding and related simplification of EXPR.
11740 The related simplifications include x*1 => x, x*0 => 0, etc.,
11741 and application of the associative law.
11742 NOP_EXPR conversions may be removed freely (as long as we
11743 are careful not to change the type of the overall expression).
11744 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11745 but we can constant-fold them if they have constant operands. */
11747 #ifdef ENABLE_FOLD_CHECKING
11748 # define fold(x) fold_1 (x)
11749 static tree fold_1 (tree);
11750 static
11751 #endif
11752 tree
11753 fold (tree expr)
11755 const tree t = expr;
11756 enum tree_code code = TREE_CODE (t);
11757 enum tree_code_class kind = TREE_CODE_CLASS (code);
11758 tree tem;
11760 /* Return right away if a constant. */
11761 if (kind == tcc_constant)
11762 return t;
11764 if (IS_EXPR_CODE_CLASS (kind))
11766 tree type = TREE_TYPE (t);
11767 tree op0, op1, op2;
11769 switch (TREE_CODE_LENGTH (code))
11771 case 1:
11772 op0 = TREE_OPERAND (t, 0);
11773 tem = fold_unary (code, type, op0);
11774 return tem ? tem : expr;
11775 case 2:
11776 op0 = TREE_OPERAND (t, 0);
11777 op1 = TREE_OPERAND (t, 1);
11778 tem = fold_binary (code, type, op0, op1);
11779 return tem ? tem : expr;
11780 case 3:
11781 op0 = TREE_OPERAND (t, 0);
11782 op1 = TREE_OPERAND (t, 1);
11783 op2 = TREE_OPERAND (t, 2);
11784 tem = fold_ternary (code, type, op0, op1, op2);
11785 return tem ? tem : expr;
11786 default:
11787 break;
11791 switch (code)
11793 case CONST_DECL:
11794 return fold (DECL_INITIAL (t));
11796 default:
11797 return t;
11798 } /* switch (code) */
11801 #ifdef ENABLE_FOLD_CHECKING
11802 #undef fold
11804 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11805 static void fold_check_failed (tree, tree);
11806 void print_fold_checksum (tree);
11808 /* When configured with --enable-checking=fold, compute a digest of
11809 EXPR before and after the actual fold call, to verify that fold
11810 did not accidentally change the original expr. */
11812 tree
11813 fold (tree expr)
11815 tree ret;
11816 struct md5_ctx ctx;
11817 unsigned char checksum_before[16], checksum_after[16];
11818 htab_t ht;
11820 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11821 md5_init_ctx (&ctx);
11822 fold_checksum_tree (expr, &ctx, ht);
11823 md5_finish_ctx (&ctx, checksum_before);
11824 htab_empty (ht);
11826 ret = fold_1 (expr);
11828 md5_init_ctx (&ctx);
11829 fold_checksum_tree (expr, &ctx, ht);
11830 md5_finish_ctx (&ctx, checksum_after);
11831 htab_delete (ht);
11833 if (memcmp (checksum_before, checksum_after, 16))
11834 fold_check_failed (expr, ret);
11836 return ret;
11839 void
11840 print_fold_checksum (tree expr)
11842 struct md5_ctx ctx;
11843 unsigned char checksum[16], cnt;
11844 htab_t ht;
11846 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11847 md5_init_ctx (&ctx);
11848 fold_checksum_tree (expr, &ctx, ht);
11849 md5_finish_ctx (&ctx, checksum);
11850 htab_delete (ht);
11851 for (cnt = 0; cnt < 16; ++cnt)
11852 fprintf (stderr, "%02x", checksum[cnt]);
11853 putc ('\n', stderr);
11856 static void
11857 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11859 internal_error ("fold check: original tree changed by fold");
11862 static void
11863 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11865 void **slot;
11866 enum tree_code code;
11867 struct tree_function_decl buf;
11868 int i, len;
11870 recursive_label:
11872 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11873 <= sizeof (struct tree_function_decl))
11874 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11875 if (expr == NULL)
11876 return;
11877 slot = htab_find_slot (ht, expr, INSERT);
11878 if (*slot != NULL)
11879 return;
11880 *slot = expr;
11881 code = TREE_CODE (expr);
11882 if (TREE_CODE_CLASS (code) == tcc_declaration
11883 && DECL_ASSEMBLER_NAME_SET_P (expr))
11885 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11886 memcpy ((char *) &buf, expr, tree_size (expr));
11887 expr = (tree) &buf;
11888 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11890 else if (TREE_CODE_CLASS (code) == tcc_type
11891 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11892 || TYPE_CACHED_VALUES_P (expr)
11893 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11895 /* Allow these fields to be modified. */
11896 memcpy ((char *) &buf, expr, tree_size (expr));
11897 expr = (tree) &buf;
11898 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11899 TYPE_POINTER_TO (expr) = NULL;
11900 TYPE_REFERENCE_TO (expr) = NULL;
11901 if (TYPE_CACHED_VALUES_P (expr))
11903 TYPE_CACHED_VALUES_P (expr) = 0;
11904 TYPE_CACHED_VALUES (expr) = NULL;
11907 md5_process_bytes (expr, tree_size (expr), ctx);
11908 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11909 if (TREE_CODE_CLASS (code) != tcc_type
11910 && TREE_CODE_CLASS (code) != tcc_declaration
11911 && code != TREE_LIST)
11912 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11913 switch (TREE_CODE_CLASS (code))
11915 case tcc_constant:
11916 switch (code)
11918 case STRING_CST:
11919 md5_process_bytes (TREE_STRING_POINTER (expr),
11920 TREE_STRING_LENGTH (expr), ctx);
11921 break;
11922 case COMPLEX_CST:
11923 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11924 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11925 break;
11926 case VECTOR_CST:
11927 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11928 break;
11929 default:
11930 break;
11932 break;
11933 case tcc_exceptional:
11934 switch (code)
11936 case TREE_LIST:
11937 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11938 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11939 expr = TREE_CHAIN (expr);
11940 goto recursive_label;
11941 break;
11942 case TREE_VEC:
11943 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11944 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11945 break;
11946 default:
11947 break;
11949 break;
11950 case tcc_expression:
11951 case tcc_reference:
11952 case tcc_comparison:
11953 case tcc_unary:
11954 case tcc_binary:
11955 case tcc_statement:
11956 len = TREE_CODE_LENGTH (code);
11957 for (i = 0; i < len; ++i)
11958 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11959 break;
11960 case tcc_declaration:
11961 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11962 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11963 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11965 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11966 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11967 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11968 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11969 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11971 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11972 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11974 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11976 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11977 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11978 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11980 break;
11981 case tcc_type:
11982 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11983 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11984 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11985 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11986 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11987 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11988 if (INTEGRAL_TYPE_P (expr)
11989 || SCALAR_FLOAT_TYPE_P (expr))
11991 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11992 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11994 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11995 if (TREE_CODE (expr) == RECORD_TYPE
11996 || TREE_CODE (expr) == UNION_TYPE
11997 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11998 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11999 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12000 break;
12001 default:
12002 break;
12006 #endif
12008 /* Fold a unary tree expression with code CODE of type TYPE with an
12009 operand OP0. Return a folded expression if successful. Otherwise,
12010 return a tree expression with code CODE of type TYPE with an
12011 operand OP0. */
12013 tree
12014 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12016 tree tem;
12017 #ifdef ENABLE_FOLD_CHECKING
12018 unsigned char checksum_before[16], checksum_after[16];
12019 struct md5_ctx ctx;
12020 htab_t ht;
12022 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12023 md5_init_ctx (&ctx);
12024 fold_checksum_tree (op0, &ctx, ht);
12025 md5_finish_ctx (&ctx, checksum_before);
12026 htab_empty (ht);
12027 #endif
12029 tem = fold_unary (code, type, op0);
12030 if (!tem)
12031 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12033 #ifdef ENABLE_FOLD_CHECKING
12034 md5_init_ctx (&ctx);
12035 fold_checksum_tree (op0, &ctx, ht);
12036 md5_finish_ctx (&ctx, checksum_after);
12037 htab_delete (ht);
12039 if (memcmp (checksum_before, checksum_after, 16))
12040 fold_check_failed (op0, tem);
12041 #endif
12042 return tem;
12045 /* Fold a binary tree expression with code CODE of type TYPE with
12046 operands OP0 and OP1. Return a folded expression if successful.
12047 Otherwise, return a tree expression with code CODE of type TYPE
12048 with operands OP0 and OP1. */
12050 tree
12051 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12052 MEM_STAT_DECL)
12054 tree tem;
12055 #ifdef ENABLE_FOLD_CHECKING
12056 unsigned char checksum_before_op0[16],
12057 checksum_before_op1[16],
12058 checksum_after_op0[16],
12059 checksum_after_op1[16];
12060 struct md5_ctx ctx;
12061 htab_t ht;
12063 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12064 md5_init_ctx (&ctx);
12065 fold_checksum_tree (op0, &ctx, ht);
12066 md5_finish_ctx (&ctx, checksum_before_op0);
12067 htab_empty (ht);
12069 md5_init_ctx (&ctx);
12070 fold_checksum_tree (op1, &ctx, ht);
12071 md5_finish_ctx (&ctx, checksum_before_op1);
12072 htab_empty (ht);
12073 #endif
12075 tem = fold_binary (code, type, op0, op1);
12076 if (!tem)
12077 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12079 #ifdef ENABLE_FOLD_CHECKING
12080 md5_init_ctx (&ctx);
12081 fold_checksum_tree (op0, &ctx, ht);
12082 md5_finish_ctx (&ctx, checksum_after_op0);
12083 htab_empty (ht);
12085 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12086 fold_check_failed (op0, tem);
12088 md5_init_ctx (&ctx);
12089 fold_checksum_tree (op1, &ctx, ht);
12090 md5_finish_ctx (&ctx, checksum_after_op1);
12091 htab_delete (ht);
12093 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12094 fold_check_failed (op1, tem);
12095 #endif
12096 return tem;
12099 /* Fold a ternary tree expression with code CODE of type TYPE with
12100 operands OP0, OP1, and OP2. Return a folded expression if
12101 successful. Otherwise, return a tree expression with code CODE of
12102 type TYPE with operands OP0, OP1, and OP2. */
12104 tree
12105 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12106 MEM_STAT_DECL)
12108 tree tem;
12109 #ifdef ENABLE_FOLD_CHECKING
12110 unsigned char checksum_before_op0[16],
12111 checksum_before_op1[16],
12112 checksum_before_op2[16],
12113 checksum_after_op0[16],
12114 checksum_after_op1[16],
12115 checksum_after_op2[16];
12116 struct md5_ctx ctx;
12117 htab_t ht;
12119 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12120 md5_init_ctx (&ctx);
12121 fold_checksum_tree (op0, &ctx, ht);
12122 md5_finish_ctx (&ctx, checksum_before_op0);
12123 htab_empty (ht);
12125 md5_init_ctx (&ctx);
12126 fold_checksum_tree (op1, &ctx, ht);
12127 md5_finish_ctx (&ctx, checksum_before_op1);
12128 htab_empty (ht);
12130 md5_init_ctx (&ctx);
12131 fold_checksum_tree (op2, &ctx, ht);
12132 md5_finish_ctx (&ctx, checksum_before_op2);
12133 htab_empty (ht);
12134 #endif
12136 tem = fold_ternary (code, type, op0, op1, op2);
12137 if (!tem)
12138 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12140 #ifdef ENABLE_FOLD_CHECKING
12141 md5_init_ctx (&ctx);
12142 fold_checksum_tree (op0, &ctx, ht);
12143 md5_finish_ctx (&ctx, checksum_after_op0);
12144 htab_empty (ht);
12146 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12147 fold_check_failed (op0, tem);
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (op1, &ctx, ht);
12151 md5_finish_ctx (&ctx, checksum_after_op1);
12152 htab_empty (ht);
12154 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12155 fold_check_failed (op1, tem);
12157 md5_init_ctx (&ctx);
12158 fold_checksum_tree (op2, &ctx, ht);
12159 md5_finish_ctx (&ctx, checksum_after_op2);
12160 htab_delete (ht);
12162 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12163 fold_check_failed (op2, tem);
12164 #endif
12165 return tem;
12168 /* Perform constant folding and related simplification of initializer
12169 expressions. These functions behave identically to "fold_buildN" but
12170 ignore potential run-time traps and exceptions that fold must preserve. */
12172 #define START_FOLD_INIT \
12173 int saved_signaling_nans = flag_signaling_nans;\
12174 int saved_trapping_math = flag_trapping_math;\
12175 int saved_rounding_math = flag_rounding_math;\
12176 int saved_trapv = flag_trapv;\
12177 int saved_folding_initializer = folding_initializer;\
12178 flag_signaling_nans = 0;\
12179 flag_trapping_math = 0;\
12180 flag_rounding_math = 0;\
12181 flag_trapv = 0;\
12182 folding_initializer = 1;
12184 #define END_FOLD_INIT \
12185 flag_signaling_nans = saved_signaling_nans;\
12186 flag_trapping_math = saved_trapping_math;\
12187 flag_rounding_math = saved_rounding_math;\
12188 flag_trapv = saved_trapv;\
12189 folding_initializer = saved_folding_initializer;
12191 tree
12192 fold_build1_initializer (enum tree_code code, tree type, tree op)
12194 tree result;
12195 START_FOLD_INIT;
12197 result = fold_build1 (code, type, op);
12199 END_FOLD_INIT;
12200 return result;
12203 tree
12204 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12206 tree result;
12207 START_FOLD_INIT;
12209 result = fold_build2 (code, type, op0, op1);
12211 END_FOLD_INIT;
12212 return result;
12215 tree
12216 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12217 tree op2)
12219 tree result;
12220 START_FOLD_INIT;
12222 result = fold_build3 (code, type, op0, op1, op2);
12224 END_FOLD_INIT;
12225 return result;
12228 #undef START_FOLD_INIT
12229 #undef END_FOLD_INIT
12231 /* Determine whether the first argument is a multiple of the second
12232 argument. Return 0 if it is not, or if we cannot easily determine it to be.
12234 An example of the sort of thing we care about (at this point; this routine
12235 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12236 fold cases do now) is discovering that
12238 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12240 is a multiple of
12242 SAVE_EXPR (J * 8)
12244 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12246 This code also handles discovering that
12248 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12250 is a multiple of 8 so we don't have to worry about dealing with a
12251 possible remainder.
12253 Note that we *look* inside a SAVE_EXPR only to determine how it was
12254 calculated; it is not safe for fold to do much of anything else with the
12255 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12256 at run time. For example, the latter example above *cannot* be implemented
12257 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12258 evaluation time of the original SAVE_EXPR is not necessarily the same at
12259 the time the new expression is evaluated. The only optimization of this
12260 sort that would be valid is changing
12262 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12264 divided by 8 to
12266 SAVE_EXPR (I) * SAVE_EXPR (J)
12268 (where the same SAVE_EXPR (J) is used in the original and the
12269 transformed version). */
12271 static int
12272 multiple_of_p (tree type, tree top, tree bottom)
12274 if (operand_equal_p (top, bottom, 0))
12275 return 1;
12277 if (TREE_CODE (type) != INTEGER_TYPE)
12278 return 0;
12280 switch (TREE_CODE (top))
12282 case BIT_AND_EXPR:
12283 /* Bitwise and provides a power of two multiple. If the mask is
12284 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12285 if (!integer_pow2p (bottom))
12286 return 0;
12287 /* FALLTHRU */
12289 case MULT_EXPR:
12290 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12291 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12293 case PLUS_EXPR:
12294 case MINUS_EXPR:
12295 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12296 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12298 case LSHIFT_EXPR:
12299 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12301 tree op1, t1;
12303 op1 = TREE_OPERAND (top, 1);
12304 /* const_binop may not detect overflow correctly,
12305 so check for it explicitly here. */
12306 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12307 > TREE_INT_CST_LOW (op1)
12308 && TREE_INT_CST_HIGH (op1) == 0
12309 && 0 != (t1 = fold_convert (type,
12310 const_binop (LSHIFT_EXPR,
12311 size_one_node,
12312 op1, 0)))
12313 && ! TREE_OVERFLOW (t1))
12314 return multiple_of_p (type, t1, bottom);
12316 return 0;
12318 case NOP_EXPR:
12319 /* Can't handle conversions from non-integral or wider integral type. */
12320 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12321 || (TYPE_PRECISION (type)
12322 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12323 return 0;
12325 /* ... fall through ... */
12327 case SAVE_EXPR:
12328 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12330 case INTEGER_CST:
12331 if (TREE_CODE (bottom) != INTEGER_CST
12332 || (TYPE_UNSIGNED (type)
12333 && (tree_int_cst_sgn (top) < 0
12334 || tree_int_cst_sgn (bottom) < 0)))
12335 return 0;
12336 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12337 top, bottom, 0));
12339 default:
12340 return 0;
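/* For illustration (editor's sketch, not from the original source):
   given integral expressions, multiple_of_p can determine that
   "i * 8 + 16" is a multiple of 8 (both the MULT_EXPR and the
   INTEGER_CST 16 are), but it returns 0 for "i * 8 + 4", since
   4 % 8 != 0.  */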
12344 /* Return true if `t' is known to be non-negative. If the return
12345 value is based on the assumption that signed overflow is undefined,
12346 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12347 *STRICT_OVERFLOW_P. */
12350 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12352 if (t == error_mark_node)
12353 return 0;
12355 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12356 return 1;
12358 switch (TREE_CODE (t))
12360 case SSA_NAME:
12361 /* Query VRP to see if it has recorded any information about
12362 the range of this object. */
12363 return ssa_name_nonnegative_p (t);
12365 case ABS_EXPR:
12366 /* We can't return 1 if flag_wrapv is set because
12367 ABS_EXPR<INT_MIN> = INT_MIN. */
12368 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12369 return 1;
12370 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12372 *strict_overflow_p = true;
12373 return 1;
12375 break;
12377 case INTEGER_CST:
12378 return tree_int_cst_sgn (t) >= 0;
12380 case REAL_CST:
12381 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12383 case PLUS_EXPR:
12384 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12385 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12386 strict_overflow_p)
12387 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12388 strict_overflow_p));
12390 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12391 both unsigned and at least 2 bits shorter than the result. */
12392 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12393 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12394 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12396 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12397 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12398 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12399 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12401 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12402 TYPE_PRECISION (inner2)) + 1;
12403 return prec < TYPE_PRECISION (TREE_TYPE (t));
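          /* Concrete instance (illustrative): for 8-bit unsigned operands
             zero-extended to a 32-bit result, prec = max (8, 8) + 1 = 9 < 32,
             and indeed the sum is at most 255 + 255 = 510, which fits in
             9 bits and so is non-negative in the 32-bit result.  */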
12406 break;
12408 case MULT_EXPR:
12409 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12411 /* x * x for floating point x is always non-negative. */
12412 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12413 return 1;
12414 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12415 strict_overflow_p)
12416 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12417 strict_overflow_p));
12420 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12421 both unsigned and their total precision is smaller than that of the result. */
12422 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12423 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12424 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12426 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12427 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12428 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12429 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12430 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12431 < TYPE_PRECISION (TREE_TYPE (t));
12433 return 0;
12435 case BIT_AND_EXPR:
12436 case MAX_EXPR:
12437 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12438 strict_overflow_p)
12439 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12440 strict_overflow_p));
12442 case BIT_IOR_EXPR:
12443 case BIT_XOR_EXPR:
12444 case MIN_EXPR:
12445 case RDIV_EXPR:
12446 case TRUNC_DIV_EXPR:
12447 case CEIL_DIV_EXPR:
12448 case FLOOR_DIV_EXPR:
12449 case ROUND_DIV_EXPR:
12450 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12451 strict_overflow_p)
12452 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12453 strict_overflow_p));
12455 case TRUNC_MOD_EXPR:
12456 case CEIL_MOD_EXPR:
12457 case FLOOR_MOD_EXPR:
12458 case ROUND_MOD_EXPR:
12459 case SAVE_EXPR:
12460 case NON_LVALUE_EXPR:
12461 case FLOAT_EXPR:
12462 case FIX_TRUNC_EXPR:
12463 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12464 strict_overflow_p);
12466 case COMPOUND_EXPR:
12467 case MODIFY_EXPR:
12468 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12469 strict_overflow_p);
12471 case BIND_EXPR:
12472 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12473 strict_overflow_p);
12475 case COND_EXPR:
12476 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12477 strict_overflow_p)
12478 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12479 strict_overflow_p));
12481 case NOP_EXPR:
12483 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12484 tree outer_type = TREE_TYPE (t);
12486 if (TREE_CODE (outer_type) == REAL_TYPE)
12488 if (TREE_CODE (inner_type) == REAL_TYPE)
12489 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12490 strict_overflow_p);
12491 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12493 if (TYPE_UNSIGNED (inner_type))
12494 return 1;
12495 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12496 strict_overflow_p);
12499 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12501 if (TREE_CODE (inner_type) == REAL_TYPE)
12502 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
12503 strict_overflow_p);
12504 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12505 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12506 && TYPE_UNSIGNED (inner_type);
12509 break;
12511 case TARGET_EXPR:
12513 tree temp = TARGET_EXPR_SLOT (t);
12514 t = TARGET_EXPR_INITIAL (t);
12516 /* If the initializer is non-void, then it's a normal expression
12517 that will be assigned to the slot. */
12518 if (!VOID_TYPE_P (t))
12519 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12521 /* Otherwise, the initializer sets the slot in some way. One common
12522 way is an assignment statement at the end of the initializer. */
12523 while (1)
12525 if (TREE_CODE (t) == BIND_EXPR)
12526 t = expr_last (BIND_EXPR_BODY (t));
12527 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12528 || TREE_CODE (t) == TRY_CATCH_EXPR)
12529 t = expr_last (TREE_OPERAND (t, 0));
12530 else if (TREE_CODE (t) == STATEMENT_LIST)
12531 t = expr_last (t);
12532 else
12533 break;
12535 if (TREE_CODE (t) == MODIFY_EXPR
12536 && TREE_OPERAND (t, 0) == temp)
12537 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12538 strict_overflow_p);
12540 return 0;
12543 case CALL_EXPR:
12545 tree fndecl = get_callee_fndecl (t);
12546 tree arglist = TREE_OPERAND (t, 1);
12547 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12548 switch (DECL_FUNCTION_CODE (fndecl))
12550 CASE_FLT_FN (BUILT_IN_ACOS):
12551 CASE_FLT_FN (BUILT_IN_ACOSH):
12552 CASE_FLT_FN (BUILT_IN_CABS):
12553 CASE_FLT_FN (BUILT_IN_COSH):
12554 CASE_FLT_FN (BUILT_IN_ERFC):
12555 CASE_FLT_FN (BUILT_IN_EXP):
12556 CASE_FLT_FN (BUILT_IN_EXP10):
12557 CASE_FLT_FN (BUILT_IN_EXP2):
12558 CASE_FLT_FN (BUILT_IN_FABS):
12559 CASE_FLT_FN (BUILT_IN_FDIM):
12560 CASE_FLT_FN (BUILT_IN_HYPOT):
12561 CASE_FLT_FN (BUILT_IN_POW10):
12562 CASE_INT_FN (BUILT_IN_FFS):
12563 CASE_INT_FN (BUILT_IN_PARITY):
12564 CASE_INT_FN (BUILT_IN_POPCOUNT):
12565 /* Always true. */
12566 return 1;
12568 CASE_FLT_FN (BUILT_IN_SQRT):
12569 /* sqrt(-0.0) is -0.0. */
12570 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12571 return 1;
12572 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12573 strict_overflow_p);
12575 CASE_FLT_FN (BUILT_IN_ASINH):
12576 CASE_FLT_FN (BUILT_IN_ATAN):
12577 CASE_FLT_FN (BUILT_IN_ATANH):
12578 CASE_FLT_FN (BUILT_IN_CBRT):
12579 CASE_FLT_FN (BUILT_IN_CEIL):
12580 CASE_FLT_FN (BUILT_IN_ERF):
12581 CASE_FLT_FN (BUILT_IN_EXPM1):
12582 CASE_FLT_FN (BUILT_IN_FLOOR):
12583 CASE_FLT_FN (BUILT_IN_FMOD):
12584 CASE_FLT_FN (BUILT_IN_FREXP):
12585 CASE_FLT_FN (BUILT_IN_LCEIL):
12586 CASE_FLT_FN (BUILT_IN_LDEXP):
12587 CASE_FLT_FN (BUILT_IN_LFLOOR):
12588 CASE_FLT_FN (BUILT_IN_LLCEIL):
12589 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12590 CASE_FLT_FN (BUILT_IN_LLRINT):
12591 CASE_FLT_FN (BUILT_IN_LLROUND):
12592 CASE_FLT_FN (BUILT_IN_LRINT):
12593 CASE_FLT_FN (BUILT_IN_LROUND):
12594 CASE_FLT_FN (BUILT_IN_MODF):
12595 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12596 CASE_FLT_FN (BUILT_IN_POW):
12597 CASE_FLT_FN (BUILT_IN_RINT):
12598 CASE_FLT_FN (BUILT_IN_ROUND):
12599 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12600 CASE_FLT_FN (BUILT_IN_SINH):
12601 CASE_FLT_FN (BUILT_IN_TANH):
12602 CASE_FLT_FN (BUILT_IN_TRUNC):
12603 /* True if the 1st argument is nonnegative. */
12604 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12605 strict_overflow_p);
12607 CASE_FLT_FN (BUILT_IN_FMAX):
12608 /* True if either the 1st or the 2nd argument is nonnegative. */
12609 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12610 strict_overflow_p)
12611 || (tree_expr_nonnegative_warnv_p
12612 (TREE_VALUE (TREE_CHAIN (arglist)),
12613 strict_overflow_p)));
12615 CASE_FLT_FN (BUILT_IN_FMIN):
12616 /* True if both the 1st and the 2nd arguments are nonnegative. */
12617 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12618 strict_overflow_p)
12619 && (tree_expr_nonnegative_warnv_p
12620 (TREE_VALUE (TREE_CHAIN (arglist)),
12621 strict_overflow_p)));
12623 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12624 /* True if the 2nd argument is nonnegative. */
12625 return (tree_expr_nonnegative_warnv_p
12626 (TREE_VALUE (TREE_CHAIN (arglist)),
12627 strict_overflow_p));
12629 default:
12630 break;
12634 /* ... fall through ... */
12636 default:
12637 if (truth_value_p (TREE_CODE (t)))
12638 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12639 return 1;
12642 /* We don't know the sign of `t', so be conservative and return false. */
12643 return 0;
12646 /* Return true if `t' is known to be non-negative. Handle warnings
12647 about undefined signed overflow. */
12650 tree_expr_nonnegative_p (tree t)
12652 int ret;
12653 bool strict_overflow_p;
12655 strict_overflow_p = false;
12656 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12657 if (strict_overflow_p)
12658 fold_overflow_warning (("assuming signed overflow does not occur when "
12659 "determining that expression is always "
12660 "non-negative"),
12661 WARN_STRICT_OVERFLOW_MISC);
12662 return ret;
12665 /* Return true when T is an address and is known to be nonzero.
12666 For floating point we further ensure that T is not denormal.
12667 Similar logic is present in nonzero_address_p in rtlanal.c.
12669 If the return value is based on the assumption that signed overflow
12670 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12671 change *STRICT_OVERFLOW_P. */
12673 bool
12674 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12676 tree type = TREE_TYPE (t);
12677 bool sub_strict_overflow_p;
12679 /* Doing something useful for floating point would need more work. */
12680 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12681 return false;
12683 switch (TREE_CODE (t))
12685 case SSA_NAME:
12686 /* Query VRP to see if it has recorded any information about
12687 the range of this object. */
12688 return ssa_name_nonzero_p (t);
12690 case ABS_EXPR:
12691 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12692 strict_overflow_p);
12694 case INTEGER_CST:
12695 /* We used to test for !integer_zerop here. This does not work correctly
12696 if TREE_CONSTANT_OVERFLOW (t). */
12697 return (TREE_INT_CST_LOW (t) != 0
12698 || TREE_INT_CST_HIGH (t) != 0);
12700 case PLUS_EXPR:
12701 if (TYPE_OVERFLOW_UNDEFINED (type))
12703 /* In the presence of negative values it is hard
12704 to say anything definite. */
12705 sub_strict_overflow_p = false;
12706 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12707 &sub_strict_overflow_p)
12708 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12709 &sub_strict_overflow_p))
12710 return false;
12711 /* One of the operands must be positive and the other non-negative. */
12712 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12713 overflows, on a two's-complement machine the sum of two
12714 nonnegative numbers can never be zero. */
12715 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12716 strict_overflow_p)
12717 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12718 strict_overflow_p));
12720 break;
12722 case MULT_EXPR:
12723 if (TYPE_OVERFLOW_UNDEFINED (type))
12725 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12726 strict_overflow_p)
12727 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12728 strict_overflow_p))
12730 *strict_overflow_p = true;
12731 return true;
12734 break;
12736 case NOP_EXPR:
12738 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12739 tree outer_type = TREE_TYPE (t);
12741 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12742 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12743 strict_overflow_p));
12745 break;
12747 case ADDR_EXPR:
12749 tree base = get_base_address (TREE_OPERAND (t, 0));
12751 if (!base)
12752 return false;
12754 /* Weak declarations may link to NULL. */
12755 if (VAR_OR_FUNCTION_DECL_P (base))
12756 return !DECL_WEAK (base);
12758 /* Constants are never weak. */
12759 if (CONSTANT_CLASS_P (base))
12760 return true;
12762 return false;
12765 case COND_EXPR:
12766 sub_strict_overflow_p = false;
12767 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12768 &sub_strict_overflow_p)
12769 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12770 &sub_strict_overflow_p))
12772 if (sub_strict_overflow_p)
12773 *strict_overflow_p = true;
12774 return true;
12776 break;
12778 case MIN_EXPR:
12779 sub_strict_overflow_p = false;
12780 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12781 &sub_strict_overflow_p)
12782 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12783 &sub_strict_overflow_p))
12785 if (sub_strict_overflow_p)
12786 *strict_overflow_p = true;
12788 break;
12790 case MAX_EXPR:
12791 sub_strict_overflow_p = false;
12792 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12793 &sub_strict_overflow_p))
12795 if (sub_strict_overflow_p)
12796 *strict_overflow_p = true;
12798 /* When both operands are nonzero, then MAX must be too. */
12799 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12800 strict_overflow_p))
12801 return true;
12803 /* MAX where operand 0 is positive is positive. */
12804 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12805 strict_overflow_p);
12807 /* MAX where operand 1 is positive is positive. */
12808 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12809 &sub_strict_overflow_p)
12810 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12811 &sub_strict_overflow_p))
12813 if (sub_strict_overflow_p)
12814 *strict_overflow_p = true;
12815 return true;
12817 break;
12819 case COMPOUND_EXPR:
12820 case MODIFY_EXPR:
12821 case BIND_EXPR:
12822 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12823 strict_overflow_p);
12825 case SAVE_EXPR:
12826 case NON_LVALUE_EXPR:
12827 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12828 strict_overflow_p);
12830 case BIT_IOR_EXPR:
12831 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12832 strict_overflow_p)
12833 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12834 strict_overflow_p));
12836 case CALL_EXPR:
12837 return alloca_call_p (t);
12839 default:
12840 break;
12842 return false;
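/* Illustration (not part of GCC) of the ADDR_EXPR restriction above: the
   address of an undefined weak symbol may legitimately be null, so it
   cannot be folded to "nonzero".  */
#if 0
extern int weak_var __attribute__ ((weak));

static int
weak_example (void)
{
  return &weak_var != 0;  /* not foldable; may be false at run time */
}
#endif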
12845 /* Return true when T is an address and is known to be nonzero.
12846 Handle warnings about undefined signed overflow. */
12848 bool
12849 tree_expr_nonzero_p (tree t)
12851 bool ret, strict_overflow_p;
12853 strict_overflow_p = false;
12854 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12855 if (strict_overflow_p)
12856 fold_overflow_warning (("assuming signed overflow does not occur when "
12857 "determining that expression is always "
12858 "non-zero"),
12859 WARN_STRICT_OVERFLOW_MISC);
12860 return ret;
12863 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12864 attempt to fold the expression to a constant without modifying TYPE,
12865 OP0 or OP1.
12867 If the expression could be simplified to a constant, then return
12868 the constant. If the expression cannot be simplified to a
12869 constant, return NULL_TREE. */
12871 tree
12872 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12874 tree tem = fold_binary (code, type, op0, op1);
12875 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12878 /* Given the components of a unary expression CODE, TYPE and OP0,
12879 attempt to fold the expression to a constant without modifying
12880 TYPE or OP0.
12882 If the expression could be simplified to a constant, then return
12883 the constant. If the expression cannot be simplified to a
12884 constant, return NULL_TREE. */
12886 tree
12887 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12889 tree tem = fold_unary (code, type, op0);
12890 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12893 /* If EXP represents referencing an element in a constant string
12894 (either via pointer arithmetic or array indexing), return the
12895 tree representing the value accessed, otherwise return NULL. */
12897 tree
12898 fold_read_from_constant_string (tree exp)
12900 if ((TREE_CODE (exp) == INDIRECT_REF
12901 || TREE_CODE (exp) == ARRAY_REF)
12902 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12904 tree exp1 = TREE_OPERAND (exp, 0);
12905 tree index;
12906 tree string;
12908 if (TREE_CODE (exp) == INDIRECT_REF)
12909 string = string_constant (exp1, &index);
12910 else
12912 tree low_bound = array_ref_low_bound (exp);
12913 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12915 /* Optimize the special case of a zero lower bound.
12917 We convert the low_bound to sizetype to avoid some problems
12918 with constant folding. (E.g. suppose the lower bound is 1,
12919 and its mode is QI. Without the conversion, (ARRAY
12920 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12921 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12922 if (! integer_zerop (low_bound))
12923 index = size_diffop (index, fold_convert (sizetype, low_bound));
12925 string = exp1;
12928 if (string
12929 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12930 && TREE_CODE (string) == STRING_CST
12931 && TREE_CODE (index) == INTEGER_CST
12932 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12933 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12934 == MODE_INT)
12935 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12936 return fold_convert (TREE_TYPE (exp),
12937 build_int_cst (NULL_TREE,
12938 (TREE_STRING_POINTER (string)
12939 [TREE_INT_CST_LOW (index)])));
12941 return NULL;
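/* Illustration (not part of GCC): source-level forms that the routine
   above folds, in both the array-indexing and the pointer form.  */
#if 0
static int
string_read_example (void)
{
  return "hello"[1] + *("world" + 2);  /* folds to 'e' + 'r' */
}
#endif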
12944 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12945 an integer constant or real constant.
12947 TYPE is the type of the result. */
12949 static tree
12950 fold_negate_const (tree arg0, tree type)
12952 tree t = NULL_TREE;
12954 switch (TREE_CODE (arg0))
12956 case INTEGER_CST:
12958 unsigned HOST_WIDE_INT low;
12959 HOST_WIDE_INT high;
12960 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12961 TREE_INT_CST_HIGH (arg0),
12962 &low, &high);
12963 t = build_int_cst_wide (type, low, high);
12964 t = force_fit_type (t, 1,
12965 (overflow | TREE_OVERFLOW (arg0))
12966 && !TYPE_UNSIGNED (type),
12967 TREE_CONSTANT_OVERFLOW (arg0));
12968 break;
12971 case REAL_CST:
12972 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12973 break;
12975 default:
12976 gcc_unreachable ();
12979 return t;
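/* A sketch (not GCC's neg_double) of the two-word negation used above:
   the double-word value HIGH:LOW is negated as ~x + 1, with the carry
   reaching the high word only when LOW is zero.  Overflow can occur
   only for the most negative value, which is its own negation.  */
#if 0
static void
neg_double_sketch (unsigned long low, unsigned long high,
                   unsigned long *plow, unsigned long *phigh)
{
  *plow = -low;
  *phigh = ~high + (low == 0);
}
#endif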
12982 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12983 an integer constant or real constant.
12985 TYPE is the type of the result. */
12987 tree
12988 fold_abs_const (tree arg0, tree type)
12990 tree t = NULL_TREE;
12992 switch (TREE_CODE (arg0))
12994 case INTEGER_CST:
12995 /* If the value is unsigned, then the absolute value is
12996 the same as the ordinary value. */
12997 if (TYPE_UNSIGNED (type))
12998 t = arg0;
12999 /* Similarly, if the value is non-negative. */
13000 else if (INT_CST_LT (integer_minus_one_node, arg0))
13001 t = arg0;
13002 /* If the value is negative, then the absolute value is
13003 its negation. */
13004 else
13006 unsigned HOST_WIDE_INT low;
13007 HOST_WIDE_INT high;
13008 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13009 TREE_INT_CST_HIGH (arg0),
13010 &low, &high);
13011 t = build_int_cst_wide (type, low, high);
13012 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13013 TREE_CONSTANT_OVERFLOW (arg0));
13015 break;
13017 case REAL_CST:
13018 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13019 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13020 else
13021 t = arg0;
13022 break;
13024 default:
13025 gcc_unreachable ();
13028 return t;
13031 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13032 constant. TYPE is the type of the result. */
13034 static tree
13035 fold_not_const (tree arg0, tree type)
13037 tree t = NULL_TREE;
13039 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13041 t = build_int_cst_wide (type,
13042 ~ TREE_INT_CST_LOW (arg0),
13043 ~ TREE_INT_CST_HIGH (arg0));
13044 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13045 TREE_CONSTANT_OVERFLOW (arg0));
13047 return t;
13050 /* Given CODE, a relational operator, the target type, TYPE and two
13051 constant operands OP0 and OP1, return the result of the
13052 relational operation. If the result is not a compile-time
13053 constant, then return NULL_TREE. */
13055 static tree
13056 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13058 int result, invert;
13060 /* From here on, the only cases we handle are when the result is
13061 known to be a constant. */
13063 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13065 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13066 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13068 /* Handle the cases where either operand is a NaN. */
13069 if (real_isnan (c0) || real_isnan (c1))
13071 switch (code)
13073 case EQ_EXPR:
13074 case ORDERED_EXPR:
13075 result = 0;
13076 break;
13078 case NE_EXPR:
13079 case UNORDERED_EXPR:
13080 case UNLT_EXPR:
13081 case UNLE_EXPR:
13082 case UNGT_EXPR:
13083 case UNGE_EXPR:
13084 case UNEQ_EXPR:
13085 result = 1;
13086 break;
13088 case LT_EXPR:
13089 case LE_EXPR:
13090 case GT_EXPR:
13091 case GE_EXPR:
13092 case LTGT_EXPR:
13093 if (flag_trapping_math)
13094 return NULL_TREE;
13095 result = 0;
13096 break;
13098 default:
13099 gcc_unreachable ();
13102 return constant_boolean_node (result, type);
13105 return constant_boolean_node (real_compare (code, c0, c1), type);
13108 /* Handle equality/inequality of complex constants. */
13109 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13111 tree rcond = fold_relational_const (code, type,
13112 TREE_REALPART (op0),
13113 TREE_REALPART (op1));
13114 tree icond = fold_relational_const (code, type,
13115 TREE_IMAGPART (op0),
13116 TREE_IMAGPART (op1));
13117 if (code == EQ_EXPR)
13118 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13119 else if (code == NE_EXPR)
13120 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13121 else
13122 return NULL_TREE;
13125 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13127 To compute GT, swap the arguments and do LT.
13128 To compute GE, do LT and invert the result.
13129 To compute LE, swap the arguments, do LT and invert the result.
13130 To compute NE, do EQ and invert the result.
13132 Therefore, the code below must handle only EQ and LT. */
13134 if (code == LE_EXPR || code == GT_EXPR)
13136 tree tem = op0;
13137 op0 = op1;
13138 op1 = tem;
13139 code = swap_tree_comparison (code);
13142 /* Note that it is safe to invert for real values here because we
13143 have already handled the one case where it matters. */
13145 invert = 0;
13146 if (code == NE_EXPR || code == GE_EXPR)
13148 invert = 1;
13149 code = invert_tree_comparison (code, false);
13152 /* Compute a result for LT or EQ if args permit;
13153 otherwise return NULL_TREE. */
13154 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13156 if (code == EQ_EXPR)
13157 result = tree_int_cst_equal (op0, op1);
13158 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13159 result = INT_CST_LT_UNSIGNED (op0, op1);
13160 else
13161 result = INT_CST_LT (op0, op1);
13163 else
13164 return NULL_TREE;
13166 if (invert)
13167 result ^= 1;
13168 return constant_boolean_node (result, type);
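/* Illustration (not part of GCC): the NaN rules folded above mirror the
   run-time IEEE semantics, where every ordered comparison against a NaN
   is false while != is true.  */
#if 0
static int
nan_compare_example (double zero)
{
  double n = zero / zero;  /* a NaN when ZERO is 0.0 */
  return (n == n) == 0 && (n < 1.0) == 0 && (n != n) == 1;
}
#endif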
13171 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13172 Don't build a cleanup point expression if EXPR doesn't have side
13173 effects. */
13175 tree
13176 fold_build_cleanup_point_expr (tree type, tree expr)
13178 /* If the expression does not have side effects then we don't have to wrap
13179 it with a cleanup point expression. */
13180 if (!TREE_SIDE_EFFECTS (expr))
13181 return expr;
13183 /* If the expression is a return, check whether the expression inside the
13184 return, or the right-hand side of the modify expression inside the
13185 return, has side effects. If either of them has none, we don't need to
13186 wrap the expression in a cleanup point expression. Note we don't check the
13187 left-hand side of the modify because it should always be a return decl. */
13188 if (TREE_CODE (expr) == RETURN_EXPR)
13190 tree op = TREE_OPERAND (expr, 0);
13191 if (!op || !TREE_SIDE_EFFECTS (op))
13192 return expr;
13193 op = TREE_OPERAND (op, 1);
13194 if (!TREE_SIDE_EFFECTS (op))
13195 return expr;
13198 return build1 (CLEANUP_POINT_EXPR, type, expr);
13201 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13202 avoid confusing the gimplify process. */
13204 tree
13205 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13207 /* The size of the object is not relevant when talking about its address. */
13208 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13209 t = TREE_OPERAND (t, 0);
13211 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13212 if (TREE_CODE (t) == INDIRECT_REF
13213 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13215 t = TREE_OPERAND (t, 0);
13216 if (TREE_TYPE (t) != ptrtype)
13217 t = build1 (NOP_EXPR, ptrtype, t);
13219 else
13221 tree base = t;
13223 while (handled_component_p (base))
13224 base = TREE_OPERAND (base, 0);
13225 if (DECL_P (base))
13226 TREE_ADDRESSABLE (base) = 1;
13228 t = build1 (ADDR_EXPR, ptrtype, t);
13231 return t;
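/* Illustration (not part of GCC): the INDIRECT_REF fold above is the
   source-level identity that taking the address of an indirection
   cancels out.  */
#if 0
static int *
addr_of_indirect_example (int *p)
{
  return &*p;  /* folds to plain P; no dereference takes place */
}
#endif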
13234 tree
13235 build_fold_addr_expr (tree t)
13237 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13240 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13241 of an indirection through OP0, or NULL_TREE if no simplification is
13242 possible. */
13244 tree
13245 fold_indirect_ref_1 (tree type, tree op0)
13247 tree sub = op0;
13248 tree subtype;
13250 STRIP_NOPS (sub);
13251 subtype = TREE_TYPE (sub);
13252 if (!POINTER_TYPE_P (subtype))
13253 return NULL_TREE;
13255 if (TREE_CODE (sub) == ADDR_EXPR)
13257 tree op = TREE_OPERAND (sub, 0);
13258 tree optype = TREE_TYPE (op);
13259 /* *&CONST_DECL -> to the value of the const decl. */
13260 if (TREE_CODE (op) == CONST_DECL)
13261 return DECL_INITIAL (op);
13262 /* *&p => p; make sure to handle *&"str"[cst] here. */
13263 if (type == optype)
13265 tree fop = fold_read_from_constant_string (op);
13266 if (fop)
13267 return fop;
13268 else
13269 return op;
13271 /* *(foo *)&fooarray => fooarray[0] */
13272 else if (TREE_CODE (optype) == ARRAY_TYPE
13273 && type == TREE_TYPE (optype))
13275 tree type_domain = TYPE_DOMAIN (optype);
13276 tree min_val = size_zero_node;
13277 if (type_domain && TYPE_MIN_VALUE (type_domain))
13278 min_val = TYPE_MIN_VALUE (type_domain);
13279 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13281 /* *(foo *)&complexfoo => __real__ complexfoo */
13282 else if (TREE_CODE (optype) == COMPLEX_TYPE
13283 && type == TREE_TYPE (optype))
13284 return fold_build1 (REALPART_EXPR, type, op);
13287 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13288 if (TREE_CODE (sub) == PLUS_EXPR
13289 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13291 tree op00 = TREE_OPERAND (sub, 0);
13292 tree op01 = TREE_OPERAND (sub, 1);
13293 tree op00type;
13295 STRIP_NOPS (op00);
13296 op00type = TREE_TYPE (op00);
13297 if (TREE_CODE (op00) == ADDR_EXPR
13298 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13299 && type == TREE_TYPE (TREE_TYPE (op00type)))
13301 tree size = TYPE_SIZE_UNIT (type);
13302 if (tree_int_cst_equal (size, op01))
13303 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13307 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13308 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13309 && type == TREE_TYPE (TREE_TYPE (subtype)))
13311 tree type_domain;
13312 tree min_val = size_zero_node;
13313 sub = build_fold_indirect_ref (sub);
13314 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13315 if (type_domain && TYPE_MIN_VALUE (type_domain))
13316 min_val = TYPE_MIN_VALUE (type_domain);
13317 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13320 return NULL_TREE;
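/* Illustration (not part of GCC): source-level forms of the folds
   above.  */
#if 0
static double
indirect_example (int (*arrptr)[4], _Complex double z)
{
  int a[4] = { 7, 0, 0, 0 };
  return *(int *) &a       /* *(foo *)&fooarray => fooarray[0] */
         + *(int *) arrptr /* *(foo *)fooarrptr => (*fooarrptr)[0] */
         + *(double *) &z; /* *(foo *)&complexfoo => __real__ complexfoo */
}
#endif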
13323 /* Builds an expression for an indirection through T, simplifying some
13324 cases. */
13326 tree
13327 build_fold_indirect_ref (tree t)
13329 tree type = TREE_TYPE (TREE_TYPE (t));
13330 tree sub = fold_indirect_ref_1 (type, t);
13332 if (sub)
13333 return sub;
13334 else
13335 return build1 (INDIRECT_REF, type, t);
13338 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13340 tree
13341 fold_indirect_ref (tree t)
13343 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13345 if (sub)
13346 return sub;
13347 else
13348 return t;
13351 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13352 whose result is ignored. The type of the returned tree need not be
13353 the same as the original expression. */
13355 tree
13356 fold_ignored_result (tree t)
13358 if (!TREE_SIDE_EFFECTS (t))
13359 return integer_zero_node;
13361 for (;;)
13362 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13364 case tcc_unary:
13365 t = TREE_OPERAND (t, 0);
13366 break;
13368 case tcc_binary:
13369 case tcc_comparison:
13370 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13371 t = TREE_OPERAND (t, 0);
13372 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13373 t = TREE_OPERAND (t, 1);
13374 else
13375 return t;
13376 break;
13378 case tcc_expression:
13379 switch (TREE_CODE (t))
13381 case COMPOUND_EXPR:
13382 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13383 return t;
13384 t = TREE_OPERAND (t, 0);
13385 break;
13387 case COND_EXPR:
13388 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13389 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13390 return t;
13391 t = TREE_OPERAND (t, 0);
13392 break;
13394 default:
13395 return t;
13397 break;
13399 default:
13400 return t;
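/* Illustration (not part of GCC): with its result ignored, an expression
   keeps only its side-effecting parts, so X + F () reduces to F ().  */
#if 0
static void
ignored_result_example (int x, int (*f) (void))
{
  (void) (x + f ());  /* only the call to F needs to survive */
}
#endif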
13404 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13405 This can only be applied to objects of a sizetype. */
13407 tree
13408 round_up (tree value, int divisor)
13410 tree div = NULL_TREE;
13412 gcc_assert (divisor > 0);
13413 if (divisor == 1)
13414 return value;
13416 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13417 have to do anything. Only do this when VALUE is not a constant,
13418 because for a constant this check is more expensive than just
13419 doing the rounding. */
13420 if (TREE_CODE (value) != INTEGER_CST)
13422 div = build_int_cst (TREE_TYPE (value), divisor);
13424 if (multiple_of_p (TREE_TYPE (value), value, div))
13425 return value;
13428 /* If divisor is a power of two, simplify this to bit manipulation. */
13429 if (divisor == (divisor & -divisor))
13431 tree t;
13433 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13434 value = size_binop (PLUS_EXPR, value, t);
13435 t = build_int_cst (TREE_TYPE (value), -divisor);
13436 value = size_binop (BIT_AND_EXPR, value, t);
13438 else
13440 if (!div)
13441 div = build_int_cst (TREE_TYPE (value), divisor);
13442 value = size_binop (CEIL_DIV_EXPR, value, div);
13443 value = size_binop (MULT_EXPR, value, div);
13446 return value;
13449 /* Likewise, but round down. */
13451 tree
13452 round_down (tree value, int divisor)
13454 tree div = NULL_TREE;
13456 gcc_assert (divisor > 0);
13457 if (divisor == 1)
13458 return value;
13460 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13461 have to do anything. Only do this when VALUE is not a constant,
13462 because for a constant this check is more expensive than just
13463 doing the rounding. */
13464 if (TREE_CODE (value) != INTEGER_CST)
13466 div = build_int_cst (TREE_TYPE (value), divisor);
13468 if (multiple_of_p (TREE_TYPE (value), value, div))
13469 return value;
13472 /* If divisor is a power of two, simplify this to bit manipulation. */
13473 if (divisor == (divisor & -divisor))
13475 tree t;
13477 t = build_int_cst (TREE_TYPE (value), -divisor);
13478 value = size_binop (BIT_AND_EXPR, value, t);
13480 else
13482 if (!div)
13483 div = build_int_cst (TREE_TYPE (value), divisor);
13484 value = size_binop (FLOOR_DIV_EXPR, value, div);
13485 value = size_binop (MULT_EXPR, value, div);
13488 return value;
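/* Illustration (not part of GCC) of the power-of-two paths above on host
   integers, assuming two's complement, where -DIVISOR is a mask that
   clears the low bits.  */
#if 0
static unsigned long
round_up_example (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;  /* e.g. (13, 8) => 16 */
}

static unsigned long
round_down_example (unsigned long value, unsigned long divisor)
{
  return value & -divisor;                  /* e.g. (13, 8) => 8 */
}
#endif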
13491 /* Returns the pointer to the base of the object addressed by EXP and
13492 extracts the information about the offset of the access, storing it
13493 in PBITPOS and POFFSET. */
13495 static tree
13496 split_address_to_core_and_offset (tree exp,
13497 HOST_WIDE_INT *pbitpos, tree *poffset)
13499 tree core;
13500 enum machine_mode mode;
13501 int unsignedp, volatilep;
13502 HOST_WIDE_INT bitsize;
13504 if (TREE_CODE (exp) == ADDR_EXPR)
13506 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13507 poffset, &mode, &unsignedp, &volatilep,
13508 false);
13509 core = build_fold_addr_expr (core);
13511 else
13513 core = exp;
13514 *pbitpos = 0;
13515 *poffset = NULL_TREE;
13518 return core;
13521 /* Returns true if addresses of E1 and E2 differ by a constant, false
13522 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13524 bool
13525 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13527 tree core1, core2;
13528 HOST_WIDE_INT bitpos1, bitpos2;
13529 tree toffset1, toffset2, tdiff, type;
13531 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13532 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13534 if (bitpos1 % BITS_PER_UNIT != 0
13535 || bitpos2 % BITS_PER_UNIT != 0
13536 || !operand_equal_p (core1, core2, 0))
13537 return false;
13539 if (toffset1 && toffset2)
13541 type = TREE_TYPE (toffset1);
13542 if (type != TREE_TYPE (toffset2))
13543 toffset2 = fold_convert (type, toffset2);
13545 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13546 if (!cst_and_fits_in_hwi (tdiff))
13547 return false;
13549 *diff = int_cst_value (tdiff);
13551 else if (toffset1 || toffset2)
13553 /* If only one of the offsets is non-constant, the difference cannot
13554 be a constant. */
13555 return false;
13557 else
13558 *diff = 0;
13560 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13561 return true;
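/* Illustration (not part of GCC): a difference the routine above
   recognizes; both addresses share the core P and differ by a
   compile-time constant number of bytes.  */
#if 0
static long
ptr_diff_example (int *p)
{
  return (char *) &p[5] - (char *) &p[2];  /* 3 * sizeof (int) */
}
#endif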
13564 /* Simplify the floating point expression EXP when the sign of the
13565 result is not significant. Return NULL_TREE if no simplification
13566 is possible. */
13568 tree
13569 fold_strip_sign_ops (tree exp)
13571 tree arg0, arg1;
13573 switch (TREE_CODE (exp))
13575 case ABS_EXPR:
13576 case NEGATE_EXPR:
13577 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13578 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13580 case MULT_EXPR:
13581 case RDIV_EXPR:
13582 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13583 return NULL_TREE;
13584 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13585 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13586 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13587 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13588 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13589 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13590 break;
13592 default:
13593 break;
13595 return NULL_TREE;
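/* Illustration (not part of GCC): a context where the sign of a
   subexpression is insignificant, so the negation below can be stripped
   before the multiplication.  */
#if 0
static double
strip_sign_example (double x, double y)
{
  return __builtin_fabs (-x * y);  /* may be simplified to fabs (x * y) */
}
#endif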