/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
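
/* Illustration only, not part of fold-const.c: the encoding above packs
   each comparison into four basis bits (LT = 1, EQ = 2, GT = 4,
   UNORD = 8), so conjunction and disjunction of two comparisons on the
   same operands reduce to bitwise AND/OR of their codes.  A minimal
   standalone sketch of that reading, kept out of the build with #if 0
   (hypothetical demo; compile separately):  */
#if 0
#include <stdio.h>

enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };

int
main (void)
{
  int le = LT | EQ;   /* 3, matches COMPCODE_LE */
  int ge = EQ | GT;   /* 6, matches COMPCODE_GE */

  /* (a <= b) && (a >= b) simplifies to (a == b): AND the codes.  */
  printf ("LE & GE = %d (EQ = %d)\n", le & ge, EQ);
  /* (a < b) || (a > b) simplifies to an LTGT test: OR the codes.  */
  printf ("LT | GT = %d (LTGT = 5)\n", LT | GT);
  return 0;
}
#endif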
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
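
/* Illustration only, not part of fold-const.c: adding 1 to the most
   positive value flips the sign of the sum while the operands agree in
   sign, which the macro detects.  A minimal check (the wrapping sum is
   computed on unsigned copies so the demo itself avoids signed-overflow
   undefined behavior; the conversion back is implementation-defined but
   wraps on the usual targets):  */
#if 0
#include <limits.h>
#include <stdio.h>

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

int
main (void)
{
  long a = LONG_MAX, b = 1;
  long sum = (long) ((unsigned long) a + (unsigned long) b);

  printf ("%d\n", OVERFLOW_SUM_SIGN (a, b, sum));      /* 1: overflowed */
  printf ("%d\n", OVERFLOW_SUM_SIGN (a, -1L, a - 1));  /* 0: fine */
  return 0;
}
#endif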
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
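
/* Illustration only, not part of fold-const.c: a self-contained round
   trip of this 4-half-word scheme, assuming a 64-bit HOST_WIDE_INT so
   each word holds a positive 32-bit digit.  The macros are local
   re-implementations, not the ones above:  */
#if 0
#include <stdint.h>
#include <stdio.h>

#define HBITS 64
#define LOWPART(x)  ((x) & (((uint64_t) 1 << (HBITS / 2)) - 1))
#define HIGHPART(x) ((uint64_t) (x) >> (HBITS / 2))
#define BASE        ((uint64_t) 1 << (HBITS / 2))

int
main (void)
{
  uint64_t low = 0x123456789abcdef0u;
  int64_t hi = -42;
  int64_t words[4];

  /* Unpack: two 64-bit words become four positive 32-bit digits.  */
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART ((uint64_t) hi);
  words[3] = HIGHPART ((uint64_t) hi);

  /* Pack: the value of each pair is LOWPART + HIGHPART * BASE.
     BASE is unsigned, so the multiply wraps harmlessly for negatives.  */
  uint64_t low2 = words[0] + words[1] * BASE;
  int64_t hi2 = (int64_t) (words[2] + words[3] * BASE);

  printf ("%d\n", low == low2 && hi == hi2);  /* 1 */
  return 0;
}
#endif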
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOW if,
        OVERFLOWED_CONST is nonzero
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
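
/* Illustration only, not part of fold-const.c: the clear-then-sign-extend
   dance above is easier to see on one machine word.  A sketch that forces
   a 64-bit value into PREC bits the same way, mirroring the low-word path
   for PREC < HOST_BITS_PER_WIDE_INT (hypothetical helper name):  */
#if 0
#include <stdint.h>
#include <stdio.h>

static int64_t
fit_to_prec (uint64_t val, unsigned prec, int is_signed)
{
  val &= ~(~(uint64_t) 0 << prec);              /* clear bits beyond PREC */
  if (is_signed && (val & ((uint64_t) 1 << (prec - 1))))
    val |= ~(uint64_t) 0 << prec;               /* copy sign bit upward */
  return (int64_t) val;
}

int
main (void)
{
  /* 200 does not fit in signed 8 bits: it becomes -56 (0xC8).  */
  printf ("%lld\n", (long long) fit_to_prec (200, 8, 1));
  /* 200 fits in unsigned 8 bits unchanged.  */
  printf ("%lld\n", (long long) fit_to_prec (200, 8, 0));
  return 0;
}
#endif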
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
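
/* Illustration only, not part of fold-const.c: the carry computation
   `h1 + h2 + (l < l1)' works because the low-word addition wrapped
   exactly when the unsigned sum is smaller than an operand.  The same
   idiom in a standalone 128-bit add over uint64_t words (hypothetical
   helper name):  */
#if 0
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static void
add128 (uint64_t l1, uint64_t h1, uint64_t l2, uint64_t h2,
        uint64_t *lv, uint64_t *hv)
{
  uint64_t l = l1 + l2;          /* may wrap */
  *lv = l;
  *hv = h1 + h2 + (l < l1);      /* (l < l1) is 1 iff the low add wrapped */
}

int
main (void)
{
  uint64_t lo, hi;
  /* (2^64 - 1) + 1 = 2^64: low word wraps to 0, carry goes high.  */
  add128 (~(uint64_t) 0, 0, 1, 0, &lo, &hi);
  printf ("%" PRIu64 " %" PRIu64 "\n", lo, hi);  /* 0 1 */
  return 0;
}
#endif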
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
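
/* Illustration only, not part of fold-const.c: the digit loop above is
   ordinary long multiplication in base 2^(HOST_BITS_PER_WIDE_INT/2).
   A condensed standalone version of one 64x64 -> 128 product using
   32-bit digits, with the same carry discipline (hypothetical helper):  */
#if 0
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static void
mul64x64 (uint64_t a, uint64_t b, uint64_t *lo, uint64_t *hi)
{
  uint64_t al = a & 0xffffffffu, ah = a >> 32;
  uint64_t bl = b & 0xffffffffu, bh = b >> 32;
  uint64_t p0 = al * bl, p1 = al * bh, p2 = ah * bl, p3 = ah * bh;
  /* Each partial product is <= (2^32 - 1)^2, so these sums can't wrap.  */
  uint64_t mid = (p0 >> 32) + (p1 & 0xffffffffu) + (p2 & 0xffffffffu);

  *lo = (p0 & 0xffffffffu) | (mid << 32);
  *hi = p3 + (p1 >> 32) + (p2 >> 32) + (mid >> 32);
}

int
main (void)
{
  uint64_t lo, hi;
  mul64x64 (~(uint64_t) 0, ~(uint64_t) 0, &lo, &hi);
  /* (2^64 - 1)^2 = 2^128 - 2^65 + 1 -> lo = 1, hi = 2^64 - 2.  */
  printf ("%" PRIu64 " %" PRIu64 "\n", lo, hi);
  return 0;
}
#endif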
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
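
/* Illustration only, not part of fold-const.c: note the split
   `l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1' in the cross-word
   step above.  A single shift by HOST_BITS_PER_WIDE_INT - count would
   shift by the full word width when COUNT is 0, which C leaves
   undefined.  A minimal standalone rendering of the trick:  */
#if 0
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Bits of LOW that spill into the high word after shifting left by
   COUNT (0 <= COUNT < 64).  Two shifts make COUNT == 0 a well-defined
   ">> 63 >> 1" instead of an undefined ">> 64".  */
static uint64_t
spill (uint64_t low, unsigned count)
{
  return low >> (64 - count - 1) >> 1;
}

int
main (void)
{
  printf ("%" PRIx64 "\n", spill (0x8000000000000000u, 1)); /* 1 */
  printf ("%" PRIx64 "\n", spill (0x8000000000000000u, 0)); /* 0, no UB */
  return 0;
}
#endif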
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
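
/* Illustration only, not part of fold-const.c: expressing a rotate as
   the OR of two opposite shifts, as above, is the usual pattern.  On a
   single machine word it reduces to (hypothetical one-word sketch):  */
#if 0
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Left-rotate a 32-bit value: the bits shifted out on the left come
   back in on the right via the companion right shift.  */
static uint32_t
rotl32 (uint32_t x, unsigned count)
{
  count %= 32;
  if (count == 0)
    return x;                    /* avoid the undefined >> 32 below */
  return (x << count) | (x >> (32 - count));
}

int
main (void)
{
  printf ("%08" PRIx32 "\n", rotl32 (0x80000001u, 4)); /* 00000018 */
  return 0;
}
#endif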
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra (scaling) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
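
/* Illustration only, not part of fold-const.c: the four rounding
   adjustments reduce to leaving the truncated quotient alone, or
   stepping it by one toward the wanted direction when the remainder is
   nonzero (for ROUND, when twice the remainder reaches the divisor).
   A single-word sketch of the same case analysis; C99 `/' already
   truncates toward zero, and the INT_MIN / -1 overflow case is ignored
   here:  */
#if 0
#include <stdio.h>
#include <stdlib.h>

enum round_mode { TRUNC, FLOOR, CEIL, ROUND };

static int
div_round (int n, int d, enum round_mode mode)
{
  int q = n / d;                 /* truncated quotient */
  int r = n % d;
  if (r == 0)
    return q;
  switch (mode)
    {
    case TRUNC: return q;
    case FLOOR: return (n < 0) != (d < 0) ? q - 1 : q;
    case CEIL:  return (n < 0) == (d < 0) ? q + 1 : q;
    case ROUND: return 2 * abs (r) >= abs (d)
                       ? ((n < 0) != (d < 0) ? q - 1 : q + 1) : q;
    }
  return q;
}

int
main (void)
{
  /* -7 / 2: trunc -3, floor -4, ceil -3, round -4 (ties away).  */
  printf ("%d %d %d %d\n",
          div_round (-7, 2, TRUNC), div_round (-7, 2, FLOOR),
          div_round (-7, 2, CEIL), div_round (-7, 2, ROUND));
  return 0;
}
#endif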
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
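
/* Illustration only, not part of fold-const.c: in two's complement the
   single value whose negation overflows is the minimum, i.e. the lone
   sign-bit pattern that the final comparison above tests for.  On a
   plain C type the same check is simply:  */
#if 0
#include <limits.h>
#include <stdio.h>

static int
may_negate_int (int v)
{
  return v != INT_MIN;           /* -INT_MIN is not representable */
}

int
main (void)
{
  printf ("%d %d\n", may_negate_int (INT_MAX), may_negate_int (INT_MIN));
  /* prints: 1 0 */
  return 0;
}
#endif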
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
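
/* Illustration only, not part of fold-const.c: the signed-zero
   restriction in the MINUS_EXPR case is observable.  With a == b == 0.0,
   -(a - b) is -0.0 while b - a is +0.0, so rewriting -(A-B) as B-A would
   change the sign of zero.  A quick standalone demonstration (IEEE 754
   arithmetic assumed; C99 signbit):  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double a = 0.0, b = 0.0;
  double neg = -(a - b);         /* -( +0.0 ) = -0.0 */
  double swapped = b - a;        /* +0.0 */

  printf ("%d %d\n", signbit (neg) != 0, signbit (swapped) != 0);
  /* prints: 1 0 -- the two forms differ on zero's sign */
  return 0;
}
#endif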
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
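
/* Illustration only, not part of fold-const.c: the single-word shortcut
   handles CEIL_DIV_EXPR with the familiar bias `(a + b - 1) / b', valid
   for nonnegative A and positive B exactly as the guard above requires.
   In isolation:  */
#if 0
#include <stdio.h>

static unsigned long
ceil_div (unsigned long a, unsigned long b)
{
  /* Mirrors the `int1l += int2l - 1' adjustment before the truncating
     divide; assumes a + b - 1 does not wrap.  */
  return (a + b - 1) / b;
}

int
main (void)
{
  printf ("%lu %lu %lu\n", ceil_div (7, 2), ceil_div (8, 2), ceil_div (0, 5));
  /* prints: 4 4 0 */
  return 0;
}
#endif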
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
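
/* Illustration only, not part of fold-const.c: the constant branch above
   orders the operands so the unsigned subtraction can't wrap, converts,
   and negates if needed.  The same pattern over plain unsigned words
   (hypothetical helper; assumes the true difference fits the signed
   type):  */
#if 0
#include <stdio.h>

static long
udiff (unsigned long a, unsigned long b)
{
  if (a == b)
    return 0;
  else if (b < a)
    return (long) (a - b);       /* a - b can't wrap here */
  else
    return -(long) (b - a);      /* likewise, then negate */
}

int
main (void)
{
  printf ("%ld %ld\n", udiff (10, 3), udiff (3, 10));  /* 7 -7 */
  return 0;
}
#endif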
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
1887 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1888 to an integer type. */
1890 static tree
1891 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1893 int overflow = 0;
1894 tree t;
1896 /* The following code implements the floating point to integer
1897 conversion rules required by the Java Language Specification,
1898 that IEEE NaNs are mapped to zero and values that overflow
1899 the target precision saturate, i.e. values greater than
1900 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1901 are mapped to INT_MIN. These semantics are allowed by the
1902 C and C++ standards that simply state that the behavior of
1903 FP-to-integer conversion is unspecified upon overflow. */
1905 HOST_WIDE_INT high, low;
1906 REAL_VALUE_TYPE r;
1907 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1909 switch (code)
1911 case FIX_TRUNC_EXPR:
1912 real_trunc (&r, VOIDmode, &x);
1913 break;
1915 default:
1916 gcc_unreachable ();
1919 /* If R is NaN, return zero and show we have an overflow. */
1920 if (REAL_VALUE_ISNAN (r))
1922 overflow = 1;
1923 high = 0;
1924 low = 0;
1927 /* See if R is less than the lower bound or greater than the
1928 upper bound. */
1930 if (! overflow)
1932 tree lt = TYPE_MIN_VALUE (type);
1933 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1934 if (REAL_VALUES_LESS (r, l))
1936 overflow = 1;
1937 high = TREE_INT_CST_HIGH (lt);
1938 low = TREE_INT_CST_LOW (lt);
1942 if (! overflow)
1944 tree ut = TYPE_MAX_VALUE (type);
1945 if (ut)
1947 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1948 if (REAL_VALUES_LESS (u, r))
1950 overflow = 1;
1951 high = TREE_INT_CST_HIGH (ut);
1952 low = TREE_INT_CST_LOW (ut);
1957 if (! overflow)
1958 REAL_VALUE_TO_INT (&low, &high, r);
1960 t = build_int_cst_wide (type, low, high);
1962 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1963 TREE_CONSTANT_OVERFLOW (arg1));
1964 return t;
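/* A minimal sketch (not GCC code) of the saturating semantics above,
   written for a host double-to-int conversion; the function name is
   illustrative and INT_MIN/INT_MAX come from <limits.h>:

     int
     saturating_fix_trunc (double r)
     {
       if (r != r)
         return 0;                  a NaN maps to zero
       if (r < (double) INT_MIN)
         return INT_MIN;
       if (r > (double) INT_MAX)
         return INT_MAX;
       return (int) r;              truncate toward zero
     }
*/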
1967 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1968 to another floating-point type. */
1970 static tree
1971 fold_convert_const_real_from_real (tree type, tree arg1)
1973 REAL_VALUE_TYPE value;
1974 tree t;
1976 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1977 t = build_real (type, value);
1979 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1980 TREE_CONSTANT_OVERFLOW (t)
1981 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1982 return t;
1985 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1986 type TYPE. If no simplification can be done return NULL_TREE. */
1988 static tree
1989 fold_convert_const (enum tree_code code, tree type, tree arg1)
1991 if (TREE_TYPE (arg1) == type)
1992 return arg1;
1994 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1996 if (TREE_CODE (arg1) == INTEGER_CST)
1997 return fold_convert_const_int_from_int (type, arg1);
1998 else if (TREE_CODE (arg1) == REAL_CST)
1999 return fold_convert_const_int_from_real (code, type, arg1);
2001 else if (TREE_CODE (type) == REAL_TYPE)
2003 if (TREE_CODE (arg1) == INTEGER_CST)
2004 return build_real_from_int_cst (type, arg1);
2005 if (TREE_CODE (arg1) == REAL_CST)
2006 return fold_convert_const_real_from_real (type, arg1);
2008 return NULL_TREE;
2011 /* Construct a vector of zero elements of vector type TYPE. */
2013 static tree
2014 build_zero_vector (tree type)
2016 tree elem, list;
2017 int i, units;
2019 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2020 units = TYPE_VECTOR_SUBPARTS (type);
2022 list = NULL_TREE;
2023 for (i = 0; i < units; i++)
2024 list = tree_cons (NULL_TREE, elem, list);
2025 return build_vector (type, list);
2028 /* Convert expression ARG to type TYPE. Used by the middle-end for
2029 simple conversions in preference to calling the front-end's convert. */
2031 tree
2032 fold_convert (tree type, tree arg)
2034 tree orig = TREE_TYPE (arg);
2035 tree tem;
2037 if (type == orig)
2038 return arg;
2040 if (TREE_CODE (arg) == ERROR_MARK
2041 || TREE_CODE (type) == ERROR_MARK
2042 || TREE_CODE (orig) == ERROR_MARK)
2043 return error_mark_node;
2045 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2046 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2047 TYPE_MAIN_VARIANT (orig)))
2048 return fold_build1 (NOP_EXPR, type, arg);
2050 switch (TREE_CODE (type))
2052 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2053 case POINTER_TYPE: case REFERENCE_TYPE:
2054 case OFFSET_TYPE:
2055 if (TREE_CODE (arg) == INTEGER_CST)
2057 tem = fold_convert_const (NOP_EXPR, type, arg);
2058 if (tem != NULL_TREE)
2059 return tem;
2061 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2062 || TREE_CODE (orig) == OFFSET_TYPE)
2063 return fold_build1 (NOP_EXPR, type, arg);
2064 if (TREE_CODE (orig) == COMPLEX_TYPE)
2066 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2067 return fold_convert (type, tem);
2069 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2070 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2071 return fold_build1 (NOP_EXPR, type, arg);
2073 case REAL_TYPE:
2074 if (TREE_CODE (arg) == INTEGER_CST)
2076 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2077 if (tem != NULL_TREE)
2078 return tem;
2080 else if (TREE_CODE (arg) == REAL_CST)
2082 tem = fold_convert_const (NOP_EXPR, type, arg);
2083 if (tem != NULL_TREE)
2084 return tem;
2087 switch (TREE_CODE (orig))
2089 case INTEGER_TYPE:
2090 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2091 case POINTER_TYPE: case REFERENCE_TYPE:
2092 return fold_build1 (FLOAT_EXPR, type, arg);
2094 case REAL_TYPE:
2095 return fold_build1 (NOP_EXPR, type, arg);
2097 case COMPLEX_TYPE:
2098 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2099 return fold_convert (type, tem);
2101 default:
2102 gcc_unreachable ();
2105 case COMPLEX_TYPE:
2106 switch (TREE_CODE (orig))
2108 case INTEGER_TYPE:
2109 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2110 case POINTER_TYPE: case REFERENCE_TYPE:
2111 case REAL_TYPE:
2112 return build2 (COMPLEX_EXPR, type,
2113 fold_convert (TREE_TYPE (type), arg),
2114 fold_convert (TREE_TYPE (type), integer_zero_node));
2115 case COMPLEX_TYPE:
2117 tree rpart, ipart;
2119 if (TREE_CODE (arg) == COMPLEX_EXPR)
2121 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2122 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2123 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2126 arg = save_expr (arg);
2127 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2128 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2129 rpart = fold_convert (TREE_TYPE (type), rpart);
2130 ipart = fold_convert (TREE_TYPE (type), ipart);
2131 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2134 default:
2135 gcc_unreachable ();
2138 case VECTOR_TYPE:
2139 if (integer_zerop (arg))
2140 return build_zero_vector (type);
2141 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2142 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2143 || TREE_CODE (orig) == VECTOR_TYPE);
2144 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2146 case VOID_TYPE:
2147 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2149 default:
2150 gcc_unreachable ();
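/* For instance (illustrative): converting a complex value to a scalar
   type keeps only its real part, which is what the REALPART_EXPR
   cases above implement; in source terms, using the GNU extension,

     double
     complex_to_real (_Complex double z)
     {
       return __real__ z;
     }

   after which only an ordinary real-to-real conversion remains. */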
2154 /* Return false if expr can be assumed not to be an lvalue, true
2155 otherwise. */
2157 static bool
2158 maybe_lvalue_p (tree x)
2160 /* We only need to wrap lvalue tree codes. */
2161 switch (TREE_CODE (x))
2163 case VAR_DECL:
2164 case PARM_DECL:
2165 case RESULT_DECL:
2166 case LABEL_DECL:
2167 case FUNCTION_DECL:
2168 case SSA_NAME:
2170 case COMPONENT_REF:
2171 case INDIRECT_REF:
2172 case ALIGN_INDIRECT_REF:
2173 case MISALIGNED_INDIRECT_REF:
2174 case ARRAY_REF:
2175 case ARRAY_RANGE_REF:
2176 case BIT_FIELD_REF:
2177 case OBJ_TYPE_REF:
2179 case REALPART_EXPR:
2180 case IMAGPART_EXPR:
2181 case PREINCREMENT_EXPR:
2182 case PREDECREMENT_EXPR:
2183 case SAVE_EXPR:
2184 case TRY_CATCH_EXPR:
2185 case WITH_CLEANUP_EXPR:
2186 case COMPOUND_EXPR:
2187 case MODIFY_EXPR:
2188 case GIMPLE_MODIFY_STMT:
2189 case TARGET_EXPR:
2190 case COND_EXPR:
2191 case BIND_EXPR:
2192 case MIN_EXPR:
2193 case MAX_EXPR:
2194 break;
2196 default:
2197 /* Assume the worst for front-end tree codes. */
2198 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2199 break;
2200 return false;
2203 return true;
2206 /* Return an expr equal to X but certainly not valid as an lvalue. */
2208 tree
2209 non_lvalue (tree x)
2211 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2212 us. */
2213 if (in_gimple_form)
2214 return x;
2216 if (! maybe_lvalue_p (x))
2217 return x;
2218 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2221 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2222 Zero means allow extended lvalues. */
2224 int pedantic_lvalues;
2226 /* When pedantic, return an expr equal to X but certainly not valid as a
2227 pedantic lvalue. Otherwise, return X. */
2229 static tree
2230 pedantic_non_lvalue (tree x)
2232 if (pedantic_lvalues)
2233 return non_lvalue (x);
2234 else
2235 return x;
2238 /* Given a tree comparison code, return the code that is the logical inverse
2239 of the given code. It is not safe to do this for floating-point
2240 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2241 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2243 enum tree_code
2244 invert_tree_comparison (enum tree_code code, bool honor_nans)
2246 if (honor_nans && flag_trapping_math)
2247 return ERROR_MARK;
2249 switch (code)
2251 case EQ_EXPR:
2252 return NE_EXPR;
2253 case NE_EXPR:
2254 return EQ_EXPR;
2255 case GT_EXPR:
2256 return honor_nans ? UNLE_EXPR : LE_EXPR;
2257 case GE_EXPR:
2258 return honor_nans ? UNLT_EXPR : LT_EXPR;
2259 case LT_EXPR:
2260 return honor_nans ? UNGE_EXPR : GE_EXPR;
2261 case LE_EXPR:
2262 return honor_nans ? UNGT_EXPR : GT_EXPR;
2263 case LTGT_EXPR:
2264 return UNEQ_EXPR;
2265 case UNEQ_EXPR:
2266 return LTGT_EXPR;
2267 case UNGT_EXPR:
2268 return LE_EXPR;
2269 case UNGE_EXPR:
2270 return LT_EXPR;
2271 case UNLT_EXPR:
2272 return GE_EXPR;
2273 case UNLE_EXPR:
2274 return GT_EXPR;
2275 case ORDERED_EXPR:
2276 return UNORDERED_EXPR;
2277 case UNORDERED_EXPR:
2278 return ORDERED_EXPR;
2279 default:
2280 gcc_unreachable ();
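/* A worked example of why HONOR_NANS matters here (illustrative):
   if x is a NaN, both of

     x < 1.0
     x >= 1.0

   evaluate to false, so the logical inverse of LT_EXPR must be
   UNGE_EXPR rather than GE_EXPR.  Under -ftrapping-math even that is
   unsafe: the ordered comparison may raise an invalid-operand
   exception on NaN while its unordered inverse must not, hence the
   ERROR_MARK escape at the top. */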
2284 /* Similar, but return the comparison that results if the operands are
2285 swapped. This is safe for floating-point. */
2287 enum tree_code
2288 swap_tree_comparison (enum tree_code code)
2290 switch (code)
2292 case EQ_EXPR:
2293 case NE_EXPR:
2294 case ORDERED_EXPR:
2295 case UNORDERED_EXPR:
2296 case LTGT_EXPR:
2297 case UNEQ_EXPR:
2298 return code;
2299 case GT_EXPR:
2300 return LT_EXPR;
2301 case GE_EXPR:
2302 return LE_EXPR;
2303 case LT_EXPR:
2304 return GT_EXPR;
2305 case LE_EXPR:
2306 return GE_EXPR;
2307 case UNGT_EXPR:
2308 return UNLT_EXPR;
2309 case UNGE_EXPR:
2310 return UNLE_EXPR;
2311 case UNLT_EXPR:
2312 return UNGT_EXPR;
2313 case UNLE_EXPR:
2314 return UNGE_EXPR;
2315 default:
2316 gcc_unreachable ();
2321 /* Convert a comparison tree code from an enum tree_code representation
2322 into a compcode bit-based encoding. This function is the inverse of
2323 compcode_to_comparison. */
2325 static enum comparison_code
2326 comparison_to_compcode (enum tree_code code)
2328 switch (code)
2330 case LT_EXPR:
2331 return COMPCODE_LT;
2332 case EQ_EXPR:
2333 return COMPCODE_EQ;
2334 case LE_EXPR:
2335 return COMPCODE_LE;
2336 case GT_EXPR:
2337 return COMPCODE_GT;
2338 case NE_EXPR:
2339 return COMPCODE_NE;
2340 case GE_EXPR:
2341 return COMPCODE_GE;
2342 case ORDERED_EXPR:
2343 return COMPCODE_ORD;
2344 case UNORDERED_EXPR:
2345 return COMPCODE_UNORD;
2346 case UNLT_EXPR:
2347 return COMPCODE_UNLT;
2348 case UNEQ_EXPR:
2349 return COMPCODE_UNEQ;
2350 case UNLE_EXPR:
2351 return COMPCODE_UNLE;
2352 case UNGT_EXPR:
2353 return COMPCODE_UNGT;
2354 case LTGT_EXPR:
2355 return COMPCODE_LTGT;
2356 case UNGE_EXPR:
2357 return COMPCODE_UNGE;
2358 default:
2359 gcc_unreachable ();
2363 /* Convert a compcode bit-based encoding of a comparison operator back
2364 to GCC's enum tree_code representation. This function is the
2365 inverse of comparison_to_compcode. */
2367 static enum tree_code
2368 compcode_to_comparison (enum comparison_code code)
2370 switch (code)
2372 case COMPCODE_LT:
2373 return LT_EXPR;
2374 case COMPCODE_EQ:
2375 return EQ_EXPR;
2376 case COMPCODE_LE:
2377 return LE_EXPR;
2378 case COMPCODE_GT:
2379 return GT_EXPR;
2380 case COMPCODE_NE:
2381 return NE_EXPR;
2382 case COMPCODE_GE:
2383 return GE_EXPR;
2384 case COMPCODE_ORD:
2385 return ORDERED_EXPR;
2386 case COMPCODE_UNORD:
2387 return UNORDERED_EXPR;
2388 case COMPCODE_UNLT:
2389 return UNLT_EXPR;
2390 case COMPCODE_UNEQ:
2391 return UNEQ_EXPR;
2392 case COMPCODE_UNLE:
2393 return UNLE_EXPR;
2394 case COMPCODE_UNGT:
2395 return UNGT_EXPR;
2396 case COMPCODE_LTGT:
2397 return LTGT_EXPR;
2398 case COMPCODE_UNGE:
2399 return UNGE_EXPR;
2400 default:
2401 gcc_unreachable ();
2405 /* Return a tree for the comparison which is the combination of
2406 doing the AND or OR (depending on CODE) of the two operations LCODE
2407 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2408 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2409 if this makes the transformation invalid. */
2411 tree
2412 combine_comparisons (enum tree_code code, enum tree_code lcode,
2413 enum tree_code rcode, tree truth_type,
2414 tree ll_arg, tree lr_arg)
2416 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2417 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2418 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2419 enum comparison_code compcode;
2421 switch (code)
2423 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2424 compcode = lcompcode & rcompcode;
2425 break;
2427 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2428 compcode = lcompcode | rcompcode;
2429 break;
2431 default:
2432 return NULL_TREE;
2435 if (!honor_nans)
2437 /* Eliminate unordered comparisons, as well as LTGT and ORD
2438 which are not used unless the mode has NaNs. */
2439 compcode &= ~COMPCODE_UNORD;
2440 if (compcode == COMPCODE_LTGT)
2441 compcode = COMPCODE_NE;
2442 else if (compcode == COMPCODE_ORD)
2443 compcode = COMPCODE_TRUE;
2445 else if (flag_trapping_math)
2447 /* Check that the original operation and the optimized ones will trap
2448 under the same condition. */
2449 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2450 && (lcompcode != COMPCODE_EQ)
2451 && (lcompcode != COMPCODE_ORD);
2452 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2453 && (rcompcode != COMPCODE_EQ)
2454 && (rcompcode != COMPCODE_ORD);
2455 bool trap = (compcode & COMPCODE_UNORD) == 0
2456 && (compcode != COMPCODE_EQ)
2457 && (compcode != COMPCODE_ORD);
2459 /* In a short-circuited boolean expression the LHS might be
2460 such that the RHS, if evaluated, will never trap. For
2461 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2462 if neither x nor y is NaN. (This is a mixed blessing: for
2463 example, the expression above will never trap, hence
2464 optimizing it to x < y would be invalid). */
2465 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2466 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2467 rtrap = false;
2469 /* If the comparison was short-circuited, and only the RHS
2470 trapped, we may now generate a spurious trap. */
2471 if (rtrap && !ltrap
2472 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2473 return NULL_TREE;
2475 /* If we changed the conditions that cause a trap, we lose. */
2476 if ((ltrap || rtrap) != trap)
2477 return NULL_TREE;
2480 if (compcode == COMPCODE_TRUE)
2481 return constant_boolean_node (true, truth_type);
2482 else if (compcode == COMPCODE_FALSE)
2483 return constant_boolean_node (false, truth_type);
2484 else
2485 return fold_build2 (compcode_to_comparison (compcode),
2486 truth_type, ll_arg, lr_arg);
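/* A sketch of the bit encoding at work (illustrative, using the
   comparison_code values defined earlier in this file):

     COMPCODE_LT | COMPCODE_EQ  ==  COMPCODE_LE
     COMPCODE_LT & COMPCODE_LE  ==  COMPCODE_LT

   so "a < b || a == b" can fold to "a <= b", and "a < b && a <= b"
   to "a < b", provided the NaN and trapping checks above succeed. */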
2489 /* Return nonzero if CODE is a tree code that represents a truth value. */
2491 static int
2492 truth_value_p (enum tree_code code)
2494 return (TREE_CODE_CLASS (code) == tcc_comparison
2495 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2496 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2497 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2500 /* Return nonzero if two operands (typically of the same tree node)
2501 are necessarily equal. If either argument has side-effects this
2502 function returns zero. FLAGS modifies behavior as follows:
2504 If OEP_ONLY_CONST is set, only return nonzero for constants.
2505 This function tests whether the operands are indistinguishable;
2506 it does not test whether they are equal using C's == operation.
2507 The distinction is important for IEEE floating point, because
2508 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2509 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2511 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2512 even though it may hold multiple values during a function.
2513 This is because a GCC tree node guarantees that nothing else is
2514 executed between the evaluation of its "operands" (which may often
2515 be evaluated in arbitrary order). Hence if the operands themselves
2516 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2517 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2518 unset means assuming isochronic (or instantaneous) tree equivalence.
2519 Unless comparing arbitrary expression trees, such as from different
2520 statements, this flag can usually be left unset.
2522 If OEP_PURE_SAME is set, then pure functions with identical arguments
2523 are considered the same. It is used when the caller has other ways
2524 to ensure that global memory is unchanged in between. */
2526 int
2527 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2529 /* If either is ERROR_MARK, they aren't equal. */
2530 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2531 return 0;
2533 /* If both types don't have the same signedness, then we can't consider
2534 them equal. We must check this before the STRIP_NOPS calls
2535 because they may change the signedness of the arguments. */
2536 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2537 return 0;
2539 /* If both types don't have the same precision, then it is not safe
2540 to strip NOPs. */
2541 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2542 return 0;
2544 STRIP_NOPS (arg0);
2545 STRIP_NOPS (arg1);
2547 /* In case both args are comparisons but with different comparison
2548 code, try to swap the comparison operands of one arg to produce
2549 a match and compare that variant. */
2550 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2551 && COMPARISON_CLASS_P (arg0)
2552 && COMPARISON_CLASS_P (arg1))
2554 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2556 if (TREE_CODE (arg0) == swap_code)
2557 return operand_equal_p (TREE_OPERAND (arg0, 0),
2558 TREE_OPERAND (arg1, 1), flags)
2559 && operand_equal_p (TREE_OPERAND (arg0, 1),
2560 TREE_OPERAND (arg1, 0), flags);
2563 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2564 /* This is needed for conversions and for COMPONENT_REF.
2565 Might as well play it safe and always test this. */
2566 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2567 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2568 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2569 return 0;
2571 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2572 We don't care about side effects in that case because the SAVE_EXPR
2573 takes care of that for us. In all other cases, two expressions are
2574 equal if they have no side effects. If we have two identical
2575 expressions with side effects that should be treated the same due
2576 to the only side effects being identical SAVE_EXPR's, that will
2577 be detected in the recursive calls below. */
2578 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2579 && (TREE_CODE (arg0) == SAVE_EXPR
2580 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2581 return 1;
2583 /* Next handle constant cases, those for which we can return 1 even
2584 if ONLY_CONST is set. */
2585 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2586 switch (TREE_CODE (arg0))
2588 case INTEGER_CST:
2589 return tree_int_cst_equal (arg0, arg1);
2591 case REAL_CST:
2592 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2593 TREE_REAL_CST (arg1)))
2594 return 1;
2597 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2599 /* If we do not distinguish between signed and unsigned zero,
2600 consider them equal. */
2601 if (real_zerop (arg0) && real_zerop (arg1))
2602 return 1;
2604 return 0;
2606 case VECTOR_CST:
2608 tree v1, v2;
2610 v1 = TREE_VECTOR_CST_ELTS (arg0);
2611 v2 = TREE_VECTOR_CST_ELTS (arg1);
2612 while (v1 && v2)
2614 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2615 flags))
2616 return 0;
2617 v1 = TREE_CHAIN (v1);
2618 v2 = TREE_CHAIN (v2);
2621 return v1 == v2;
2624 case COMPLEX_CST:
2625 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2626 flags)
2627 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2628 flags));
2630 case STRING_CST:
2631 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2632 && ! memcmp (TREE_STRING_POINTER (arg0),
2633 TREE_STRING_POINTER (arg1),
2634 TREE_STRING_LENGTH (arg0)));
2636 case ADDR_EXPR:
2637 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2638 0);
2639 default:
2640 break;
2643 if (flags & OEP_ONLY_CONST)
2644 return 0;
2646 /* Define macros to test an operand from arg0 and arg1 for equality and a
2647 variant that allows null and views null as being different from any
2648 non-null value. In the latter case, if either is null, then both
2649 must be; otherwise, do the normal comparison. */
2650 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2651 TREE_OPERAND (arg1, N), flags)
2653 #define OP_SAME_WITH_NULL(N) \
2654 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2655 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2657 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2659 case tcc_unary:
2660 /* Two conversions are equal only if signedness and modes match. */
2661 switch (TREE_CODE (arg0))
2663 case NOP_EXPR:
2664 case CONVERT_EXPR:
2665 case FIX_TRUNC_EXPR:
2666 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2667 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2668 return 0;
2669 break;
2670 default:
2671 break;
2674 return OP_SAME (0);
2677 case tcc_comparison:
2678 case tcc_binary:
2679 if (OP_SAME (0) && OP_SAME (1))
2680 return 1;
2682 /* For commutative ops, allow the other order. */
2683 return (commutative_tree_code (TREE_CODE (arg0))
2684 && operand_equal_p (TREE_OPERAND (arg0, 0),
2685 TREE_OPERAND (arg1, 1), flags)
2686 && operand_equal_p (TREE_OPERAND (arg0, 1),
2687 TREE_OPERAND (arg1, 0), flags));
2689 case tcc_reference:
2690 /* If either of the pointer (or reference) expressions we are
2691 dereferencing contain a side effect, these cannot be equal. */
2692 if (TREE_SIDE_EFFECTS (arg0)
2693 || TREE_SIDE_EFFECTS (arg1))
2694 return 0;
2696 switch (TREE_CODE (arg0))
2698 case INDIRECT_REF:
2699 case ALIGN_INDIRECT_REF:
2700 case MISALIGNED_INDIRECT_REF:
2701 case REALPART_EXPR:
2702 case IMAGPART_EXPR:
2703 return OP_SAME (0);
2705 case ARRAY_REF:
2706 case ARRAY_RANGE_REF:
2707 /* Operands 2 and 3 may be null. */
2708 return (OP_SAME (0)
2709 && OP_SAME (1)
2710 && OP_SAME_WITH_NULL (2)
2711 && OP_SAME_WITH_NULL (3));
2713 case COMPONENT_REF:
2714 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2715 may be NULL when we're called to compare MEM_EXPRs. */
2716 return OP_SAME_WITH_NULL (0)
2717 && OP_SAME (1)
2718 && OP_SAME_WITH_NULL (2);
2720 case BIT_FIELD_REF:
2721 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2723 default:
2724 return 0;
2727 case tcc_expression:
2728 switch (TREE_CODE (arg0))
2730 case ADDR_EXPR:
2731 case TRUTH_NOT_EXPR:
2732 return OP_SAME (0);
2734 case TRUTH_ANDIF_EXPR:
2735 case TRUTH_ORIF_EXPR:
2736 return OP_SAME (0) && OP_SAME (1);
2738 case TRUTH_AND_EXPR:
2739 case TRUTH_OR_EXPR:
2740 case TRUTH_XOR_EXPR:
2741 if (OP_SAME (0) && OP_SAME (1))
2742 return 1;
2744 /* Otherwise take into account this is a commutative operation. */
2745 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2746 TREE_OPERAND (arg1, 1), flags)
2747 && operand_equal_p (TREE_OPERAND (arg0, 1),
2748 TREE_OPERAND (arg1, 0), flags));
2750 case CALL_EXPR:
2751 /* If the CALL_EXPRs call different functions, then they
2752 clearly cannot be equal. */
2753 if (!OP_SAME (0))
2754 return 0;
2757 unsigned int cef = call_expr_flags (arg0);
2758 if (flags & OEP_PURE_SAME)
2759 cef &= ECF_CONST | ECF_PURE;
2760 else
2761 cef &= ECF_CONST;
2762 if (!cef)
2763 return 0;
2766 /* Now see if all the arguments are the same. operand_equal_p
2767 does not handle TREE_LIST, so we walk the operands here
2768 feeding them to operand_equal_p. */
2769 arg0 = TREE_OPERAND (arg0, 1);
2770 arg1 = TREE_OPERAND (arg1, 1);
2771 while (arg0 && arg1)
2773 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2774 flags))
2775 return 0;
2777 arg0 = TREE_CHAIN (arg0);
2778 arg1 = TREE_CHAIN (arg1);
2781 /* If we get here and both argument lists are exhausted
2782 then the CALL_EXPRs are equal. */
2783 return ! (arg0 || arg1);
2785 default:
2786 return 0;
2789 case tcc_declaration:
2790 /* Consider __builtin_sqrt equal to sqrt. */
2791 return (TREE_CODE (arg0) == FUNCTION_DECL
2792 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2793 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2794 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2796 default:
2797 return 0;
2800 #undef OP_SAME
2801 #undef OP_SAME_WITH_NULL
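/* Illustrative examples (not compiler code) of the distinction drawn
   above between indistinguishability and C equality:

     double a = -0.0, b = 0.0;
     a == b                   evaluates to 1
     1.0 / a == 1.0 / b       evaluates to 0  (-inf vs. +inf)

   so when signed zeros are honored, operand_equal_p treats -0.0 and
   0.0 as different constants even though they compare equal in C. */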
2804 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2805 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2807 When in doubt, return 0. */
2809 static int
2810 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2812 int unsignedp1, unsignedpo;
2813 tree primarg0, primarg1, primother;
2814 unsigned int correct_width;
2816 if (operand_equal_p (arg0, arg1, 0))
2817 return 1;
2819 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2820 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2821 return 0;
2823 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2824 and see if the inner values are the same. This removes any
2825 signedness comparison, which doesn't matter here. */
2826 primarg0 = arg0, primarg1 = arg1;
2827 STRIP_NOPS (primarg0);
2828 STRIP_NOPS (primarg1);
2829 if (operand_equal_p (primarg0, primarg1, 0))
2830 return 1;
2832 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2833 actual comparison operand, ARG0.
2835 First throw away any conversions to wider types
2836 already present in the operands. */
2838 primarg1 = get_narrower (arg1, &unsignedp1);
2839 primother = get_narrower (other, &unsignedpo);
2841 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2842 if (unsignedp1 == unsignedpo
2843 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2844 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2846 tree type = TREE_TYPE (arg0);
2848 /* Make sure shorter operand is extended the right way
2849 to match the longer operand. */
2850 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2851 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2853 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2854 return 1;
2857 return 0;
2860 /* See if ARG is an expression that is either a comparison or is performing
2861 arithmetic on comparisons. The comparisons must only be comparing
2862 two different values, which will be stored in *CVAL1 and *CVAL2; if
2863 they are nonzero it means that some operands have already been found.
2864 No variables may be used anywhere else in the expression except in the
2865 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2866 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2868 If this is true, return 1. Otherwise, return zero. */
2870 static int
2871 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2873 enum tree_code code = TREE_CODE (arg);
2874 enum tree_code_class class = TREE_CODE_CLASS (code);
2876 /* We can handle some of the tcc_expression cases here. */
2877 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2878 class = tcc_unary;
2879 else if (class == tcc_expression
2880 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2881 || code == COMPOUND_EXPR))
2882 class = tcc_binary;
2884 else if (class == tcc_expression && code == SAVE_EXPR
2885 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2887 /* If we've already found a CVAL1 or CVAL2, this expression is
2888 too complex to handle. */
2889 if (*cval1 || *cval2)
2890 return 0;
2892 class = tcc_unary;
2893 *save_p = 1;
2896 switch (class)
2898 case tcc_unary:
2899 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2901 case tcc_binary:
2902 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2903 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2904 cval1, cval2, save_p));
2906 case tcc_constant:
2907 return 1;
2909 case tcc_expression:
2910 if (code == COND_EXPR)
2911 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2912 cval1, cval2, save_p)
2913 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2914 cval1, cval2, save_p)
2915 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2916 cval1, cval2, save_p));
2917 return 0;
2919 case tcc_comparison:
2920 /* First see if we can handle the first operand, then the second. For
2921 the second operand, we know *CVAL1 can't be zero. It must be that
2922 one side of the comparison is each of the values; test for the
2923 case where this isn't true by failing if the two operands
2924 are the same. */
2926 if (operand_equal_p (TREE_OPERAND (arg, 0),
2927 TREE_OPERAND (arg, 1), 0))
2928 return 0;
2930 if (*cval1 == 0)
2931 *cval1 = TREE_OPERAND (arg, 0);
2932 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2934 else if (*cval2 == 0)
2935 *cval2 = TREE_OPERAND (arg, 0);
2936 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2938 else
2939 return 0;
2941 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2943 else if (*cval2 == 0)
2944 *cval2 = TREE_OPERAND (arg, 1);
2945 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2947 else
2948 return 0;
2950 return 1;
2952 default:
2953 return 0;
2957 /* ARG is a tree that is known to contain just arithmetic operations and
2958 comparisons. Evaluate the operations in the tree substituting NEW0 for
2959 any occurrence of OLD0 as an operand of a comparison and likewise for
2960 NEW1 and OLD1. */
2962 static tree
2963 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2965 tree type = TREE_TYPE (arg);
2966 enum tree_code code = TREE_CODE (arg);
2967 enum tree_code_class class = TREE_CODE_CLASS (code);
2969 /* We can handle some of the tcc_expression cases here. */
2970 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2971 class = tcc_unary;
2972 else if (class == tcc_expression
2973 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2974 class = tcc_binary;
2976 switch (class)
2978 case tcc_unary:
2979 return fold_build1 (code, type,
2980 eval_subst (TREE_OPERAND (arg, 0),
2981 old0, new0, old1, new1));
2983 case tcc_binary:
2984 return fold_build2 (code, type,
2985 eval_subst (TREE_OPERAND (arg, 0),
2986 old0, new0, old1, new1),
2987 eval_subst (TREE_OPERAND (arg, 1),
2988 old0, new0, old1, new1));
2990 case tcc_expression:
2991 switch (code)
2993 case SAVE_EXPR:
2994 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2996 case COMPOUND_EXPR:
2997 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2999 case COND_EXPR:
3000 return fold_build3 (code, type,
3001 eval_subst (TREE_OPERAND (arg, 0),
3002 old0, new0, old1, new1),
3003 eval_subst (TREE_OPERAND (arg, 1),
3004 old0, new0, old1, new1),
3005 eval_subst (TREE_OPERAND (arg, 2),
3006 old0, new0, old1, new1));
3007 default:
3008 break;
3010 /* Fall through - ??? */
3012 case tcc_comparison:
3014 tree arg0 = TREE_OPERAND (arg, 0);
3015 tree arg1 = TREE_OPERAND (arg, 1);
3017 /* We need to check both for exact equality and tree equality. The
3018 former will be true if the operand has a side-effect. In that
3019 case, we know the operand occurred exactly once. */
3021 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3022 arg0 = new0;
3023 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3024 arg0 = new1;
3026 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3027 arg1 = new0;
3028 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3029 arg1 = new1;
3031 return fold_build2 (code, type, arg0, arg1);
3034 default:
3035 return arg;
3039 /* Return a tree for the case when the result of an expression is RESULT
3040 converted to TYPE and OMITTED was previously an operand of the expression
3041 but is now not needed (e.g., we folded OMITTED * 0).
3043 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3044 the conversion of RESULT to TYPE. */
3046 tree
3047 omit_one_operand (tree type, tree result, tree omitted)
3049 tree t = fold_convert (type, result);
3051 if (TREE_SIDE_EFFECTS (omitted))
3052 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3054 return non_lvalue (t);
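/* For example (illustrative): folding "f () * 0" must not discard the
   call, so the result is the equivalent of the C expression

     (f (), 0)

   a COMPOUND_EXPR that evaluates the omitted operand for its side
   effects and then yields the folded constant. */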
3057 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3059 static tree
3060 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3062 tree t = fold_convert (type, result);
3064 if (TREE_SIDE_EFFECTS (omitted))
3065 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3067 return pedantic_non_lvalue (t);
3070 /* Return a tree for the case when the result of an expression is RESULT
3071 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3072 of the expression but are now not needed.
3074 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3075 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3076 evaluated before OMITTED2. Otherwise, if neither has side effects,
3077 just do the conversion of RESULT to TYPE. */
3079 tree
3080 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3082 tree t = fold_convert (type, result);
3084 if (TREE_SIDE_EFFECTS (omitted2))
3085 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3086 if (TREE_SIDE_EFFECTS (omitted1))
3087 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3089 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3093 /* Return a simplified tree node for the truth-negation of ARG. This
3094 never alters ARG itself. We assume that ARG is an operation that
3095 returns a truth value (0 or 1).
3097 FIXME: one would think we would fold the result, but it causes
3098 problems with the dominator optimizer. */
3100 tree
3101 fold_truth_not_expr (tree arg)
3103 tree type = TREE_TYPE (arg);
3104 enum tree_code code = TREE_CODE (arg);
3106 /* If this is a comparison, we can simply invert it, except for
3107 floating-point non-equality comparisons, in which case we just
3108 enclose a TRUTH_NOT_EXPR around what we have. */
3110 if (TREE_CODE_CLASS (code) == tcc_comparison)
3112 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3113 if (FLOAT_TYPE_P (op_type)
3114 && flag_trapping_math
3115 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3116 && code != NE_EXPR && code != EQ_EXPR)
3117 return NULL_TREE;
3118 else
3120 code = invert_tree_comparison (code,
3121 HONOR_NANS (TYPE_MODE (op_type)));
3122 if (code == ERROR_MARK)
3123 return NULL_TREE;
3124 else
3125 return build2 (code, type,
3126 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3130 switch (code)
3132 case INTEGER_CST:
3133 return constant_boolean_node (integer_zerop (arg), type);
3135 case TRUTH_AND_EXPR:
3136 return build2 (TRUTH_OR_EXPR, type,
3137 invert_truthvalue (TREE_OPERAND (arg, 0)),
3138 invert_truthvalue (TREE_OPERAND (arg, 1)));
3140 case TRUTH_OR_EXPR:
3141 return build2 (TRUTH_AND_EXPR, type,
3142 invert_truthvalue (TREE_OPERAND (arg, 0)),
3143 invert_truthvalue (TREE_OPERAND (arg, 1)));
3145 case TRUTH_XOR_EXPR:
3146 /* Here we can invert either operand. We invert the first operand
3147 unless the second operand is a TRUTH_NOT_EXPR in which case our
3148 result is the XOR of the first operand with the inside of the
3149 negation of the second operand. */
3151 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3152 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3153 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3154 else
3155 return build2 (TRUTH_XOR_EXPR, type,
3156 invert_truthvalue (TREE_OPERAND (arg, 0)),
3157 TREE_OPERAND (arg, 1));
3159 case TRUTH_ANDIF_EXPR:
3160 return build2 (TRUTH_ORIF_EXPR, type,
3161 invert_truthvalue (TREE_OPERAND (arg, 0)),
3162 invert_truthvalue (TREE_OPERAND (arg, 1)));
3164 case TRUTH_ORIF_EXPR:
3165 return build2 (TRUTH_ANDIF_EXPR, type,
3166 invert_truthvalue (TREE_OPERAND (arg, 0)),
3167 invert_truthvalue (TREE_OPERAND (arg, 1)));
3169 case TRUTH_NOT_EXPR:
3170 return TREE_OPERAND (arg, 0);
3172 case COND_EXPR:
3174 tree arg1 = TREE_OPERAND (arg, 1);
3175 tree arg2 = TREE_OPERAND (arg, 2);
3176 /* A COND_EXPR may have a throw as one operand, which
3177 then has void type. Just leave void operands
3178 as they are. */
3179 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3180 VOID_TYPE_P (TREE_TYPE (arg1))
3181 ? arg1 : invert_truthvalue (arg1),
3182 VOID_TYPE_P (TREE_TYPE (arg2))
3183 ? arg2 : invert_truthvalue (arg2));
3186 case COMPOUND_EXPR:
3187 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3188 invert_truthvalue (TREE_OPERAND (arg, 1)));
3190 case NON_LVALUE_EXPR:
3191 return invert_truthvalue (TREE_OPERAND (arg, 0));
3193 case NOP_EXPR:
3194 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3195 return build1 (TRUTH_NOT_EXPR, type, arg);
3197 case CONVERT_EXPR:
3198 case FLOAT_EXPR:
3199 return build1 (TREE_CODE (arg), type,
3200 invert_truthvalue (TREE_OPERAND (arg, 0)));
3202 case BIT_AND_EXPR:
3203 if (!integer_onep (TREE_OPERAND (arg, 1)))
3204 break;
3205 return build2 (EQ_EXPR, type, arg,
3206 build_int_cst (type, 0));
3208 case SAVE_EXPR:
3209 return build1 (TRUTH_NOT_EXPR, type, arg);
3211 case CLEANUP_POINT_EXPR:
3212 return build1 (CLEANUP_POINT_EXPR, type,
3213 invert_truthvalue (TREE_OPERAND (arg, 0)));
3215 default:
3216 break;
3219 return NULL_TREE;
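/* The TRUTH_AND/TRUTH_OR cases above are De Morgan's laws
   (illustrative):

     !(a && b)  ==>  !a || !b
     !(a || b)  ==>  !a && !b

   applied recursively through invert_truthvalue on each operand. */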
3222 /* Return a simplified tree node for the truth-negation of ARG. This
3223 never alters ARG itself. We assume that ARG is an operation that
3224 returns a truth value (0 or 1).
3226 FIXME: one would think we would fold the result, but it causes
3227 problems with the dominator optimizer. */
3229 tree
3230 invert_truthvalue (tree arg)
3232 tree tem;
3234 if (TREE_CODE (arg) == ERROR_MARK)
3235 return arg;
3237 tem = fold_truth_not_expr (arg);
3238 if (!tem)
3239 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3241 return tem;
3244 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3245 operands are another bit-wise operation with a common input. If so,
3246 distribute the bit operations to save an operation and possibly two if
3247 constants are involved. For example, convert
3248 (A | B) & (A | C) into A | (B & C)
3249 Further simplification will occur if B and C are constants.
3251 If this optimization cannot be done, 0 will be returned. */
3253 static tree
3254 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3256 tree common;
3257 tree left, right;
3259 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3260 || TREE_CODE (arg0) == code
3261 || (TREE_CODE (arg0) != BIT_AND_EXPR
3262 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3263 return 0;
3265 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3267 common = TREE_OPERAND (arg0, 0);
3268 left = TREE_OPERAND (arg0, 1);
3269 right = TREE_OPERAND (arg1, 1);
3271 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3273 common = TREE_OPERAND (arg0, 0);
3274 left = TREE_OPERAND (arg0, 1);
3275 right = TREE_OPERAND (arg1, 0);
3277 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3279 common = TREE_OPERAND (arg0, 1);
3280 left = TREE_OPERAND (arg0, 0);
3281 right = TREE_OPERAND (arg1, 1);
3283 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3285 common = TREE_OPERAND (arg0, 1);
3286 left = TREE_OPERAND (arg0, 0);
3287 right = TREE_OPERAND (arg1, 0);
3289 else
3290 return 0;
3292 return fold_build2 (TREE_CODE (arg0), type, common,
3293 fold_build2 (code, type, left, right));
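/* For example (illustrative), with constant operands B and C:

     (x | 4) & (x | 1)   becomes   x | (4 & 1),  i.e.  x | 0,  i.e.  x

   saving one bitwise operation outright and exposing the constant
   subexpression to further folding. */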
3296 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3297 with code CODE. This optimization is unsafe. */
3298 static tree
3299 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3301 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3302 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3304 /* (A / C) +- (B / C) -> (A +- B) / C. */
3305 if (mul0 == mul1
3306 && operand_equal_p (TREE_OPERAND (arg0, 1),
3307 TREE_OPERAND (arg1, 1), 0))
3308 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3309 fold_build2 (code, type,
3310 TREE_OPERAND (arg0, 0),
3311 TREE_OPERAND (arg1, 0)),
3312 TREE_OPERAND (arg0, 1));
3314 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3315 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3316 TREE_OPERAND (arg1, 0), 0)
3317 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3318 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3320 REAL_VALUE_TYPE r0, r1;
3321 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3322 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3323 if (!mul0)
3324 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3325 if (!mul1)
3326 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3327 real_arithmetic (&r0, code, &r0, &r1);
3328 return fold_build2 (MULT_EXPR, type,
3329 TREE_OPERAND (arg0, 0),
3330 build_real (type, r0));
3333 return NULL_TREE;
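/* For example (illustrative; per the comment above, this is only
   valid under unsafe floating-point math):

     a / 2.0 + b / 2.0   becomes   (a + b) / 2.0
     a / 2.0 + a / 4.0   becomes   a * 0.75

   where 0.75 is 1/2 + 1/4 as computed by real_arithmetic above. */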
3336 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3337 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3339 static tree
3340 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3341 int unsignedp)
3343 tree result;
3345 if (bitpos == 0)
3347 tree size = TYPE_SIZE (TREE_TYPE (inner));
3348 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3349 || POINTER_TYPE_P (TREE_TYPE (inner)))
3350 && host_integerp (size, 0)
3351 && tree_low_cst (size, 0) == bitsize)
3352 return fold_convert (type, inner);
3355 result = build3 (BIT_FIELD_REF, type, inner,
3356 size_int (bitsize), bitsize_int (bitpos));
3358 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3360 return result;
3363 /* Optimize a bit-field compare.
3365 There are two cases: First is a compare against a constant and the
3366 second is a comparison of two items where the fields are at the same
3367 bit position relative to the start of a chunk (byte, halfword, word)
3368 large enough to contain it. In these cases we can avoid the shift
3369 implicit in bitfield extractions.
3371 For constants, we emit a compare of the shifted constant with the
3372 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3373 compared. For two fields at the same position, we do the ANDs with the
3374 similar mask and compare the result of the ANDs.
3376 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3377 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3378 are the left and right operands of the comparison, respectively.
3380 If the optimization described above can be done, we return the resulting
3381 tree. Otherwise we return zero. */
3383 static tree
3384 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3385 tree lhs, tree rhs)
3387 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3388 tree type = TREE_TYPE (lhs);
3389 tree signed_type, unsigned_type;
3390 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3391 enum machine_mode lmode, rmode, nmode;
3392 int lunsignedp, runsignedp;
3393 int lvolatilep = 0, rvolatilep = 0;
3394 tree linner, rinner = NULL_TREE;
3395 tree mask;
3396 tree offset;
3398 /* Get all the information about the extractions being done. If the bit size
3399 is the same as the size of the underlying object, we aren't doing an
3400 extraction at all and so can do nothing. We also don't want to
3401 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3402 then will no longer be able to replace it. */
3403 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3404 &lunsignedp, &lvolatilep, false);
3405 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3406 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3407 return 0;
3409 if (!const_p)
3411 /* If this is not a constant, we can only do something if bit positions,
3412 sizes, and signedness are the same. */
3413 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3414 &runsignedp, &rvolatilep, false);
3416 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3417 || lunsignedp != runsignedp || offset != 0
3418 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3419 return 0;
3422 /* See if we can find a mode to refer to this field. We should be able to,
3423 but fail if we can't. */
3424 nmode = get_best_mode (lbitsize, lbitpos,
3425 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3426 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3427 TYPE_ALIGN (TREE_TYPE (rinner))),
3428 word_mode, lvolatilep || rvolatilep);
3429 if (nmode == VOIDmode)
3430 return 0;
3432 /* Set signed and unsigned types of the precision of this mode for the
3433 shifts below. */
3434 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3435 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3437 /* Compute the bit position and size for the new reference and our offset
3438 within it. If the new reference is the same size as the original, we
3439 won't optimize anything, so return zero. */
3440 nbitsize = GET_MODE_BITSIZE (nmode);
3441 nbitpos = lbitpos & ~ (nbitsize - 1);
3442 lbitpos -= nbitpos;
3443 if (nbitsize == lbitsize)
3444 return 0;
3446 if (BYTES_BIG_ENDIAN)
3447 lbitpos = nbitsize - lbitsize - lbitpos;
3449 /* Make the mask to be used against the extracted field. */
3450 mask = build_int_cst (unsigned_type, -1);
3451 mask = force_fit_type (mask, 0, false, false);
3452 mask = fold_convert (unsigned_type, mask);
3453 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3454 mask = const_binop (RSHIFT_EXPR, mask,
3455 size_int (nbitsize - lbitsize - lbitpos), 0);
3457 if (! const_p)
3458 /* If not comparing with constant, just rework the comparison
3459 and return. */
3460 return fold_build2 (code, compare_type,
3461 fold_build2 (BIT_AND_EXPR, unsigned_type,
3462 make_bit_field_ref (linner,
3463 unsigned_type,
3464 nbitsize, nbitpos,
3465 1),
3466 mask),
3467 fold_build2 (BIT_AND_EXPR, unsigned_type,
3468 make_bit_field_ref (rinner,
3469 unsigned_type,
3470 nbitsize, nbitpos,
3471 1),
3472 mask));
3474 /* Otherwise, we are handling the constant case. See if the constant is too
3475 big for the field. Warn and return a tree for 0 (false) if so. We do
3476 this not only for its own sake, but to avoid having to test for this
3477 error case below. If we didn't, we might generate wrong code.
3479 For unsigned fields, the constant shifted right by the field length should
3480 be all zero. For signed fields, the high-order bits should agree with
3481 the sign bit. */
3483 if (lunsignedp)
3485 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3486 fold_convert (unsigned_type, rhs),
3487 size_int (lbitsize), 0)))
3489 warning (0, "comparison is always %d due to width of bit-field",
3490 code == NE_EXPR);
3491 return constant_boolean_node (code == NE_EXPR, compare_type);
3494 else
3496 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3497 size_int (lbitsize - 1), 0);
3498 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3500 warning (0, "comparison is always %d due to width of bit-field",
3501 code == NE_EXPR);
3502 return constant_boolean_node (code == NE_EXPR, compare_type);
3506 /* Single-bit compares should always be against zero. */
3507 if (lbitsize == 1 && ! integer_zerop (rhs))
3509 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3510 rhs = build_int_cst (type, 0);
3513 /* Make a new bitfield reference, shift the constant over the
3514 appropriate number of bits and mask it with the computed mask
3515 (in case this was a signed field). If we changed it, make a new one. */
3516 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3517 if (lvolatilep)
3519 TREE_SIDE_EFFECTS (lhs) = 1;
3520 TREE_THIS_VOLATILE (lhs) = 1;
3523 rhs = const_binop (BIT_AND_EXPR,
3524 const_binop (LSHIFT_EXPR,
3525 fold_convert (unsigned_type, rhs),
3526 size_int (lbitpos), 0),
3527 mask, 0);
3529 return build2 (code, compare_type,
3530 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3531 rhs);
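/* For example (illustrative; W, BITPOS and MASK are stand-in names):
   given

     struct s { unsigned int f : 3; } x;

   the test "x.f == 5" can load the word W containing the field once
   and become

     (W & MASK) == ((5 << BITPOS) & MASK)

   avoiding the shift that a plain bit-field extraction would need. */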
3534 /* Subroutine for fold_truthop: decode a field reference.
3536 If EXP is a comparison reference, we return the innermost reference.
3538 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3539 set to the starting bit number.
3541 If the innermost field can be completely contained in a mode-sized
3542 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3544 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3545 otherwise it is not changed.
3547 *PUNSIGNEDP is set to the signedness of the field.
3549 *PMASK is set to the mask used. This is either contained in a
3550 BIT_AND_EXPR or derived from the width of the field.
3552 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3554 Return 0 if this is not a component reference or is one that we can't
3555 do anything with. */
3557 static tree
3558 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3559 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3560 int *punsignedp, int *pvolatilep,
3561 tree *pmask, tree *pand_mask)
3563 tree outer_type = 0;
3564 tree and_mask = 0;
3565 tree mask, inner, offset;
3566 tree unsigned_type;
3567 unsigned int precision;
3569 /* All the optimizations using this function assume integer fields.
3570 There are problems with FP fields since the type_for_size call
3571 below can fail for, e.g., XFmode. */
3572 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3573 return 0;
3575 /* We are interested in the bare arrangement of bits, so strip everything
3576 that doesn't affect the machine mode. However, record the type of the
3577 outermost expression if it may matter below. */
3578 if (TREE_CODE (exp) == NOP_EXPR
3579 || TREE_CODE (exp) == CONVERT_EXPR
3580 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3581 outer_type = TREE_TYPE (exp);
3582 STRIP_NOPS (exp);
3584 if (TREE_CODE (exp) == BIT_AND_EXPR)
3586 and_mask = TREE_OPERAND (exp, 1);
3587 exp = TREE_OPERAND (exp, 0);
3588 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3589 if (TREE_CODE (and_mask) != INTEGER_CST)
3590 return 0;
3593 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3594 punsignedp, pvolatilep, false);
3595 if ((inner == exp && and_mask == 0)
3596 || *pbitsize < 0 || offset != 0
3597 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3598 return 0;
3600 /* If the number of bits in the reference is the same as the bitsize of
3601 the outer type, then the outer type gives the signedness. Otherwise
3602 (in case of a small bitfield) the signedness is unchanged. */
3603 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3604 *punsignedp = TYPE_UNSIGNED (outer_type);
3606 /* Compute the mask to access the bitfield. */
3607 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3608 precision = TYPE_PRECISION (unsigned_type);
3610 mask = build_int_cst (unsigned_type, -1);
3611 mask = force_fit_type (mask, 0, false, false);
3613 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3614 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3616 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3617 if (and_mask != 0)
3618 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3619 fold_convert (unsigned_type, and_mask), mask);
3621 *pmask = mask;
3622 *pand_mask = and_mask;
3623 return inner;
3626 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3627 bit positions. */
3629 static int
3630 all_ones_mask_p (tree mask, int size)
3632 tree type = TREE_TYPE (mask);
3633 unsigned int precision = TYPE_PRECISION (type);
3634 tree tmask;
3636 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3637 tmask = force_fit_type (tmask, 0, false, false);
3639 return
3640 tree_int_cst_equal (mask,
3641 const_binop (RSHIFT_EXPR,
3642 const_binop (LSHIFT_EXPR, tmask,
3643 size_int (precision - size),
3644 0),
3645 size_int (precision - size), 0));
3648 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3649 represents the sign bit of EXP's type. If EXP represents a sign
3650 or zero extension, also test VAL against the unextended type.
3651 The return value is the (sub)expression whose sign bit is VAL,
3652 or NULL_TREE otherwise. */
3654 static tree
3655 sign_bit_p (tree exp, tree val)
3657 unsigned HOST_WIDE_INT mask_lo, lo;
3658 HOST_WIDE_INT mask_hi, hi;
3659 int width;
3660 tree t;
3662 /* Tree EXP must have an integral type. */
3663 t = TREE_TYPE (exp);
3664 if (! INTEGRAL_TYPE_P (t))
3665 return NULL_TREE;
3667 /* Tree VAL must be an integer constant. */
3668 if (TREE_CODE (val) != INTEGER_CST
3669 || TREE_CONSTANT_OVERFLOW (val))
3670 return NULL_TREE;
3672 width = TYPE_PRECISION (t);
3673 if (width > HOST_BITS_PER_WIDE_INT)
3675 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3676 lo = 0;
3678 mask_hi = ((unsigned HOST_WIDE_INT) -1
3679 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3680 mask_lo = -1;
3682 else
3684 hi = 0;
3685 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3687 mask_hi = 0;
3688 mask_lo = ((unsigned HOST_WIDE_INT) -1
3689 >> (HOST_BITS_PER_WIDE_INT - width));
3692 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3693 treat VAL as if it were unsigned. */
3694 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3695 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3696 return exp;
3698 /* Handle extension from a narrower type. */
3699 if (TREE_CODE (exp) == NOP_EXPR
3700 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3701 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3703 return NULL_TREE;
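/* For example (illustrative): in a 16-bit type the sign bit is

     1 << 15  ==  0x8000

   so sign_bit_p of a 16-bit EXP against the constant 0x8000 returns
   EXP, letting callers fold a test like "(EXP & 0x8000) != 0" into a
   signed "EXP < 0" comparison. */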
3706 /* Subroutine for fold_truthop: determine if an operand is simple enough
3707 to be evaluated unconditionally. */
3709 static int
3710 simple_operand_p (tree exp)
3712 /* Strip any conversions that don't change the machine mode. */
3713 STRIP_NOPS (exp);
3715 return (CONSTANT_CLASS_P (exp)
3716 || TREE_CODE (exp) == SSA_NAME
3717 || (DECL_P (exp)
3718 && ! TREE_ADDRESSABLE (exp)
3719 && ! TREE_THIS_VOLATILE (exp)
3720 && ! DECL_NONLOCAL (exp)
3721 /* Don't regard global variables as simple. They may be
3722 allocated in ways unknown to the compiler (shared memory,
3723 #pragma weak, etc). */
3724 && ! TREE_PUBLIC (exp)
3725 && ! DECL_EXTERNAL (exp)
3726 /* Loading a static variable is unduly expensive, but global
3727 registers aren't expensive. */
3728 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3731 /* The following functions are subroutines to fold_range_test and allow it to
3732 try to change a logical combination of comparisons into a range test.
3734 For example, both
3735 X == 2 || X == 3 || X == 4 || X == 5
3736 and
3737 X >= 2 && X <= 5
3738 are converted to
3739 (unsigned) (X - 2) <= 3
3741 We describe each set of comparisons as being either inside or outside
3742 a range, using a variable named like IN_P, and then describe the
3743 range with a lower and upper bound. If one of the bounds is omitted,
3744 it represents either the highest or lowest value of the type.
3746 In the comments below, we represent a range by two numbers in brackets
3747 preceded by a "+" to designate being inside that range, or a "-" to
3748 designate being outside that range, so the condition can be inverted by
3749 flipping the prefix. An omitted bound is represented by a "-". For
3750 example, "- [-, 10]" means being outside the range starting at the lowest
3751 possible value and ending at 10, in other words, being greater than 10.
3752 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3753 always false.
3755 We set up things so that the missing bounds are handled in a consistent
3756 manner so neither a missing bound nor "true" and "false" need to be
3757 handled using a special case. */
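/* Working the example above through (illustrative): for X in [2, 5]
   the unsigned subtraction maps the range onto [0, 3]:

     X == 2  corresponds to  (unsigned) (X - 2) == 0
     X == 5  corresponds to  (unsigned) (X - 2) == 3

   and any X outside [2, 5] wraps around to a value greater than 3,
   so the single test (unsigned) (X - 2) <= 3 covers the whole
   disjunction. */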
3759 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3760 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3761 and UPPER1_P are nonzero if the respective argument is an upper bound
3762 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3763 must be specified for a comparison. ARG1 will be converted to ARG0's
3764 type if both are specified. */
3766 static tree
3767 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3768 tree arg1, int upper1_p)
3770 tree tem;
3771 int result;
3772 int sgn0, sgn1;
3774 /* If neither arg represents infinity, do the normal operation.
3775 Else, if not a comparison, return infinity. Else handle the special
3776 comparison rules. Note that most of the cases below won't occur, but
3777 are handled for consistency. */
3779 if (arg0 != 0 && arg1 != 0)
3781 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3782 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3783 STRIP_NOPS (tem);
3784 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3787 if (TREE_CODE_CLASS (code) != tcc_comparison)
3788 return 0;
3790 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3791 for neither. In real maths, we cannot assume open ended ranges are
3792 the same. But, this is computer arithmetic, where numbers are finite.
3793 We can therefore make the transformation of any unbounded range with
3794 the value Z, Z being greater than any representable number. This permits
3795 us to treat unbounded ranges as equal. */
3796 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3797 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3798 switch (code)
3800 case EQ_EXPR:
3801 result = sgn0 == sgn1;
3802 break;
3803 case NE_EXPR:
3804 result = sgn0 != sgn1;
3805 break;
3806 case LT_EXPR:
3807 result = sgn0 < sgn1;
3808 break;
3809 case LE_EXPR:
3810 result = sgn0 <= sgn1;
3811 break;
3812 case GT_EXPR:
3813 result = sgn0 > sgn1;
3814 break;
3815 case GE_EXPR:
3816 result = sgn0 >= sgn1;
3817 break;
3818 default:
3819 gcc_unreachable ();
3822 return constant_boolean_node (result, type);
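/* Editor's illustration (hypothetical, not from the original source):
   the SGN encoding used above, where a missing lower bound behaves like
   -infinity (-1), a missing upper bound like +infinity (1), and any
   present value sits between them (0).  Comparing two possibly-omitted
   bounds then reduces to comparing these signs.  */

static int
range_binop_sign_demo (void)
{
  int neg_inf = -1, finite = 0, pos_inf = 1;
  return neg_inf < finite	/* LT_EXPR: missing low < any value */
	 && finite < pos_inf	/* LT_EXPR: any value < missing high */
	 && neg_inf == neg_inf;	/* EQ_EXPR: two missing lows are equal */
}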
3825 /* Given EXP, a logical expression, set the range it is testing into
3826 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3827 actually being tested. *PLOW and *PHIGH will be made of the same type
3828 as the returned expression. If EXP is not a comparison, we will most
3829 likely not be returning a useful value and range. */
3831 static tree
3832 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3834 enum tree_code code;
3835 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3836 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3837 int in_p, n_in_p;
3838 tree low, high, n_low, n_high;
3840 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3841 and see if we can refine the range. Some of the cases below may not
3842 happen, but it doesn't seem worth worrying about this. We "continue"
3843 the outer loop when we've changed something; otherwise we "break"
3844 the switch, which will "break" the while. */
3846 in_p = 0;
3847 low = high = build_int_cst (TREE_TYPE (exp), 0);
3849 while (1)
3851 code = TREE_CODE (exp);
3852 exp_type = TREE_TYPE (exp);
3854 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3856 if (TREE_CODE_LENGTH (code) > 0)
3857 arg0 = TREE_OPERAND (exp, 0);
3858 if (TREE_CODE_CLASS (code) == tcc_comparison
3859 || TREE_CODE_CLASS (code) == tcc_unary
3860 || TREE_CODE_CLASS (code) == tcc_binary)
3861 arg0_type = TREE_TYPE (arg0);
3862 if (TREE_CODE_CLASS (code) == tcc_binary
3863 || TREE_CODE_CLASS (code) == tcc_comparison
3864 || (TREE_CODE_CLASS (code) == tcc_expression
3865 && TREE_CODE_LENGTH (code) > 1))
3866 arg1 = TREE_OPERAND (exp, 1);
3869 switch (code)
3871 case TRUTH_NOT_EXPR:
3872 in_p = ! in_p, exp = arg0;
3873 continue;
3875 case EQ_EXPR: case NE_EXPR:
3876 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3877 /* We can only do something if the range is testing for zero
3878 and if the second operand is an integer constant. Note that
3879 saying something is "in" the range we make is done by
3880 complementing IN_P, since the initial range was set up for the
3881 case of being not equal to zero; "out" is leaving it alone. */
3882 if (low == 0 || high == 0
3883 || ! integer_zerop (low) || ! integer_zerop (high)
3884 || TREE_CODE (arg1) != INTEGER_CST)
3885 break;
3887 switch (code)
3889 case NE_EXPR: /* - [c, c] */
3890 low = high = arg1;
3891 break;
3892 case EQ_EXPR: /* + [c, c] */
3893 in_p = ! in_p, low = high = arg1;
3894 break;
3895 case GT_EXPR: /* - [-, c] */
3896 low = 0, high = arg1;
3897 break;
3898 case GE_EXPR: /* + [c, -] */
3899 in_p = ! in_p, low = arg1, high = 0;
3900 break;
3901 case LT_EXPR: /* - [c, -] */
3902 low = arg1, high = 0;
3903 break;
3904 case LE_EXPR: /* + [-, c] */
3905 in_p = ! in_p, low = 0, high = arg1;
3906 break;
3907 default:
3908 gcc_unreachable ();
3911 /* If this is an unsigned comparison, we also know that EXP is
3912 greater than or equal to zero. We base the range tests we make
3913 on that fact, so we record it here so we can parse existing
3914 range tests. We test arg0_type since often the return type
3915 of, e.g. EQ_EXPR, is boolean. */
3916 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3918 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3919 in_p, low, high, 1,
3920 build_int_cst (arg0_type, 0),
3921 NULL_TREE))
3922 break;
3924 in_p = n_in_p, low = n_low, high = n_high;
3926 /* If the high bound is missing, but we have a nonzero low
3927 bound, reverse the range so it goes from zero to the low bound
3928 minus 1. */
3929 if (high == 0 && low && ! integer_zerop (low))
3931 in_p = ! in_p;
3932 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3933 integer_one_node, 0);
3934 low = build_int_cst (arg0_type, 0);
3938 exp = arg0;
3939 continue;
3941 case NEGATE_EXPR:
3942 /* (-x) IN [a,b] -> x in [-b, -a] */
3943 n_low = range_binop (MINUS_EXPR, exp_type,
3944 build_int_cst (exp_type, 0),
3945 0, high, 1);
3946 n_high = range_binop (MINUS_EXPR, exp_type,
3947 build_int_cst (exp_type, 0),
3948 0, low, 0);
3949 low = n_low, high = n_high;
3950 exp = arg0;
3951 continue;
3953 case BIT_NOT_EXPR:
3954 /* ~ X -> -X - 1 */
3955 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3956 build_int_cst (exp_type, 1));
3957 continue;
3959 case PLUS_EXPR: case MINUS_EXPR:
3960 if (TREE_CODE (arg1) != INTEGER_CST)
3961 break;
3963 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3964 move a constant to the other side. */
3965 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3966 break;
3968 /* If EXP is signed, any overflow in the computation is undefined,
3969 so we don't worry about it so long as our computations on
3970 the bounds don't overflow. For unsigned, overflow is defined
3971 and this is exactly the right thing. */
3972 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3973 arg0_type, low, 0, arg1, 0);
3974 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3975 arg0_type, high, 1, arg1, 0);
3976 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3977 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3978 break;
3980 /* Check for an unsigned range which has wrapped around the maximum
3981 value thus making n_high < n_low, and normalize it. */
3982 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3984 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3985 integer_one_node, 0);
3986 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3987 integer_one_node, 0);
3989 /* If the range is of the form +/- [ x+1, x ], we won't
3990 be able to normalize it. But then, it represents the
3991 whole range or the empty set, so make it
3992 +/- [ -, - ]. */
3993 if (tree_int_cst_equal (n_low, low)
3994 && tree_int_cst_equal (n_high, high))
3995 low = high = 0;
3996 else
3997 in_p = ! in_p;
3999 else
4000 low = n_low, high = n_high;
4002 exp = arg0;
4003 continue;
4005 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4006 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4007 break;
4009 if (! INTEGRAL_TYPE_P (arg0_type)
4010 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4011 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4012 break;
4014 n_low = low, n_high = high;
4016 if (n_low != 0)
4017 n_low = fold_convert (arg0_type, n_low);
4019 if (n_high != 0)
4020 n_high = fold_convert (arg0_type, n_high);
4023 /* If we're converting arg0 from an unsigned type to exp's
4024 signed type, we will be doing the comparison as unsigned.
4025 The tests above have already verified that LOW and HIGH
4026 are both positive.
4028 So we have to ensure that we will handle large unsigned
4029 values the same way that the current signed bounds treat
4030 negative values. */
4032 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4034 tree high_positive;
4035 tree equiv_type = lang_hooks.types.type_for_mode
4036 (TYPE_MODE (arg0_type), 1);
4038 /* A range without an upper bound is, naturally, unbounded.
4039 Since convert would have cropped a very large value, use
4040 the max value for the destination type. */
4041 high_positive
4042 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4043 : TYPE_MAX_VALUE (arg0_type);
4045 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4046 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4047 fold_convert (arg0_type,
4048 high_positive),
4049 build_int_cst (arg0_type, 1));
4051 /* If the low bound is specified, "and" the range with the
4052 range for which the original unsigned value will be
4053 positive. */
4054 if (low != 0)
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 1, n_low, n_high, 1,
4058 fold_convert (arg0_type,
4059 integer_zero_node),
4060 high_positive))
4061 break;
4063 in_p = (n_in_p == in_p);
4065 else
4067 /* Otherwise, "or" the range with the range of the input
4068 that will be interpreted as negative. */
4069 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4070 0, n_low, n_high, 1,
4071 fold_convert (arg0_type,
4072 integer_zero_node),
4073 high_positive))
4074 break;
4076 in_p = (in_p != n_in_p);
4080 exp = arg0;
4081 low = n_low, high = n_high;
4082 continue;
4084 default:
4085 break;
4088 break;
4091 /* If EXP is a constant, we can evaluate whether this is true or false. */
4092 if (TREE_CODE (exp) == INTEGER_CST)
4094 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4095 exp, 0, low, 0))
4096 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4097 exp, 1, high, 1)));
4098 low = high = 0;
4099 exp = 0;
4102 *pin_p = in_p, *plow = low, *phigh = high;
4103 return exp;
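/* Editor's illustration (hypothetical, not from the original source):
   two things make_range does, acted out on 32-bit unsigned ints.  A
   TRUTH_NOT_EXPR just flips IN_P, and a PLUS_EXPR range that has
   wrapped around the maximum value is normalized by inverting it, as
   in the code above.  */

static int
make_range_demo (unsigned int x)
{
  /* `!(x > 10)' is + [0, 10]: NOT turns - [0, 10] inside out.  */
  int flipped = ((!(x > 10)) == (x <= 10));
  /* `x + 3 <= 7' wraps around zero; it really tests
     x <= 4 || x >= 0xfffffffd, i.e. being outside [5, 0xfffffffc].  */
  int normalized = ((x + 3 <= 7) == !(x >= 5 && x <= 0xfffffffcU));
  return flipped && normalized;		/* always 1 */
}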
4106 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4107 type, TYPE, return an expression to test if EXP is in (or out of, depending
4108 on IN_P) the range. Return 0 if the test couldn't be created. */
4110 static tree
4111 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4113 tree etype = TREE_TYPE (exp);
4114 tree value;
4116 #ifdef HAVE_canonicalize_funcptr_for_compare
4117 /* Disable this optimization for function pointer expressions
4118 on targets that require function pointer canonicalization. */
4119 if (HAVE_canonicalize_funcptr_for_compare
4120 && TREE_CODE (etype) == POINTER_TYPE
4121 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4122 return NULL_TREE;
4123 #endif
4125 if (! in_p)
4127 value = build_range_check (type, exp, 1, low, high);
4128 if (value != 0)
4129 return invert_truthvalue (value);
4131 return 0;
4134 if (low == 0 && high == 0)
4135 return build_int_cst (type, 1);
4137 if (low == 0)
4138 return fold_build2 (LE_EXPR, type, exp,
4139 fold_convert (etype, high));
4141 if (high == 0)
4142 return fold_build2 (GE_EXPR, type, exp,
4143 fold_convert (etype, low));
4145 if (operand_equal_p (low, high, 0))
4146 return fold_build2 (EQ_EXPR, type, exp,
4147 fold_convert (etype, low));
4149 if (integer_zerop (low))
4151 if (! TYPE_UNSIGNED (etype))
4153 etype = lang_hooks.types.unsigned_type (etype);
4154 high = fold_convert (etype, high);
4155 exp = fold_convert (etype, exp);
4157 return build_range_check (type, exp, 1, 0, high);
4160 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4161 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4163 unsigned HOST_WIDE_INT lo;
4164 HOST_WIDE_INT hi;
4165 int prec;
4167 prec = TYPE_PRECISION (etype);
4168 if (prec <= HOST_BITS_PER_WIDE_INT)
4170 hi = 0;
4171 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4173 else
4175 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4176 lo = (unsigned HOST_WIDE_INT) -1;
4179 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4181 if (TYPE_UNSIGNED (etype))
4183 etype = lang_hooks.types.signed_type (etype);
4184 exp = fold_convert (etype, exp);
4186 return fold_build2 (GT_EXPR, type, exp,
4187 build_int_cst (etype, 0));
4191 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4192 This requires wrap-around arithmetic for the type of the expression. */
4193 switch (TREE_CODE (etype))
4195 case INTEGER_TYPE:
4196 /* There is no requirement that LOW be within the range of ETYPE
4197 if the latter is a subtype. It must, however, be within the base
4198 type of ETYPE. So be sure we do the subtraction in that type. */
4199 if (TREE_TYPE (etype))
4200 etype = TREE_TYPE (etype);
4201 break;
4203 case ENUMERAL_TYPE:
4204 case BOOLEAN_TYPE:
4205 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4206 TYPE_UNSIGNED (etype));
4207 break;
4209 default:
4210 break;
4213 /* If we don't have wrap-around arithmetic up front, try to force it. */
4214 if (TREE_CODE (etype) == INTEGER_TYPE
4215 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4217 tree utype, minv, maxv;
4219 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4220 for the type in question, as we rely on this here. */
4221 utype = lang_hooks.types.unsigned_type (etype);
4222 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4223 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4224 integer_one_node, 1);
4225 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4227 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4228 minv, 1, maxv, 1)))
4229 etype = utype;
4230 else
4231 return 0;
4234 high = fold_convert (etype, high);
4235 low = fold_convert (etype, low);
4236 exp = fold_convert (etype, exp);
4238 value = const_binop (MINUS_EXPR, high, low, 0);
4240 if (value != 0 && !TREE_OVERFLOW (value))
4241 return build_range_check (type,
4242 fold_build2 (MINUS_EXPR, etype, exp, low),
4243 1, build_int_cst (etype, 0), value);
4245 return 0;
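/* Editor's illustration (hypothetical, not from the original source):
   the shape of the checks built above, on a plain unsigned char.  The
   [1, 127] test uses the sign-bit trick from the code above; the
   general case subtracts the low bound and compares unsigned.  */

static int
build_range_check_demo (unsigned char c)
{
  /* (c >= 1 && c <= 127) becomes (signed char) c > 0, assuming the
     usual two's complement conversion.  */
  int sign_trick = ((c >= 1 && c <= 127) == ((signed char) c > 0));
  /* (c >= '0' && c <= '9') becomes an unsigned c - '0' <= 9.  */
  int subtract = ((c >= '0' && c <= '9')
		  == ((unsigned char) (c - '0') <= 9));
  return sign_trick && subtract;	/* always 1 */
}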
4248 /* Return the predecessor of VAL in its type, handling the infinite case. */
4250 static tree
4251 range_predecessor (tree val)
4253 tree type = TREE_TYPE (val);
4255 if (INTEGRAL_TYPE_P (type)
4256 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4257 return 0;
4258 else
4259 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4262 /* Return the successor of VAL in its type, handling the infinite case. */
4264 static tree
4265 range_successor (tree val)
4267 tree type = TREE_TYPE (val);
4269 if (INTEGRAL_TYPE_P (type)
4270 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4271 return 0;
4272 else
4273 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4276 /* Given two ranges, see if we can merge them into one. Return 1 if we
4277 can, 0 if we can't. Set the output range into the specified parameters. */
4279 static int
4280 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4281 tree high0, int in1_p, tree low1, tree high1)
4283 int no_overlap;
4284 int subset;
4285 int temp;
4286 tree tem;
4287 int in_p;
4288 tree low, high;
4289 int lowequal = ((low0 == 0 && low1 == 0)
4290 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4291 low0, 0, low1, 0)));
4292 int highequal = ((high0 == 0 && high1 == 0)
4293 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4294 high0, 1, high1, 1)));
4296 /* Make range 0 be the range that starts first, or ends last if they
4297 start at the same value. Swap them if that isn't already the case. */
4298 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4299 low0, 0, low1, 0))
4300 || (lowequal
4301 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4302 high1, 1, high0, 1))))
4304 temp = in0_p, in0_p = in1_p, in1_p = temp;
4305 tem = low0, low0 = low1, low1 = tem;
4306 tem = high0, high0 = high1, high1 = tem;
4309 /* Now flag two cases, whether the ranges are disjoint or whether the
4310 second range is totally subsumed in the first. Note that the tests
4311 below are simplified by the ones above. */
4312 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4313 high0, 1, low1, 0));
4314 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4315 high1, 1, high0, 1));
4317 /* We now have four cases, depending on whether we are including or
4318 excluding the two ranges. */
4319 if (in0_p && in1_p)
4321 /* If they don't overlap, the result is false. If the second range
4322 is a subset it is the result. Otherwise, the range is from the start
4323 of the second to the end of the first. */
4324 if (no_overlap)
4325 in_p = 0, low = high = 0;
4326 else if (subset)
4327 in_p = 1, low = low1, high = high1;
4328 else
4329 in_p = 1, low = low1, high = high0;
4332 else if (in0_p && ! in1_p)
4334 /* If they don't overlap, the result is the first range. If they are
4335 equal, the result is false. If the second range is a subset of the
4336 first, and the ranges begin at the same place, we go from just after
4337 the end of the second range to the end of the first. If the second
4338 range is not a subset of the first, or if it is a subset and both
4339 ranges end at the same place, the range starts at the start of the
4340 first range and ends just before the second range.
4341 Otherwise, we can't describe this as a single range. */
4342 if (no_overlap)
4343 in_p = 1, low = low0, high = high0;
4344 else if (lowequal && highequal)
4345 in_p = 0, low = high = 0;
4346 else if (subset && lowequal)
4348 low = range_successor (high1);
4349 high = high0;
4350 in_p = (low != 0);
4352 else if (! subset || highequal)
4354 low = low0;
4355 high = range_predecessor (low1);
4356 in_p = (high != 0);
4358 else
4359 return 0;
4362 else if (! in0_p && in1_p)
4364 /* If they don't overlap, the result is the second range. If the second
4365 is a subset of the first, the result is false. Otherwise,
4366 the range starts just after the first range and ends at the
4367 end of the second. */
4368 if (no_overlap)
4369 in_p = 1, low = low1, high = high1;
4370 else if (subset || highequal)
4371 in_p = 0, low = high = 0;
4372 else
4374 low = range_successor (high0);
4375 high = high1;
4376 in_p = (low != 0);
4380 else
4382 /* The case where we are excluding both ranges. Here the complex case
4383 is if they don't overlap. In that case, the only time we have a
4384 range is if they are adjacent. If the second is a subset of the
4385 first, the result is the first. Otherwise, the range to exclude
4386 starts at the beginning of the first range and ends at the end of the
4387 second. */
4388 if (no_overlap)
4390 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4391 range_successor (high0),
4392 1, low1, 0)))
4393 in_p = 0, low = low0, high = high1;
4394 else
4396 /* Canonicalize - [min, x] into - [-, x]. */
4397 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4398 switch (TREE_CODE (TREE_TYPE (low0)))
4400 case ENUMERAL_TYPE:
4401 if (TYPE_PRECISION (TREE_TYPE (low0))
4402 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4403 break;
4404 /* FALLTHROUGH */
4405 case INTEGER_TYPE:
4406 if (tree_int_cst_equal (low0,
4407 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4408 low0 = 0;
4409 break;
4410 case POINTER_TYPE:
4411 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4412 && integer_zerop (low0))
4413 low0 = 0;
4414 break;
4415 default:
4416 break;
4419 /* Canonicalize - [x, max] into - [x, -]. */
4420 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4421 switch (TREE_CODE (TREE_TYPE (high1)))
4423 case ENUMERAL_TYPE:
4424 if (TYPE_PRECISION (TREE_TYPE (high1))
4425 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4426 break;
4427 /* FALLTHROUGH */
4428 case INTEGER_TYPE:
4429 if (tree_int_cst_equal (high1,
4430 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4431 high1 = 0;
4432 break;
4433 case POINTER_TYPE:
4434 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4435 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4436 high1, 1,
4437 integer_one_node, 1)))
4438 high1 = 0;
4439 break;
4440 default:
4441 break;
4444 /* The ranges might also be adjacent between the maximum and
4445 minimum values of the given type. For
4446 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4447 return + [x + 1, y - 1]. */
4448 if (low0 == 0 && high1 == 0)
4450 low = range_successor (high0);
4451 high = range_predecessor (low1);
4452 if (low == 0 || high == 0)
4453 return 0;
4455 in_p = 1;
4457 else
4458 return 0;
4461 else if (subset)
4462 in_p = 0, low = low0, high = high0;
4463 else
4464 in_p = 0, low = low0, high = high1;
4467 *pin_p = in_p, *plow = low, *phigh = high;
4468 return 1;
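/* Editor's illustration (not from the original file): the in0_p &&
   in1_p case above on concrete numbers.  The ranges [2, 10] and
   [5, 20] overlap without either containing the other, so the merged
   range runs from the start of the second to the end of the first.  */

static int
merge_ranges_demo (unsigned int x)
{
  int both = ((x >= 2 && x <= 10) && (x >= 5 && x <= 20));
  int merged = (x >= 5 && x <= 10);
  return both == merged;		/* always 1 */
}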
4472 /* Subroutine of fold, looking inside expressions of the form
4473 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4474 of the COND_EXPR. This function is being used also to optimize
4475 A op B ? C : A, by reversing the comparison first.
4477 Return a folded expression whose code is not a COND_EXPR
4478 anymore, or NULL_TREE if no folding opportunity is found. */
4480 static tree
4481 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4483 enum tree_code comp_code = TREE_CODE (arg0);
4484 tree arg00 = TREE_OPERAND (arg0, 0);
4485 tree arg01 = TREE_OPERAND (arg0, 1);
4486 tree arg1_type = TREE_TYPE (arg1);
4487 tree tem;
4489 STRIP_NOPS (arg1);
4490 STRIP_NOPS (arg2);
4492 /* If we have A op 0 ? A : -A, consider applying the following
4493 transformations:
4495 A == 0? A : -A same as -A
4496 A != 0? A : -A same as A
4497 A >= 0? A : -A same as abs (A)
4498 A > 0? A : -A same as abs (A)
4499 A <= 0? A : -A same as -abs (A)
4500 A < 0? A : -A same as -abs (A)
4502 None of these transformations work for modes with signed
4503 zeros. If A is +/-0, the first two transformations will
4504 change the sign of the result (from +0 to -0, or vice
4505 versa). The last four will fix the sign of the result,
4506 even though the original expressions could be positive or
4507 negative, depending on the sign of A.
4509 Note that all these transformations are correct if A is
4510 NaN, since the two alternatives (A and -A) are also NaNs. */
4511 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4512 ? real_zerop (arg01)
4513 : integer_zerop (arg01))
4514 && ((TREE_CODE (arg2) == NEGATE_EXPR
4515 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4516 /* In the case that A is of the form X-Y, '-A' (arg2) may
4517 have already been folded to Y-X, check for that. */
4518 || (TREE_CODE (arg1) == MINUS_EXPR
4519 && TREE_CODE (arg2) == MINUS_EXPR
4520 && operand_equal_p (TREE_OPERAND (arg1, 0),
4521 TREE_OPERAND (arg2, 1), 0)
4522 && operand_equal_p (TREE_OPERAND (arg1, 1),
4523 TREE_OPERAND (arg2, 0), 0))))
4524 switch (comp_code)
4526 case EQ_EXPR:
4527 case UNEQ_EXPR:
4528 tem = fold_convert (arg1_type, arg1);
4529 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4530 case NE_EXPR:
4531 case LTGT_EXPR:
4532 return pedantic_non_lvalue (fold_convert (type, arg1));
4533 case UNGE_EXPR:
4534 case UNGT_EXPR:
4535 if (flag_trapping_math)
4536 break;
4537 /* Fall through. */
4538 case GE_EXPR:
4539 case GT_EXPR:
4540 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4541 arg1 = fold_convert (lang_hooks.types.signed_type
4542 (TREE_TYPE (arg1)), arg1);
4543 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4544 return pedantic_non_lvalue (fold_convert (type, tem));
4545 case UNLE_EXPR:
4546 case UNLT_EXPR:
4547 if (flag_trapping_math)
4548 break;
4549 case LE_EXPR:
4550 case LT_EXPR:
4551 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4552 arg1 = fold_convert (lang_hooks.types.signed_type
4553 (TREE_TYPE (arg1)), arg1);
4554 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4555 return negate_expr (fold_convert (type, tem));
4556 default:
4557 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4558 break;
4561 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4562 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4563 both transformations are correct when A is NaN: A != 0
4564 is then true, and A == 0 is false. */
4566 if (integer_zerop (arg01) && integer_zerop (arg2))
4568 if (comp_code == NE_EXPR)
4569 return pedantic_non_lvalue (fold_convert (type, arg1));
4570 else if (comp_code == EQ_EXPR)
4571 return build_int_cst (type, 0);
4574 /* Try some transformations of A op B ? A : B.
4576 A == B? A : B same as B
4577 A != B? A : B same as A
4578 A >= B? A : B same as max (A, B)
4579 A > B? A : B same as max (B, A)
4580 A <= B? A : B same as min (A, B)
4581 A < B? A : B same as min (B, A)
4583 As above, these transformations don't work in the presence
4584 of signed zeros. For example, if A and B are zeros of
4585 opposite sign, the first two transformations will change
4586 the sign of the result. In the last four, the original
4587 expressions give different results for (A=+0, B=-0) and
4588 (A=-0, B=+0), but the transformed expressions do not.
4590 The first two transformations are correct if either A or B
4591 is a NaN. In the first transformation, the condition will
4592 be false, and B will indeed be chosen. In the case of the
4593 second transformation, the condition A != B will be true,
4594 and A will be chosen.
4596 The conversions to max() and min() are not correct if B is
4597 a number and A is not. The conditions in the original
4598 expressions will be false, so all four give B. The min()
4599 and max() versions would give a NaN instead. */
4600 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4601 /* Avoid these transformations if the COND_EXPR may be used
4602 as an lvalue in the C++ front-end. PR c++/19199. */
4603 && (in_gimple_form
4604 || (strcmp (lang_hooks.name, "GNU C++") != 0
4605 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4606 || ! maybe_lvalue_p (arg1)
4607 || ! maybe_lvalue_p (arg2)))
4609 tree comp_op0 = arg00;
4610 tree comp_op1 = arg01;
4611 tree comp_type = TREE_TYPE (comp_op0);
4613 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4614 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4616 comp_type = type;
4617 comp_op0 = arg1;
4618 comp_op1 = arg2;
4621 switch (comp_code)
4623 case EQ_EXPR:
4624 return pedantic_non_lvalue (fold_convert (type, arg2));
4625 case NE_EXPR:
4626 return pedantic_non_lvalue (fold_convert (type, arg1));
4627 case LE_EXPR:
4628 case LT_EXPR:
4629 case UNLE_EXPR:
4630 case UNLT_EXPR:
4631 /* In C++ a ?: expression can be an lvalue, so put the
4632 operand which will be used if they are equal first
4633 so that we can convert this back to the
4634 corresponding COND_EXPR. */
4635 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4637 comp_op0 = fold_convert (comp_type, comp_op0);
4638 comp_op1 = fold_convert (comp_type, comp_op1);
4639 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4640 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4641 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4642 return pedantic_non_lvalue (fold_convert (type, tem));
4644 break;
4645 case GE_EXPR:
4646 case GT_EXPR:
4647 case UNGE_EXPR:
4648 case UNGT_EXPR:
4649 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4651 comp_op0 = fold_convert (comp_type, comp_op0);
4652 comp_op1 = fold_convert (comp_type, comp_op1);
4653 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4654 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4655 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4656 return pedantic_non_lvalue (fold_convert (type, tem));
4658 break;
4659 case UNEQ_EXPR:
4660 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4661 return pedantic_non_lvalue (fold_convert (type, arg2));
4662 break;
4663 case LTGT_EXPR:
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4665 return pedantic_non_lvalue (fold_convert (type, arg1));
4666 break;
4667 default:
4668 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4669 break;
4673 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4674 we might still be able to simplify this. For example,
4675 if C1 is one less or one more than C2, this might have started
4676 out as a MIN or MAX and been transformed by this function.
4677 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4679 if (INTEGRAL_TYPE_P (type)
4680 && TREE_CODE (arg01) == INTEGER_CST
4681 && TREE_CODE (arg2) == INTEGER_CST)
4682 switch (comp_code)
4684 case EQ_EXPR:
4685 /* We can replace A with C1 in this case. */
4686 arg1 = fold_convert (type, arg01);
4687 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4689 case LT_EXPR:
4690 /* If C1 is C2 + 1, this is min(A, C2). */
4691 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4692 OEP_ONLY_CONST)
4693 && operand_equal_p (arg01,
4694 const_binop (PLUS_EXPR, arg2,
4695 build_int_cst (type, 1), 0),
4696 OEP_ONLY_CONST))
4697 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4698 type, arg1, arg2));
4699 break;
4701 case LE_EXPR:
4702 /* If C1 is C2 - 1, this is min(A, C2). */
4703 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4704 OEP_ONLY_CONST)
4705 && operand_equal_p (arg01,
4706 const_binop (MINUS_EXPR, arg2,
4707 build_int_cst (type, 1), 0),
4708 OEP_ONLY_CONST))
4709 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4710 type, arg1, arg2));
4711 break;
4713 case GT_EXPR:
4714 /* If C1 is C2 - 1, this is max(A, C2). */
4715 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4716 OEP_ONLY_CONST)
4717 && operand_equal_p (arg01,
4718 const_binop (MINUS_EXPR, arg2,
4719 build_int_cst (type, 1), 0),
4720 OEP_ONLY_CONST))
4721 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4722 type, arg1, arg2));
4723 break;
4725 case GE_EXPR:
4726 /* If C1 is C2 + 1, this is max(A, C2). */
4727 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4728 OEP_ONLY_CONST)
4729 && operand_equal_p (arg01,
4730 const_binop (PLUS_EXPR, arg2,
4731 build_int_cst (type, 1), 0),
4732 OEP_ONLY_CONST))
4733 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4734 type, arg1, arg2));
4735 break;
4736 case NE_EXPR:
4737 break;
4738 default:
4739 gcc_unreachable ();
4742 return NULL_TREE;
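/* Editor's illustration (hypothetical, not from the original source):
   the A op 0 ? A : -A and A op B ? A : B transformations, on ints,
   where the signed-zero and NaN caveats above do not apply.  */

static int
cond_expr_demo (int a, int b)
{
  int abs_a = (a >= 0 ? a : -a);	/* A >= 0 ? A : -A  ==  abs (A) */
  int min_ab = (a < b ? a : b);		/* A < B ? A : B  ==  min (B, A) */
  return abs_a == (a < 0 ? -a : a)
	 && min_ab == (b < a ? b : a);	/* always 1 */
}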
4747 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4748 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4749 #endif
4751 /* EXP is some logical combination of boolean tests. See if we can
4752 merge it into some range test. Return the new tree if so. */
4754 static tree
4755 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4757 int or_op = (code == TRUTH_ORIF_EXPR
4758 || code == TRUTH_OR_EXPR);
4759 int in0_p, in1_p, in_p;
4760 tree low0, low1, low, high0, high1, high;
4761 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4762 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4763 tree tem;
4765 /* If this is an OR operation, invert both sides; we will invert
4766 again at the end. */
4767 if (or_op)
4768 in0_p = ! in0_p, in1_p = ! in1_p;
4770 /* If both expressions are the same, if we can merge the ranges, and we
4771 can build the range test, return it or it inverted. If one of the
4772 ranges is always true or always false, consider it to be the same
4773 expression as the other. */
4774 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4775 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4776 in1_p, low1, high1)
4777 && 0 != (tem = (build_range_check (type,
4778 lhs != 0 ? lhs
4779 : rhs != 0 ? rhs : integer_zero_node,
4780 in_p, low, high))))
4781 return or_op ? invert_truthvalue (tem) : tem;
4783 /* On machines where the branch cost is expensive, if this is a
4784 short-circuited branch and the underlying object on both sides
4785 is the same, make a non-short-circuit operation. */
4786 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4787 && lhs != 0 && rhs != 0
4788 && (code == TRUTH_ANDIF_EXPR
4789 || code == TRUTH_ORIF_EXPR)
4790 && operand_equal_p (lhs, rhs, 0))
4792 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4793 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4794 which cases we can't do this. */
4795 if (simple_operand_p (lhs))
4796 return build2 (code == TRUTH_ANDIF_EXPR
4797 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4798 type, op0, op1);
4800 else if (lang_hooks.decls.global_bindings_p () == 0
4801 && ! CONTAINS_PLACEHOLDER_P (lhs))
4803 tree common = save_expr (lhs);
4805 if (0 != (lhs = build_range_check (type, common,
4806 or_op ? ! in0_p : in0_p,
4807 low0, high0))
4808 && (0 != (rhs = build_range_check (type, common,
4809 or_op ? ! in1_p : in1_p,
4810 low1, high1))))
4811 return build2 (code == TRUTH_ANDIF_EXPR
4812 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4813 type, lhs, rhs);
4817 return 0;
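/* Editor's illustration (not from the original file): the
   non-short-circuit rewrite above.  When both tests are simple, the
   conditional TRUTH_ANDIF_EXPR can become an unconditional
   TRUTH_AND_EXPR, removing a branch; the bitwise & below plays that
   role, since each comparison is already 0 or 1.  */

static int
non_short_circuit_demo (int x)
{
  int conditional = (x > 0 && x < 10);	  /* TRUTH_ANDIF_EXPR */
  int unconditional = (x > 0) & (x < 10); /* TRUTH_AND_EXPR */
  return conditional == unconditional;	  /* always 1 */
}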
4820 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4821 bit value. Arrange things so the extra bits will be set to zero if and
4822 only if C is sign-extended to its full width. If MASK is nonzero,
4823 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4825 static tree
4826 unextend (tree c, int p, int unsignedp, tree mask)
4828 tree type = TREE_TYPE (c);
4829 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4830 tree temp;
4832 if (p == modesize || unsignedp)
4833 return c;
4835 /* We work by getting just the sign bit into the low-order bit, then
4836 into the high-order bit, then sign-extend. We then XOR that value
4837 with C. */
4838 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4839 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4841 /* We must use a signed type in order to get an arithmetic right shift.
4842 However, we must also avoid introducing accidental overflows, so that
4843 a subsequent call to integer_zerop will work. Hence we must
4844 do the type conversion here. At this point, the constant is either
4845 zero or one, and the conversion to a signed type can never overflow.
4846 We could get an overflow if this conversion is done anywhere else. */
4847 if (TYPE_UNSIGNED (type))
4848 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4850 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4851 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4852 if (mask != 0)
4853 temp = const_binop (BIT_AND_EXPR, temp,
4854 fold_convert (TREE_TYPE (c), mask), 0);
4855 /* If necessary, convert the type back to match the type of C. */
4856 if (TYPE_UNSIGNED (type))
4857 temp = fold_convert (type, temp);
4859 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
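/* Editor's illustration (hypothetical, not from the original source):
   the sign-bit dance above on a plain 32-bit int holding a P-bit
   value (0 < P < 32), assuming two's complement and an arithmetic
   right shift, as the code above also does.  The result has all bits
   above P - 1 clear exactly when C was sign-extended from P bits.  */

static int
unextend_demo (int c, int p)
{
  int temp = (c >> (p - 1)) & 1;	/* the P-bit sign, in bit 0 */
  temp <<= 31;				/* into the high-order bit */
  temp >>= 31 - p;			/* sign-extend: bits P..31 set */
  return c ^ temp;			/* cancel the extension bits */
}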
4862 /* Find ways of folding logical expressions of LHS and RHS:
4863 Try to merge two comparisons to the same innermost item.
4864 Look for range tests like "ch >= '0' && ch <= '9'".
4865 Look for combinations of simple terms on machines with expensive branches
4866 and evaluate the RHS unconditionally.
4868 For example, if we have p->a == 2 && p->b == 4 and we can make an
4869 object large enough to span both A and B, we can do this with a comparison
4870 against the object ANDed with the a mask.
4872 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4873 operations to do this with one comparison.
4875 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4876 function and the one above.
4878 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4879 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4881 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4882 two operands.
4884 We return the simplified tree or 0 if no optimization is possible. */
4886 static tree
4887 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4889 /* If this is the "or" of two comparisons, we can do something if
4890 the comparisons are NE_EXPR. If this is the "and", we can do something
4891 if the comparisons are EQ_EXPR. I.e.,
4892 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4894 WANTED_CODE is this operation code. For single bit fields, we can
4895 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4896 comparison for one-bit fields. */
4898 enum tree_code wanted_code;
4899 enum tree_code lcode, rcode;
4900 tree ll_arg, lr_arg, rl_arg, rr_arg;
4901 tree ll_inner, lr_inner, rl_inner, rr_inner;
4902 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4903 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4904 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4905 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4906 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4907 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4908 enum machine_mode lnmode, rnmode;
4909 tree ll_mask, lr_mask, rl_mask, rr_mask;
4910 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4911 tree l_const, r_const;
4912 tree lntype, rntype, result;
4913 int first_bit, end_bit;
4914 int volatilep;
4915 tree orig_lhs = lhs, orig_rhs = rhs;
4916 enum tree_code orig_code = code;
4918 /* Start by getting the comparison codes. Fail if anything is volatile.
4919 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4920 it were surrounded with a NE_EXPR. */
4922 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4923 return 0;
4925 lcode = TREE_CODE (lhs);
4926 rcode = TREE_CODE (rhs);
4928 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4930 lhs = build2 (NE_EXPR, truth_type, lhs,
4931 build_int_cst (TREE_TYPE (lhs), 0));
4932 lcode = NE_EXPR;
4935 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4937 rhs = build2 (NE_EXPR, truth_type, rhs,
4938 build_int_cst (TREE_TYPE (rhs), 0));
4939 rcode = NE_EXPR;
4942 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4943 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4944 return 0;
4946 ll_arg = TREE_OPERAND (lhs, 0);
4947 lr_arg = TREE_OPERAND (lhs, 1);
4948 rl_arg = TREE_OPERAND (rhs, 0);
4949 rr_arg = TREE_OPERAND (rhs, 1);
4951 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4952 if (simple_operand_p (ll_arg)
4953 && simple_operand_p (lr_arg))
4955 tree result;
4956 if (operand_equal_p (ll_arg, rl_arg, 0)
4957 && operand_equal_p (lr_arg, rr_arg, 0))
4959 result = combine_comparisons (code, lcode, rcode,
4960 truth_type, ll_arg, lr_arg);
4961 if (result)
4962 return result;
4964 else if (operand_equal_p (ll_arg, rr_arg, 0)
4965 && operand_equal_p (lr_arg, rl_arg, 0))
4967 result = combine_comparisons (code, lcode,
4968 swap_tree_comparison (rcode),
4969 truth_type, ll_arg, lr_arg);
4970 if (result)
4971 return result;
4975 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4978 /* If the RHS can be evaluated unconditionally and its operands are
4979 simple, it wins to evaluate the RHS unconditionally on machines
4980 with expensive branches. In this case, this isn't a comparison
4981 that can be merged. Avoid doing this if the RHS is a floating-point
4982 comparison since those can trap. */
4984 if (BRANCH_COST >= 2
4985 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4986 && simple_operand_p (rl_arg)
4987 && simple_operand_p (rr_arg))
4989 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4990 if (code == TRUTH_OR_EXPR
4991 && lcode == NE_EXPR && integer_zerop (lr_arg)
4992 && rcode == NE_EXPR && integer_zerop (rr_arg)
4993 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4994 return build2 (NE_EXPR, truth_type,
4995 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4996 ll_arg, rl_arg),
4997 build_int_cst (TREE_TYPE (ll_arg), 0));
4999 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5000 if (code == TRUTH_AND_EXPR
5001 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5002 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5003 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5004 return build2 (EQ_EXPR, truth_type,
5005 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5006 ll_arg, rl_arg),
5007 build_int_cst (TREE_TYPE (ll_arg), 0));
5009 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5011 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5012 return build2 (code, truth_type, lhs, rhs);
5013 return NULL_TREE;
5017 /* See if the comparisons can be merged. Then get all the parameters for
5018 each side. */
5020 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5021 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5022 return 0;
5024 volatilep = 0;
5025 ll_inner = decode_field_reference (ll_arg,
5026 &ll_bitsize, &ll_bitpos, &ll_mode,
5027 &ll_unsignedp, &volatilep, &ll_mask,
5028 &ll_and_mask);
5029 lr_inner = decode_field_reference (lr_arg,
5030 &lr_bitsize, &lr_bitpos, &lr_mode,
5031 &lr_unsignedp, &volatilep, &lr_mask,
5032 &lr_and_mask);
5033 rl_inner = decode_field_reference (rl_arg,
5034 &rl_bitsize, &rl_bitpos, &rl_mode,
5035 &rl_unsignedp, &volatilep, &rl_mask,
5036 &rl_and_mask);
5037 rr_inner = decode_field_reference (rr_arg,
5038 &rr_bitsize, &rr_bitpos, &rr_mode,
5039 &rr_unsignedp, &volatilep, &rr_mask,
5040 &rr_and_mask);
5042 /* The inner operation on the lhs of each comparison must be the same
5043 if we are to be able to do anything.
5044 Then see if we have constants. If not, the same must be true for
5045 the rhs's. */
5046 if (volatilep || ll_inner == 0 || rl_inner == 0
5047 || ! operand_equal_p (ll_inner, rl_inner, 0))
5048 return 0;
5050 if (TREE_CODE (lr_arg) == INTEGER_CST
5051 && TREE_CODE (rr_arg) == INTEGER_CST)
5052 l_const = lr_arg, r_const = rr_arg;
5053 else if (lr_inner == 0 || rr_inner == 0
5054 || ! operand_equal_p (lr_inner, rr_inner, 0))
5055 return 0;
5056 else
5057 l_const = r_const = 0;
5059 /* If either comparison code is not correct for our logical operation,
5060 fail. However, we can convert a one-bit comparison against zero into
5061 the opposite comparison against that bit being set in the field. */
5063 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5064 if (lcode != wanted_code)
5066 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5068 /* Make the left operand unsigned, since we are only interested
5069 in the value of one bit. Otherwise we are doing the wrong
5070 thing below. */
5071 ll_unsignedp = 1;
5072 l_const = ll_mask;
5074 else
5075 return 0;
5078 /* This is analogous to the code for l_const above. */
5079 if (rcode != wanted_code)
5081 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5083 rl_unsignedp = 1;
5084 r_const = rl_mask;
5086 else
5087 return 0;
5090 /* After this point all optimizations will generate bit-field
5091 references, which we might not want. */
5092 if (! lang_hooks.can_use_bit_fields_p ())
5093 return 0;
5095 /* See if we can find a mode that contains both fields being compared on
5096 the left. If we can't, fail. Otherwise, update all constants and masks
5097 to be relative to a field of that size. */
5098 first_bit = MIN (ll_bitpos, rl_bitpos);
5099 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5100 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5101 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5102 volatilep);
5103 if (lnmode == VOIDmode)
5104 return 0;
5106 lnbitsize = GET_MODE_BITSIZE (lnmode);
5107 lnbitpos = first_bit & ~ (lnbitsize - 1);
5108 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5109 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5111 if (BYTES_BIG_ENDIAN)
5113 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5114 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5117 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5118 size_int (xll_bitpos), 0);
5119 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5120 size_int (xrl_bitpos), 0);
5122 if (l_const)
5124 l_const = fold_convert (lntype, l_const);
5125 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5126 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5127 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5128 fold_build1 (BIT_NOT_EXPR,
5129 lntype, ll_mask),
5130 0)))
5132 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5134 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5137 if (r_const)
5139 r_const = fold_convert (lntype, r_const);
5140 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5141 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5142 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5143 fold_build1 (BIT_NOT_EXPR,
5144 lntype, rl_mask),
5145 0)))
5147 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5149 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5153 /* If the right sides are not constant, do the same for them. Also,
5154 disallow this optimization if a size or signedness mismatch occurs
5155 between the left and right sides. */
5156 if (l_const == 0)
5158 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5159 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5160 /* Make sure the two fields on the right
5161 correspond to the left without being swapped. */
5162 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5163 return 0;
5165 first_bit = MIN (lr_bitpos, rr_bitpos);
5166 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5167 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5168 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5169 volatilep);
5170 if (rnmode == VOIDmode)
5171 return 0;
5173 rnbitsize = GET_MODE_BITSIZE (rnmode);
5174 rnbitpos = first_bit & ~ (rnbitsize - 1);
5175 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5176 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5178 if (BYTES_BIG_ENDIAN)
5180 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5181 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5184 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5185 size_int (xlr_bitpos), 0);
5186 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5187 size_int (xrr_bitpos), 0);
5189 /* Make a mask that corresponds to both fields being compared.
5190 Do this for both items being compared. If the operands are the
5191 same size and the bits being compared are in the same position
5192 then we can do this by masking both and comparing the masked
5193 results. */
5194 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5195 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5196 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5198 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5199 ll_unsignedp || rl_unsignedp);
5200 if (! all_ones_mask_p (ll_mask, lnbitsize))
5201 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5203 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5204 lr_unsignedp || rr_unsignedp);
5205 if (! all_ones_mask_p (lr_mask, rnbitsize))
5206 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5208 return build2 (wanted_code, truth_type, lhs, rhs);
5211 /* There is still another way we can do something: If both pairs of
5212 fields being compared are adjacent, we may be able to make a wider
5213 field containing them both.
5215 Note that we still must mask the lhs/rhs expressions. Furthermore,
5216 the mask must be shifted to account for the shift done by
5217 make_bit_field_ref. */
5218 if ((ll_bitsize + ll_bitpos == rl_bitpos
5219 && lr_bitsize + lr_bitpos == rr_bitpos)
5220 || (ll_bitpos == rl_bitpos + rl_bitsize
5221 && lr_bitpos == rr_bitpos + rr_bitsize))
5223 tree type;
5225 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5226 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5227 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5228 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5230 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5231 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5232 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5233 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5235 /* Convert to the smaller type before masking out unwanted bits. */
5236 type = lntype;
5237 if (lntype != rntype)
5239 if (lnbitsize > rnbitsize)
5241 lhs = fold_convert (rntype, lhs);
5242 ll_mask = fold_convert (rntype, ll_mask);
5243 type = rntype;
5245 else if (lnbitsize < rnbitsize)
5247 rhs = fold_convert (lntype, rhs);
5248 lr_mask = fold_convert (lntype, lr_mask);
5249 type = lntype;
5253 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5254 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5256 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5257 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5259 return build2 (wanted_code, truth_type, lhs, rhs);
5262 return 0;
5265 /* Handle the case of comparisons with constants. If there is something in
5266 common between the masks, those bits of the constants must be the same.
5267 If not, the condition is always false. Test for this to avoid generating
5268 incorrect code below. */
5269 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5270 if (! integer_zerop (result)
5271 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5272 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5274 if (wanted_code == NE_EXPR)
5276 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5277 return constant_boolean_node (true, truth_type);
5279 else
5281 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5282 return constant_boolean_node (false, truth_type);
5286 /* Construct the expression we will return. First get the component
5287 reference we will make. Unless the mask is all ones the width of
5288 that field, perform the mask operation. Then compare with the
5289 merged constant. */
5290 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5291 ll_unsignedp || rl_unsignedp);
5293 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5294 if (! all_ones_mask_p (ll_mask, lnbitsize))
5295 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5297 return build2 (wanted_code, truth_type, result,
5298 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
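/* Editor's illustration (hypothetical, not from the original source):
   the net effect of the mask merging above, for two byte-sized fields
   packed into one word.  Two equality tests against constants become
   one comparison of the word ANDed with the merged mask.  */

static int
fold_truthop_demo (unsigned int word)
{
  /* p->a == 2 && p->b == 4, with a in bits 0-7 and b in bits 8-15.  */
  int two_tests = ((word & 0xff) == 2 && ((word >> 8) & 0xff) == 4);
  /* One masked comparison against the merged constant.  */
  int one_test = ((word & 0xffff) == 0x0402);
  return two_tests == one_test;		/* always 1 */
}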
5301 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5302 constant. */
5304 static tree
5305 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5307 tree arg0 = op0;
5308 enum tree_code op_code;
5309 tree comp_const = op1;
5310 tree minmax_const;
5311 int consts_equal, consts_lt;
5312 tree inner;
5314 STRIP_SIGN_NOPS (arg0);
5316 op_code = TREE_CODE (arg0);
5317 minmax_const = TREE_OPERAND (arg0, 1);
5318 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5319 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5320 inner = TREE_OPERAND (arg0, 0);
5322 /* If something does not permit us to optimize, return the original tree. */
5323 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5324 || TREE_CODE (comp_const) != INTEGER_CST
5325 || TREE_CONSTANT_OVERFLOW (comp_const)
5326 || TREE_CODE (minmax_const) != INTEGER_CST
5327 || TREE_CONSTANT_OVERFLOW (minmax_const))
5328 return NULL_TREE;
5330 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5331 and GT_EXPR, doing the rest with recursive calls using logical
5332 simplifications. */
5333 switch (code)
5335 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5337 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5338 type, op0, op1);
5339 if (tem)
5340 return invert_truthvalue (tem);
5341 return NULL_TREE;
5344 case GE_EXPR:
5345 return
5346 fold_build2 (TRUTH_ORIF_EXPR, type,
5347 optimize_minmax_comparison
5348 (EQ_EXPR, type, arg0, comp_const),
5349 optimize_minmax_comparison
5350 (GT_EXPR, type, arg0, comp_const));
5352 case EQ_EXPR:
5353 if (op_code == MAX_EXPR && consts_equal)
5354 /* MAX (X, 0) == 0 -> X <= 0 */
5355 return fold_build2 (LE_EXPR, type, inner, comp_const);
5357 else if (op_code == MAX_EXPR && consts_lt)
5358 /* MAX (X, 0) == 5 -> X == 5 */
5359 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5361 else if (op_code == MAX_EXPR)
5362 /* MAX (X, 0) == -1 -> false */
5363 return omit_one_operand (type, integer_zero_node, inner);
5365 else if (consts_equal)
5366 /* MIN (X, 0) == 0 -> X >= 0 */
5367 return fold_build2 (GE_EXPR, type, inner, comp_const);
5369 else if (consts_lt)
5370 /* MIN (X, 0) == 5 -> false */
5371 return omit_one_operand (type, integer_zero_node, inner);
5373 else
5374 /* MIN (X, 0) == -1 -> X == -1 */
5375 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5377 case GT_EXPR:
5378 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5379 /* MAX (X, 0) > 0 -> X > 0
5380 MAX (X, 0) > 5 -> X > 5 */
5381 return fold_build2 (GT_EXPR, type, inner, comp_const);
5383 else if (op_code == MAX_EXPR)
5384 /* MAX (X, 0) > -1 -> true */
5385 return omit_one_operand (type, integer_one_node, inner);
5387 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5388 /* MIN (X, 0) > 0 -> false
5389 MIN (X, 0) > 5 -> false */
5390 return omit_one_operand (type, integer_zero_node, inner);
5392 else
5393 /* MIN (X, 0) > -1 -> X > -1 */
5394 return fold_build2 (GT_EXPR, type, inner, comp_const);
5396 default:
5397 return NULL_TREE;
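/* Editor's illustration (not from the original file): a few of the
   MIN/MAX simplifications above, spelled out on ints.  MAX (X, 0) is
   written as a conditional, and each simplified form agrees with the
   original comparison.  */

static int
minmax_comparison_demo (int x)
{
  int max_x_0 = (x > 0 ? x : 0);	/* MAX (X, 0) */
  return ((max_x_0 == 0) == (x <= 0))	/* MAX (X, 0) == 0  ->  X <= 0 */
	 && ((max_x_0 > 5) == (x > 5))	/* MAX (X, 0) > 5   ->  X > 5  */
	 && ((max_x_0 == -1) == 0);	/* MAX (X, 0) == -1 ->  false  */
}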
5401 /* T is an integer expression that is being multiplied or divided, or of
5402 which a modulus is being taken (CODE says which, and what kind of divide
5403 or modulus), by a constant C. See if we can eliminate that operation by folding it with
5404 other operations already in T. WIDE_TYPE, if non-null, is a type that
5405 should be used for the computation if wider than our type.
5407 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5408 (X * 2) + (Y * 4). We must, however, be assured that either the original
5409 expression would not overflow or that overflow is undefined for the type
5410 in the language in question.
5412 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5413 the machine has a multiply-accumulate insn or that this is part of an
5414 addressing calculation.
5416 If we return a non-null expression, it is an equivalent form of the
5417 original computation, but need not be in the original type. */
5419 static tree
5420 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5422 /* To avoid exponential search depth, refuse to allow recursion past
5423 three levels. Beyond that (1) it's highly unlikely that we'll find
5424 something interesting and (2) we've probably processed it before
5425 when we built the inner expression. */
5427 static int depth;
5428 tree ret;
5430 if (depth > 3)
5431 return NULL;
5433 depth++;
5434 ret = extract_muldiv_1 (t, c, code, wide_type);
5435 depth--;
5437 return ret;
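/* Editor's illustration (hypothetical, not from the original source):
   the distributions described above, on signed ints and assuming no
   intermediate overflow (which is undefined for signed types anyway,
   as the comment above notes).  */

static int
extract_muldiv_demo (int x, int y)
{
  /* (X * 8 + Y * 16) / 4 distributes to X * 2 + Y * 4.  */
  int divide = ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  /* (X + 7) * 4 is canonicalized to X * 4 + 28.  */
  int canon = ((x + 7) * 4 == x * 4 + 28);
  return divide && canon;	/* 1 whenever nothing overflows */
}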
5440 static tree
5441 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5443 tree type = TREE_TYPE (t);
5444 enum tree_code tcode = TREE_CODE (t);
5445 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5446 > GET_MODE_SIZE (TYPE_MODE (type)))
5447 ? wide_type : type);
5448 tree t1, t2;
5449 int same_p = tcode == code;
5450 tree op0 = NULL_TREE, op1 = NULL_TREE;
5452 /* Don't deal with constants of zero here; they confuse the code below. */
5453 if (integer_zerop (c))
5454 return NULL_TREE;
5456 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5457 op0 = TREE_OPERAND (t, 0);
5459 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5460 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5462 /* Note that we need not handle conditional operations here since fold
5463 already handles those cases. So just do arithmetic here. */
5464 switch (tcode)
5466 case INTEGER_CST:
5467 /* For a constant, we can always simplify if we are a multiply
5468 or (for divide and modulus) if it is a multiple of our constant. */
5469 if (code == MULT_EXPR
5470 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5471 return const_binop (code, fold_convert (ctype, t),
5472 fold_convert (ctype, c), 0);
5473 break;
5475 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5476 /* If op0 is an expression ... */
5477 if ((COMPARISON_CLASS_P (op0)
5478 || UNARY_CLASS_P (op0)
5479 || BINARY_CLASS_P (op0)
5480 || EXPRESSION_CLASS_P (op0))
5481 /* ... and is unsigned, and its type is smaller than ctype,
5482 then we cannot pass through as widening. */
5483 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5484 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5485 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5486 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5487 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5488 /* ... or this is a truncation (t is narrower than op0),
5489 then we cannot pass through this narrowing. */
5490 || (GET_MODE_SIZE (TYPE_MODE (type))
5491 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5492 /* ... or signedness changes for division or modulus,
5493 then we cannot pass through this conversion. */
5494 || (code != MULT_EXPR
5495 && (TYPE_UNSIGNED (ctype)
5496 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5497 break;
5499 /* Pass the constant down and see if we can make a simplification. If
5500 we can, replace this expression with the inner simplification for
5501 possible later conversion to our or some other type. */
5502 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5503 && TREE_CODE (t2) == INTEGER_CST
5504 && ! TREE_CONSTANT_OVERFLOW (t2)
5505 && (0 != (t1 = extract_muldiv (op0, t2, code,
5506 code == MULT_EXPR
5507 ? ctype : NULL_TREE))))
5508 return t1;
5509 break;
5511 case ABS_EXPR:
5512 /* If widening the type changes it from signed to unsigned, then we
5513 must avoid building ABS_EXPR itself as unsigned. */
5514 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5516 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5517 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5519 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5520 return fold_convert (ctype, t1);
5522 break;
5524 /* FALLTHROUGH */
5525 case NEGATE_EXPR:
5526 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5527 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5528 break;
5530 case MIN_EXPR: case MAX_EXPR:
5531 /* If widening the type changes the signedness, then we can't perform
5532 this optimization as that changes the result. */
5533 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5534 break;
5536 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5537 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5538 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5540 if (tree_int_cst_sgn (c) < 0)
5541 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5543 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5544 fold_convert (ctype, t2));
5546 break;
5548 case LSHIFT_EXPR: case RSHIFT_EXPR:
5549 /* If the second operand is constant, this is a multiplication
5550 or floor division by a power of two, so we can treat it that
5551 way unless the multiplier or divisor overflows. Signed
5552 left-shift overflow is implementation-defined rather than
5553 undefined in C90, so do not convert signed left shift into
5554 multiplication. */
5555 if (TREE_CODE (op1) == INTEGER_CST
5556 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5557 /* const_binop may not detect overflow correctly,
5558 so check for it explicitly here. */
5559 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5560 && TREE_INT_CST_HIGH (op1) == 0
5561 && 0 != (t1 = fold_convert (ctype,
5562 const_binop (LSHIFT_EXPR,
5563 size_one_node,
5564 op1, 0)))
5565 && ! TREE_OVERFLOW (t1))
5566 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5567 ? MULT_EXPR : FLOOR_DIV_EXPR,
5568 ctype, fold_convert (ctype, op0), t1),
5569 c, code, wide_type);
5570 break;
5572 case PLUS_EXPR: case MINUS_EXPR:
5573 /* See if we can eliminate the operation on both sides. If we can, we
5574 can return a new PLUS or MINUS. If we can't, the only remaining
5575 cases where we can do anything are if the second operand is a
5576 constant. */
5577 t1 = extract_muldiv (op0, c, code, wide_type);
5578 t2 = extract_muldiv (op1, c, code, wide_type);
5579 if (t1 != 0 && t2 != 0
5580 && (code == MULT_EXPR
5581 /* If not multiplication, we can only do this if both operands
5582 are divisible by c. */
5583 || (multiple_of_p (ctype, op0, c)
5584 && multiple_of_p (ctype, op1, c))))
5585 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5586 fold_convert (ctype, t2));
5588 /* If this was a subtraction, negate OP1 and set it to be an addition.
5589 This simplifies the logic below. */
5590 if (tcode == MINUS_EXPR)
5591 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5593 if (TREE_CODE (op1) != INTEGER_CST)
5594 break;
5596 /* If either OP1 or C is negative, this optimization is not safe for
5597 some of the division and remainder types while for others we need
5598 to change the code. */
5599 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5601 if (code == CEIL_DIV_EXPR)
5602 code = FLOOR_DIV_EXPR;
5603 else if (code == FLOOR_DIV_EXPR)
5604 code = CEIL_DIV_EXPR;
5605 else if (code != MULT_EXPR
5606 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5607 break;
5610 /* If it's a multiply or a division/modulus operation of a multiple
5611 of our constant, do the operation and verify it doesn't overflow. */
5612 if (code == MULT_EXPR
5613 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5615 op1 = const_binop (code, fold_convert (ctype, op1),
5616 fold_convert (ctype, c), 0);
5617 /* We allow the constant to overflow with wrapping semantics. */
5618 if (op1 == 0
5619 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5620 break;
5622 else
5623 break;
5625 /* If we have an unsigned type that is not a sizetype, we cannot widen
5626 the operation since it will change the result if the original
5627 computation overflowed. */
5628 if (TYPE_UNSIGNED (ctype)
5629 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5630 && ctype != type)
5631 break;
5633 /* If we were able to eliminate our operation from the first side,
5634 apply our operation to the second side and reform the PLUS. */
5635 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5636 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5638 /* The last case is if we are a multiply. In that case, we can
5639 apply the distributive law to commute the multiply and addition
5640 if the multiplication of the constants doesn't overflow. */
5641 if (code == MULT_EXPR)
5642 return fold_build2 (tcode, ctype,
5643 fold_build2 (code, ctype,
5644 fold_convert (ctype, op0),
5645 fold_convert (ctype, c)),
5646 op1);
5648 break;
5650 case MULT_EXPR:
5651 /* We have a special case here if we are doing something like
5652 (C * 8) % 4 since we know that's zero. */
5653 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5654 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5655 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5656 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5657 return omit_one_operand (type, integer_zero_node, op0);
5659 /* ... fall through ... */
5661 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5662 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5663 /* If we can extract our operation from the LHS, do so and return a
5664 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5665 do something only if the second operand is a constant. */
5666 if (same_p
5667 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5668 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5669 fold_convert (ctype, op1));
5670 else if (tcode == MULT_EXPR && code == MULT_EXPR
5671 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5672 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5673 fold_convert (ctype, t1));
5674 else if (TREE_CODE (op1) != INTEGER_CST)
5675 return 0;
5677 /* If these are the same operation types, we can associate them
5678 assuming no overflow. */
5679 if (tcode == code
5680 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5681 fold_convert (ctype, c), 0))
5682 && ! TREE_OVERFLOW (t1))
5683 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5685 /* If these operations "cancel" each other, we have the main
5686 optimizations of this pass, which occur when either constant is a
5687 multiple of the other, in which case we replace this with either an
5688 operation of CODE or TCODE.
5690 If we have an unsigned type that is not a sizetype, we cannot do
5691 this since it will change the result if the original computation
5692 overflowed. */
5693 if ((! TYPE_UNSIGNED (ctype)
5694 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5695 && ! flag_wrapv
5696 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5697 || (tcode == MULT_EXPR
5698 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5699 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5701 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5702 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5703 fold_convert (ctype,
5704 const_binop (TRUNC_DIV_EXPR,
5705 op1, c, 0)));
5706 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5707 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5708 fold_convert (ctype,
5709 const_binop (TRUNC_DIV_EXPR,
5710 c, op1, 0)));
5712 break;
5714 default:
5715 break;
5718 return 0;
5721 /* Return a node which has the indicated constant VALUE (either 0 or
5722 1), and is of the indicated TYPE. */
5724 tree
5725 constant_boolean_node (int value, tree type)
5727 if (type == integer_type_node)
5728 return value ? integer_one_node : integer_zero_node;
5729 else if (type == boolean_type_node)
5730 return value ? boolean_true_node : boolean_false_node;
5731 else
5732 return build_int_cst (type, value);
5736 /* Return true if expr looks like an ARRAY_REF and set base and
5737 offset to the appropriate trees. If there is no offset,
5738 offset is set to NULL_TREE. Base will be canonicalized to
5739 something you can get the element type from using
5740 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5741 in bytes to the base. */
5743 static bool
5744 extract_array_ref (tree expr, tree *base, tree *offset)
5746 /* One canonical form is a PLUS_EXPR with the first
5747 argument being an ADDR_EXPR with a possible NOP_EXPR
5748 attached. */
5749 if (TREE_CODE (expr) == PLUS_EXPR)
5751 tree op0 = TREE_OPERAND (expr, 0);
5752 tree inner_base, dummy1;
5753 /* Strip NOP_EXPRs here because the C frontends and/or
5754 folders may present us with (int *)&x.a + 4B. */
5755 STRIP_NOPS (op0);
5756 if (extract_array_ref (op0, &inner_base, &dummy1))
5758 *base = inner_base;
5759 if (dummy1 == NULL_TREE)
5760 *offset = TREE_OPERAND (expr, 1);
5761 else
5762 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5763 dummy1, TREE_OPERAND (expr, 1));
5764 return true;
5767 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5768 which we transform into an ADDR_EXPR with appropriate
5769 offset. For other arguments to the ADDR_EXPR we assume
5770 zero offset and as such do not care about the ADDR_EXPR
5771 type and strip possible nops from it. */
5772 else if (TREE_CODE (expr) == ADDR_EXPR)
5774 tree op0 = TREE_OPERAND (expr, 0);
5775 if (TREE_CODE (op0) == ARRAY_REF)
5777 tree idx = TREE_OPERAND (op0, 1);
5778 *base = TREE_OPERAND (op0, 0);
5779 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5780 array_ref_element_size (op0));
5782 else
5784 /* Handle array-to-pointer decay as &a. */
5785 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5786 *base = TREE_OPERAND (expr, 0);
5787 else
5788 *base = expr;
5789 *offset = NULL_TREE;
5791 return true;
5793 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5794 else if (SSA_VAR_P (expr)
5795 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5797 *base = expr;
5798 *offset = NULL_TREE;
5799 return true;
5802 return false;
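/* Illustrative examples (added commentary, not part of the original
   source): for EXPR == &a[4] with 4-byte elements, *BASE becomes a and
   *OFFSET becomes 4 * 4 == 16; for EXPR == p + 8 with pointer p, *BASE
   becomes p and *OFFSET becomes 8; for a plain pointer variable p,
   *BASE is p and *OFFSET is NULL_TREE.  */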
5806 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5807 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5808 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5809 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5810 COND is the first argument to CODE; otherwise (as in the example
5811 given here), it is the second argument. TYPE is the type of the
5812 original expression. Return NULL_TREE if no simplification is
5813 possible. */
5815 static tree
5816 fold_binary_op_with_conditional_arg (enum tree_code code,
5817 tree type, tree op0, tree op1,
5818 tree cond, tree arg, int cond_first_p)
5820 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5821 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5822 tree test, true_value, false_value;
5823 tree lhs = NULL_TREE;
5824 tree rhs = NULL_TREE;
5826 /* This transformation is only worthwhile if we don't have to wrap
5827 arg in a SAVE_EXPR, and the operation can be simplified on at least
5828 one of the branches once it's pushed inside the COND_EXPR. */
5829 if (!TREE_CONSTANT (arg))
5830 return NULL_TREE;
5832 if (TREE_CODE (cond) == COND_EXPR)
5834 test = TREE_OPERAND (cond, 0);
5835 true_value = TREE_OPERAND (cond, 1);
5836 false_value = TREE_OPERAND (cond, 2);
5837 /* If this operand throws an exception, then it does not make
5838 sense to try to perform a logical or arithmetic operation
5839 involving it. */
5840 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5841 lhs = true_value;
5842 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5843 rhs = false_value;
5845 else
5847 tree testtype = TREE_TYPE (cond);
5848 test = cond;
5849 true_value = constant_boolean_node (true, testtype);
5850 false_value = constant_boolean_node (false, testtype);
5853 arg = fold_convert (arg_type, arg);
5854 if (lhs == 0)
5856 true_value = fold_convert (cond_type, true_value);
5857 if (cond_first_p)
5858 lhs = fold_build2 (code, type, true_value, arg);
5859 else
5860 lhs = fold_build2 (code, type, arg, true_value);
5862 if (rhs == 0)
5864 false_value = fold_convert (cond_type, false_value);
5865 if (cond_first_p)
5866 rhs = fold_build2 (code, type, false_value, arg);
5867 else
5868 rhs = fold_build2 (code, type, arg, false_value);
5871 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5872 return fold_convert (type, test);
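/* Illustrative example (added commentary, not part of the original
   source): with CODE == PLUS_EXPR, COND == (b ? 3 : 5) and the
   constant ARG == 10, the result is b ? 13 : 15; the addition has
   been pushed into both arms and folded away.  */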
5876 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5878 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5879 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5880 ADDEND is the same as X.
5882 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5883 and finite. The problematic cases are when X is zero, and its mode
5884 has signed zeros. In the case of rounding towards -infinity,
5885 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5886 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5888 static bool
5889 fold_real_zero_addition_p (tree type, tree addend, int negate)
5891 if (!real_zerop (addend))
5892 return false;
5894 /* Don't allow the fold with -fsignaling-nans. */
5895 if (HONOR_SNANS (TYPE_MODE (type)))
5896 return false;
5898 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5899 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5900 return true;
5902 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5903 if (TREE_CODE (addend) == REAL_CST
5904 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5905 negate = !negate;
5907 /* The mode has signed zeros, and we have to honor their sign.
5908 In this situation, there is only one case we can return true for.
5909 X - 0 is the same as X unless rounding towards -infinity is
5910 supported. */
5911 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
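/* Illustrative examples (added commentary, not part of the original
   source): x + 0.0 may be folded to x only when signed zeros can be
   ignored, because (-0.0) + 0.0 is +0.0 under the default rounding
   mode; x - 0.0 may be folded to x even with signed zeros honored,
   unless sign-dependent rounding is in effect, since only rounding
   towards -infinity makes 0.0 - 0.0 yield -0.0.  */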
5914 /* Subroutine of fold() that checks comparisons of built-in math
5915 functions against real constants.
5917 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5918 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5919 is the type of the result and ARG0 and ARG1 are the operands of the
5920 comparison. ARG1 must be a TREE_REAL_CST.
5922 The function returns the constant folded tree if a simplification
5923 can be made, and NULL_TREE otherwise. */
5925 static tree
5926 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5927 tree type, tree arg0, tree arg1)
5929 REAL_VALUE_TYPE c;
5931 if (BUILTIN_SQRT_P (fcode))
5933 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5934 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5936 c = TREE_REAL_CST (arg1);
5937 if (REAL_VALUE_NEGATIVE (c))
5939 /* sqrt(x) < y is always false, if y is negative. */
5940 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5941 return omit_one_operand (type, integer_zero_node, arg);
5943 /* sqrt(x) > y is always true, if y is negative and we
5944 don't care about NaNs, i.e. negative values of x. */
5945 if (code == NE_EXPR || !HONOR_NANS (mode))
5946 return omit_one_operand (type, integer_one_node, arg);
5948 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5949 return fold_build2 (GE_EXPR, type, arg,
5950 build_real (TREE_TYPE (arg), dconst0));
5952 else if (code == GT_EXPR || code == GE_EXPR)
5954 REAL_VALUE_TYPE c2;
5956 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5957 real_convert (&c2, mode, &c2);
5959 if (REAL_VALUE_ISINF (c2))
5961 /* sqrt(x) > y is x == +Inf, when y is very large. */
5962 if (HONOR_INFINITIES (mode))
5963 return fold_build2 (EQ_EXPR, type, arg,
5964 build_real (TREE_TYPE (arg), c2));
5966 /* sqrt(x) > y is always false, when y is very large
5967 and we don't care about infinities. */
5968 return omit_one_operand (type, integer_zero_node, arg);
5971 /* sqrt(x) > c is the same as x > c*c. */
5972 return fold_build2 (code, type, arg,
5973 build_real (TREE_TYPE (arg), c2));
5975 else if (code == LT_EXPR || code == LE_EXPR)
5977 REAL_VALUE_TYPE c2;
5979 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5980 real_convert (&c2, mode, &c2);
5982 if (REAL_VALUE_ISINF (c2))
5984 /* sqrt(x) < y is always true, when y is a very large
5985 value and we don't care about NaNs or Infinities. */
5986 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5987 return omit_one_operand (type, integer_one_node, arg);
5989 /* sqrt(x) < y is x != +Inf when y is very large and we
5990 don't care about NaNs. */
5991 if (! HONOR_NANS (mode))
5992 return fold_build2 (NE_EXPR, type, arg,
5993 build_real (TREE_TYPE (arg), c2));
5995 /* sqrt(x) < y is x >= 0 when y is very large and we
5996 don't care about Infinities. */
5997 if (! HONOR_INFINITIES (mode))
5998 return fold_build2 (GE_EXPR, type, arg,
5999 build_real (TREE_TYPE (arg), dconst0));
6001 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6002 if (lang_hooks.decls.global_bindings_p () != 0
6003 || CONTAINS_PLACEHOLDER_P (arg))
6004 return NULL_TREE;
6006 arg = save_expr (arg);
6007 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6008 fold_build2 (GE_EXPR, type, arg,
6009 build_real (TREE_TYPE (arg),
6010 dconst0)),
6011 fold_build2 (NE_EXPR, type, arg,
6012 build_real (TREE_TYPE (arg),
6013 c2)));
6016 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6017 if (! HONOR_NANS (mode))
6018 return fold_build2 (code, type, arg,
6019 build_real (TREE_TYPE (arg), c2));
6021 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6022 if (lang_hooks.decls.global_bindings_p () == 0
6023 && ! CONTAINS_PLACEHOLDER_P (arg))
6025 arg = save_expr (arg);
6026 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6027 fold_build2 (GE_EXPR, type, arg,
6028 build_real (TREE_TYPE (arg),
6029 dconst0)),
6030 fold_build2 (code, type, arg,
6031 build_real (TREE_TYPE (arg),
6032 c2)));
6037 return NULL_TREE;
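/* Illustrative examples (added commentary, not part of the original
   source): sqrt(x) > 2.0 folds to x > 4.0; sqrt(x) < 2.0 folds to
   x >= 0.0 && x < 4.0 when NaNs must be honored, or simply x < 4.0
   when they need not be; sqrt(x) < -1.0 folds to 0 outright.  */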
6040 /* Subroutine of fold() that optimizes comparisons against Infinities,
6041 either +Inf or -Inf.
6043 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6044 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6045 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6047 The function returns the constant folded tree if a simplification
6048 can be made, and NULL_TREE otherwise. */
6050 static tree
6051 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6053 enum machine_mode mode;
6054 REAL_VALUE_TYPE max;
6055 tree temp;
6056 bool neg;
6058 mode = TYPE_MODE (TREE_TYPE (arg0));
6060 /* For negative infinity swap the sense of the comparison. */
6061 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6062 if (neg)
6063 code = swap_tree_comparison (code);
6065 switch (code)
6067 case GT_EXPR:
6068 /* x > +Inf is always false, if we ignore sNaNs. */
6069 if (HONOR_SNANS (mode))
6070 return NULL_TREE;
6071 return omit_one_operand (type, integer_zero_node, arg0);
6073 case LE_EXPR:
6074 /* x <= +Inf is always true, if we don't care about NaNs. */
6075 if (! HONOR_NANS (mode))
6076 return omit_one_operand (type, integer_one_node, arg0);
6078 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6079 if (lang_hooks.decls.global_bindings_p () == 0
6080 && ! CONTAINS_PLACEHOLDER_P (arg0))
6082 arg0 = save_expr (arg0);
6083 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6085 break;
6087 case EQ_EXPR:
6088 case GE_EXPR:
6089 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6090 real_maxval (&max, neg, mode);
6091 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6092 arg0, build_real (TREE_TYPE (arg0), max));
6094 case LT_EXPR:
6095 /* x < +Inf is always equal to x <= DBL_MAX. */
6096 real_maxval (&max, neg, mode);
6097 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6098 arg0, build_real (TREE_TYPE (arg0), max));
6100 case NE_EXPR:
6101 /* x != +Inf is always equal to !(x > DBL_MAX). */
6102 real_maxval (&max, neg, mode);
6103 if (! HONOR_NANS (mode))
6104 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6105 arg0, build_real (TREE_TYPE (arg0), max));
6107 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6108 arg0, build_real (TREE_TYPE (arg0), max));
6109 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6111 default:
6112 break;
6115 return NULL_TREE;
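/* Illustrative examples (added commentary, not part of the original
   source; assume double operands): x >= +Inf folds to x > DBL_MAX,
   x < +Inf folds to x <= DBL_MAX, and x > +Inf folds to 0 whenever
   signaling NaNs need not be honored.  */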
6118 /* Subroutine of fold() that optimizes comparisons of a division by
6119 a nonzero integer constant against an integer constant, i.e.
6120 X/C1 op C2.
6122 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6123 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6124 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6126 The function returns the constant folded tree if a simplification
6127 can be made, and NULL_TREE otherwise. */
6129 static tree
6130 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6132 tree prod, tmp, hi, lo;
6133 tree arg00 = TREE_OPERAND (arg0, 0);
6134 tree arg01 = TREE_OPERAND (arg0, 1);
6135 unsigned HOST_WIDE_INT lpart;
6136 HOST_WIDE_INT hpart;
6137 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6138 bool neg_overflow;
6139 int overflow;
6141 /* We have to do this the hard way to detect unsigned overflow.
6142 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6143 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6144 TREE_INT_CST_HIGH (arg01),
6145 TREE_INT_CST_LOW (arg1),
6146 TREE_INT_CST_HIGH (arg1),
6147 &lpart, &hpart, unsigned_p);
6148 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6149 prod = force_fit_type (prod, -1, overflow, false);
6150 neg_overflow = false;
6152 if (unsigned_p)
6154 tmp = int_const_binop (MINUS_EXPR, arg01,
6155 build_int_cst (TREE_TYPE (arg01), 1), 0);
6156 lo = prod;
6158 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6159 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6160 TREE_INT_CST_HIGH (prod),
6161 TREE_INT_CST_LOW (tmp),
6162 TREE_INT_CST_HIGH (tmp),
6163 &lpart, &hpart, unsigned_p);
6164 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6165 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6166 TREE_CONSTANT_OVERFLOW (prod));
6168 else if (tree_int_cst_sgn (arg01) >= 0)
6170 tmp = int_const_binop (MINUS_EXPR, arg01,
6171 build_int_cst (TREE_TYPE (arg01), 1), 0);
6172 switch (tree_int_cst_sgn (arg1))
6174 case -1:
6175 neg_overflow = true;
6176 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6177 hi = prod;
6178 break;
6180 case 0:
6181 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6182 hi = tmp;
6183 break;
6185 case 1:
6186 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6187 lo = prod;
6188 break;
6190 default:
6191 gcc_unreachable ();
6194 else
6196 /* A negative divisor reverses the relational operators. */
6197 code = swap_tree_comparison (code);
6199 tmp = int_const_binop (PLUS_EXPR, arg01,
6200 build_int_cst (TREE_TYPE (arg01), 1), 0);
6201 switch (tree_int_cst_sgn (arg1))
6203 case -1:
6204 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6205 lo = prod;
6206 break;
6208 case 0:
6209 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6210 lo = tmp;
6211 break;
6213 case 1:
6214 neg_overflow = true;
6215 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6216 hi = prod;
6217 break;
6219 default:
6220 gcc_unreachable ();
6224 switch (code)
6226 case EQ_EXPR:
6227 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6228 return omit_one_operand (type, integer_zero_node, arg00);
6229 if (TREE_OVERFLOW (hi))
6230 return fold_build2 (GE_EXPR, type, arg00, lo);
6231 if (TREE_OVERFLOW (lo))
6232 return fold_build2 (LE_EXPR, type, arg00, hi);
6233 return build_range_check (type, arg00, 1, lo, hi);
6235 case NE_EXPR:
6236 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6237 return omit_one_operand (type, integer_one_node, arg00);
6238 if (TREE_OVERFLOW (hi))
6239 return fold_build2 (LT_EXPR, type, arg00, lo);
6240 if (TREE_OVERFLOW (lo))
6241 return fold_build2 (GT_EXPR, type, arg00, hi);
6242 return build_range_check (type, arg00, 0, lo, hi);
6244 case LT_EXPR:
6245 if (TREE_OVERFLOW (lo))
6247 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6248 return omit_one_operand (type, tmp, arg00);
6250 return fold_build2 (LT_EXPR, type, arg00, lo);
6252 case LE_EXPR:
6253 if (TREE_OVERFLOW (hi))
6255 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6256 return omit_one_operand (type, tmp, arg00);
6258 return fold_build2 (LE_EXPR, type, arg00, hi);
6260 case GT_EXPR:
6261 if (TREE_OVERFLOW (hi))
6263 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6264 return omit_one_operand (type, tmp, arg00);
6266 return fold_build2 (GT_EXPR, type, arg00, hi);
6268 case GE_EXPR:
6269 if (TREE_OVERFLOW (lo))
6271 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6272 return omit_one_operand (type, tmp, arg00);
6274 return fold_build2 (GE_EXPR, type, arg00, lo);
6276 default:
6277 break;
6280 return NULL_TREE;
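/* Illustrative example (added commentary, not part of the original
   source): for signed x, the comparison x / 4 == 2 holds exactly for
   x in [8, 11], so it folds to the range check built by
   build_range_check (type, x, 1, 8, 11).  */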
6284 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6285 equality/inequality test, then return a simplified form of the test
6286 using a sign test. Otherwise return NULL_TREE. RESULT_TYPE is the desired
6287 result type. */
6289 static tree
6290 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6291 tree result_type)
6293 /* If this is testing a single bit, we can optimize the test. */
6294 if ((code == NE_EXPR || code == EQ_EXPR)
6295 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6296 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6298 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6299 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6300 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6302 if (arg00 != NULL_TREE
6303 /* This is only a win if casting to a signed type is cheap,
6304 i.e. when arg00's type is not a partial mode. */
6305 && TYPE_PRECISION (TREE_TYPE (arg00))
6306 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6308 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6309 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6310 result_type, fold_convert (stype, arg00),
6311 build_int_cst (stype, 0));
6315 return NULL_TREE;
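/* Illustrative example (added commentary, not part of the original
   source): for a 32-bit unsigned x, (x & 0x80000000) != 0 tests the
   sign bit and folds to (int) x < 0; likewise (x & 0x80000000) == 0
   folds to (int) x >= 0.  */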
6318 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6319 equality/inequality test, then return a simplified form of
6320 the test using shifts and logical operations. Otherwise return
6321 NULL_TREE. RESULT_TYPE is the desired result type. */
6323 tree
6324 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6325 tree result_type)
6327 /* If this is testing a single bit, we can optimize the test. */
6328 if ((code == NE_EXPR || code == EQ_EXPR)
6329 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6330 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6332 tree inner = TREE_OPERAND (arg0, 0);
6333 tree type = TREE_TYPE (arg0);
6334 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6335 enum machine_mode operand_mode = TYPE_MODE (type);
6336 int ops_unsigned;
6337 tree signed_type, unsigned_type, intermediate_type;
6338 tree tem, one;
6340 /* First, see if we can fold the single bit test into a sign-bit
6341 test. */
6342 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6343 result_type);
6344 if (tem)
6345 return tem;
6347 /* Otherwise we have (A & C) != 0 where C is a single bit,
6348 convert that into ((A >> C2) & 1), where C2 = log2(C).
6349 Similarly for (A & C) == 0. */
6351 /* If INNER is a right shift of a constant and it plus BITNUM does
6352 not overflow, adjust BITNUM and INNER. */
6353 if (TREE_CODE (inner) == RSHIFT_EXPR
6354 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6355 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6356 && bitnum < TYPE_PRECISION (type)
6357 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6358 bitnum - TYPE_PRECISION (type)))
6360 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6361 inner = TREE_OPERAND (inner, 0);
6364 /* If we are going to be able to omit the AND below, we must do our
6365 operations as unsigned. If we must use the AND, we have a choice.
6366 Normally unsigned is faster, but for some machines signed is. */
6367 #ifdef LOAD_EXTEND_OP
6368 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6369 && !flag_syntax_only) ? 0 : 1;
6370 #else
6371 ops_unsigned = 1;
6372 #endif
6374 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6375 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6376 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6377 inner = fold_convert (intermediate_type, inner);
6379 if (bitnum != 0)
6380 inner = build2 (RSHIFT_EXPR, intermediate_type,
6381 inner, size_int (bitnum));
6383 one = build_int_cst (intermediate_type, 1);
6385 if (code == EQ_EXPR)
6386 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6388 /* Put the AND last so it can combine with more things. */
6389 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6391 /* Make sure to return the proper type. */
6392 inner = fold_convert (result_type, inner);
6394 return inner;
6396 return NULL_TREE;
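/* Illustrative example (added commentary, not part of the original
   source; assumes a target where unsigned operations are preferred):
   (x & 8) != 0 folds to ((unsigned) x >> 3) & 1, and (x & 8) == 0
   folds to (((unsigned) x >> 3) ^ 1) & 1, so the result is already
   the 0/1 value the comparison would produce.  */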
6399 /* Check whether we are allowed to reorder operands arg0 and arg1,
6400 such that the evaluation of arg1 occurs before arg0. */
6402 static bool
6403 reorder_operands_p (tree arg0, tree arg1)
6405 if (! flag_evaluation_order)
6406 return true;
6407 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6408 return true;
6409 return ! TREE_SIDE_EFFECTS (arg0)
6410 && ! TREE_SIDE_EFFECTS (arg1);
6413 /* Test whether it is preferable to swap two operands, ARG0 and
6414 ARG1, for example because ARG0 is an integer constant and ARG1
6415 isn't. If REORDER is true, only recommend swapping if we can
6416 evaluate the operands in reverse order. */
6418 bool
6419 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6421 STRIP_SIGN_NOPS (arg0);
6422 STRIP_SIGN_NOPS (arg1);
6424 if (TREE_CODE (arg1) == INTEGER_CST)
6425 return 0;
6426 if (TREE_CODE (arg0) == INTEGER_CST)
6427 return 1;
6429 if (TREE_CODE (arg1) == REAL_CST)
6430 return 0;
6431 if (TREE_CODE (arg0) == REAL_CST)
6432 return 1;
6434 if (TREE_CODE (arg1) == COMPLEX_CST)
6435 return 0;
6436 if (TREE_CODE (arg0) == COMPLEX_CST)
6437 return 1;
6439 if (TREE_CONSTANT (arg1))
6440 return 0;
6441 if (TREE_CONSTANT (arg0))
6442 return 1;
6444 if (optimize_size)
6445 return 0;
6447 if (reorder && flag_evaluation_order
6448 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6449 return 0;
6451 if (DECL_P (arg1))
6452 return 0;
6453 if (DECL_P (arg0))
6454 return 1;
6456 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6457 for commutative and comparison operators. Ensuring a canonical
6458 form allows the optimizers to find additional redundancies without
6459 having to explicitly check for both orderings. */
6460 if (TREE_CODE (arg0) == SSA_NAME
6461 && TREE_CODE (arg1) == SSA_NAME
6462 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6463 return 1;
6465 return 0;
6468 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6469 ARG0 is extended to a wider type. */
6471 static tree
6472 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6474 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6475 tree arg1_unw;
6476 tree shorter_type, outer_type;
6477 tree min, max;
6478 bool above, below;
6480 if (arg0_unw == arg0)
6481 return NULL_TREE;
6482 shorter_type = TREE_TYPE (arg0_unw);
6484 #ifdef HAVE_canonicalize_funcptr_for_compare
6485 /* Disable this optimization if we're casting a function pointer
6486 type on targets that require function pointer canonicalization. */
6487 if (HAVE_canonicalize_funcptr_for_compare
6488 && TREE_CODE (shorter_type) == POINTER_TYPE
6489 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6490 return NULL_TREE;
6491 #endif
6493 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6494 return NULL_TREE;
6496 arg1_unw = get_unwidened (arg1, shorter_type);
6498 /* If possible, express the comparison in the shorter mode. */
6499 if ((code == EQ_EXPR || code == NE_EXPR
6500 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6501 && (TREE_TYPE (arg1_unw) == shorter_type
6502 || (TREE_CODE (arg1_unw) == INTEGER_CST
6503 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6504 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6505 && int_fits_type_p (arg1_unw, shorter_type))))
6506 return fold_build2 (code, type, arg0_unw,
6507 fold_convert (shorter_type, arg1_unw));
6509 if (TREE_CODE (arg1_unw) != INTEGER_CST
6510 || TREE_CODE (shorter_type) != INTEGER_TYPE
6511 || !int_fits_type_p (arg1_unw, shorter_type))
6512 return NULL_TREE;
6514 /* If we are comparing with an integer that does not fit into the range
6515 of the shorter type, the result is known. */
6516 outer_type = TREE_TYPE (arg1_unw);
6517 min = lower_bound_in_type (outer_type, shorter_type);
6518 max = upper_bound_in_type (outer_type, shorter_type);
6520 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6521 max, arg1_unw));
6522 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6523 arg1_unw, min));
6525 switch (code)
6527 case EQ_EXPR:
6528 if (above || below)
6529 return omit_one_operand (type, integer_zero_node, arg0);
6530 break;
6532 case NE_EXPR:
6533 if (above || below)
6534 return omit_one_operand (type, integer_one_node, arg0);
6535 break;
6537 case LT_EXPR:
6538 case LE_EXPR:
6539 if (above)
6540 return omit_one_operand (type, integer_one_node, arg0);
6541 else if (below)
6542 return omit_one_operand (type, integer_zero_node, arg0);
6544 case GT_EXPR:
6545 case GE_EXPR:
6546 if (above)
6547 return omit_one_operand (type, integer_zero_node, arg0);
6548 else if (below)
6549 return omit_one_operand (type, integer_one_node, arg0);
6551 default:
6552 break;
6555 return NULL_TREE;
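/* Illustrative examples (added commentary, not part of the original
   source): for an unsigned char c, (int) c == 300 folds to 0 because
   300 is outside the range of the narrower type; for a signed char s,
   (int) s < 7 folds to the cheaper comparison s < (signed char) 7
   carried out in the narrower mode.  */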
6558 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6559 only the signedness of ARG0 is changed. */
6561 static tree
6562 fold_sign_changed_comparison (enum tree_code code, tree type,
6563 tree arg0, tree arg1)
6565 tree arg0_inner, tmp;
6566 tree inner_type, outer_type;
6568 if (TREE_CODE (arg0) != NOP_EXPR
6569 && TREE_CODE (arg0) != CONVERT_EXPR)
6570 return NULL_TREE;
6572 outer_type = TREE_TYPE (arg0);
6573 arg0_inner = TREE_OPERAND (arg0, 0);
6574 inner_type = TREE_TYPE (arg0_inner);
6576 #ifdef HAVE_canonicalize_funcptr_for_compare
6577 /* Disable this optimization if we're casting a function pointer
6578 type on targets that require function pointer canonicalization. */
6579 if (HAVE_canonicalize_funcptr_for_compare
6580 && TREE_CODE (inner_type) == POINTER_TYPE
6581 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6582 return NULL_TREE;
6583 #endif
6585 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6586 return NULL_TREE;
6588 if (TREE_CODE (arg1) != INTEGER_CST
6589 && !((TREE_CODE (arg1) == NOP_EXPR
6590 || TREE_CODE (arg1) == CONVERT_EXPR)
6591 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6592 return NULL_TREE;
6594 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6595 && code != NE_EXPR
6596 && code != EQ_EXPR)
6597 return NULL_TREE;
6599 if (TREE_CODE (arg1) == INTEGER_CST)
6601 tmp = build_int_cst_wide (inner_type,
6602 TREE_INT_CST_LOW (arg1),
6603 TREE_INT_CST_HIGH (arg1));
6604 arg1 = force_fit_type (tmp, 0,
6605 TREE_OVERFLOW (arg1),
6606 TREE_CONSTANT_OVERFLOW (arg1));
6608 else
6609 arg1 = fold_convert (inner_type, arg1);
6611 return fold_build2 (code, type, arg0_inner, arg1);
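/* Illustrative example (added commentary, not part of the original
   source): for an unsigned int u, the comparison (int) u == 5 folds
   to u == 5U, dropping the sign-changing cast; for ordered
   comparisons such as (int) u < 5 the fold is not done, since
   changing the signedness would change the result.  */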
6614 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6615 the step of the array. Reconstructs s and delta in the case of s * delta
6616 being an integer constant (and thus already folded).
6617 ADDR is the address. OP1 is the multiplicative expression.
6618 If the function succeeds, the new address expression is returned. Otherwise
6619 NULL_TREE is returned. */
6621 static tree
6622 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6624 tree s, delta, step;
6625 tree ref = TREE_OPERAND (addr, 0), pref;
6626 tree ret, pos;
6627 tree itype;
6629 /* Canonicalize op1 into a possibly non-constant delta
6630 and an INTEGER_CST s. */
6631 if (TREE_CODE (op1) == MULT_EXPR)
6633 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6635 STRIP_NOPS (arg0);
6636 STRIP_NOPS (arg1);
6638 if (TREE_CODE (arg0) == INTEGER_CST)
6640 s = arg0;
6641 delta = arg1;
6643 else if (TREE_CODE (arg1) == INTEGER_CST)
6645 s = arg1;
6646 delta = arg0;
6648 else
6649 return NULL_TREE;
6651 else if (TREE_CODE (op1) == INTEGER_CST)
6653 delta = op1;
6654 s = NULL_TREE;
6656 else
6658 /* Treat op1 as delta * 1. */
6659 delta = op1;
6660 s = integer_one_node;
6663 for (;; ref = TREE_OPERAND (ref, 0))
6665 if (TREE_CODE (ref) == ARRAY_REF)
6667 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6668 if (! itype)
6669 continue;
6671 step = array_ref_element_size (ref);
6672 if (TREE_CODE (step) != INTEGER_CST)
6673 continue;
6675 if (s)
6677 if (! tree_int_cst_equal (step, s))
6678 continue;
6680 else
6682 /* Check whether delta is a multiple of step. */
6683 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6684 if (! tmp)
6685 continue;
6686 delta = tmp;
6689 break;
6692 if (!handled_component_p (ref))
6693 return NULL_TREE;
6696 /* We found a suitable array reference. So copy everything up to it,
6697 and replace the index. */
6699 pref = TREE_OPERAND (addr, 0);
6700 ret = copy_node (pref);
6701 pos = ret;
6703 while (pref != ref)
6705 pref = TREE_OPERAND (pref, 0);
6706 TREE_OPERAND (pos, 0) = copy_node (pref);
6707 pos = TREE_OPERAND (pos, 0);
6710 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6711 fold_convert (itype,
6712 TREE_OPERAND (pos, 1)),
6713 fold_convert (itype, delta));
6715 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
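/* Illustrative example (added commentary, not part of the original
   source): for an array a with 4-byte elements, the address
   expression &a[i] + delta * 4 is rewritten as &a[i + delta], moving
   the multiplication into the index; a bare constant like &a[i] + 8
   is likewise rewritten as &a[i + 2].  */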
6719 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6720 means A >= Y && A != MAX, but in this case we know that
6721 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6723 static tree
6724 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6726 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6728 if (TREE_CODE (bound) == LT_EXPR)
6729 a = TREE_OPERAND (bound, 0);
6730 else if (TREE_CODE (bound) == GT_EXPR)
6731 a = TREE_OPERAND (bound, 1);
6732 else
6733 return NULL_TREE;
6735 typea = TREE_TYPE (a);
6736 if (!INTEGRAL_TYPE_P (typea)
6737 && !POINTER_TYPE_P (typea))
6738 return NULL_TREE;
6740 if (TREE_CODE (ineq) == LT_EXPR)
6742 a1 = TREE_OPERAND (ineq, 1);
6743 y = TREE_OPERAND (ineq, 0);
6745 else if (TREE_CODE (ineq) == GT_EXPR)
6747 a1 = TREE_OPERAND (ineq, 0);
6748 y = TREE_OPERAND (ineq, 1);
6750 else
6751 return NULL_TREE;
6753 if (TREE_TYPE (a1) != typea)
6754 return NULL_TREE;
6756 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6757 if (!integer_onep (diff))
6758 return NULL_TREE;
6760 return fold_build2 (GE_EXPR, type, a, y);
6763 /* Fold a sum or difference of at least one multiplication.
6764 Returns the folded tree or NULL if no simplification could be made. */
6766 static tree
6767 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6769 tree arg00, arg01, arg10, arg11;
6770 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6772 /* (A * C) +- (B * C) -> (A+-B) * C.
6773 (A * C) +- A -> A * (C+-1).
6774 We are most concerned about the case where C is a constant,
6775 but other combinations show up during loop reduction. Since
6776 it is not difficult, try all four possibilities. */
6778 if (TREE_CODE (arg0) == MULT_EXPR)
6780 arg00 = TREE_OPERAND (arg0, 0);
6781 arg01 = TREE_OPERAND (arg0, 1);
6783 else
6785 arg00 = arg0;
6786 arg01 = build_one_cst (type);
6788 if (TREE_CODE (arg1) == MULT_EXPR)
6790 arg10 = TREE_OPERAND (arg1, 0);
6791 arg11 = TREE_OPERAND (arg1, 1);
6793 else
6795 arg10 = arg1;
6796 arg11 = build_one_cst (type);
6798 same = NULL_TREE;
6800 if (operand_equal_p (arg01, arg11, 0))
6801 same = arg01, alt0 = arg00, alt1 = arg10;
6802 else if (operand_equal_p (arg00, arg10, 0))
6803 same = arg00, alt0 = arg01, alt1 = arg11;
6804 else if (operand_equal_p (arg00, arg11, 0))
6805 same = arg00, alt0 = arg01, alt1 = arg10;
6806 else if (operand_equal_p (arg01, arg10, 0))
6807 same = arg01, alt0 = arg00, alt1 = arg11;
6809 /* No identical multiplicands; see if we can find a common
6810 power-of-two factor in non-power-of-two multiplies. This
6811 can help in multi-dimensional array access. */
6812 else if (host_integerp (arg01, 0)
6813 && host_integerp (arg11, 0))
6815 HOST_WIDE_INT int01, int11, tmp;
6816 bool swap = false;
6817 tree maybe_same;
6818 int01 = TREE_INT_CST_LOW (arg01);
6819 int11 = TREE_INT_CST_LOW (arg11);
6821 /* Move min of absolute values to int11. */
6822 if ((int01 >= 0 ? int01 : -int01)
6823 < (int11 >= 0 ? int11 : -int11))
6825 tmp = int01, int01 = int11, int11 = tmp;
6826 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6827 maybe_same = arg01;
6828 swap = true;
6830 else
6831 maybe_same = arg11;
6833 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6835 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6836 build_int_cst (TREE_TYPE (arg00),
6837 int01 / int11));
6838 alt1 = arg10;
6839 same = maybe_same;
6840 if (swap)
6841 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6845 if (same)
6846 return fold_build2 (MULT_EXPR, type,
6847 fold_build2 (code, type,
6848 fold_convert (type, alt0),
6849 fold_convert (type, alt1)),
6850 fold_convert (type, same));
6852 return NULL_TREE;
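/* Illustrative examples (added commentary, not part of the original
   source): x*3 + x*5 folds to x*8, and a*c + b*c folds to (a+b)*c.
   The power-of-two path handles mixed constants: x*12 + y*4 becomes
   (x*3 + y) * 4, exposing the common factor 4.  */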
6855 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6856 specified by EXPR into the buffer PTR of length LEN bytes.
6857 Return the number of bytes placed in the buffer, or zero
6858 upon failure. */
6860 static int
6861 native_encode_int (tree expr, unsigned char *ptr, int len)
6863 tree type = TREE_TYPE (expr);
6864 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6865 int byte, offset, word, words;
6866 unsigned char value;
6868 if (total_bytes > len)
6869 return 0;
6870 words = total_bytes / UNITS_PER_WORD;
6872 for (byte = 0; byte < total_bytes; byte++)
6874 int bitpos = byte * BITS_PER_UNIT;
6875 if (bitpos < HOST_BITS_PER_WIDE_INT)
6876 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6877 else
6878 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6879 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6881 if (total_bytes > UNITS_PER_WORD)
6883 word = byte / UNITS_PER_WORD;
6884 if (WORDS_BIG_ENDIAN)
6885 word = (words - 1) - word;
6886 offset = word * UNITS_PER_WORD;
6887 if (BYTES_BIG_ENDIAN)
6888 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6889 else
6890 offset += byte % UNITS_PER_WORD;
6892 else
6893 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6894 ptr[offset] = value;
6896 return total_bytes;
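/* Illustrative example (added commentary, not part of the original
   source): encoding the 32-bit INTEGER_CST 0x11223344 for a
   little-endian target stores the bytes 44 33 22 11 into PTR and
   returns 4; a big-endian target stores 11 22 33 44 instead.  */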
6900 /* Subroutine of native_encode_expr. Encode the REAL_CST
6901 specified by EXPR into the buffer PTR of length LEN bytes.
6902 Return the number of bytes placed in the buffer, or zero
6903 upon failure. */
6905 static int
6906 native_encode_real (tree expr, unsigned char *ptr, int len)
6908 tree type = TREE_TYPE (expr);
6909 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6910 int byte, offset, word, words;
6911 unsigned char value;
6913 /* There are always 32 bits in each long, no matter the size of
6914 the host's long. We handle floating point representations with
6915 up to 192 bits. */
6916 long tmp[6];
6918 if (total_bytes > len)
6919 return 0;
6920 words = total_bytes / UNITS_PER_WORD;
6922 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6924 for (byte = 0; byte < total_bytes; byte++)
6926 int bitpos = byte * BITS_PER_UNIT;
6927 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6929 if (total_bytes > UNITS_PER_WORD)
6931 word = byte / UNITS_PER_WORD;
6932 if (FLOAT_WORDS_BIG_ENDIAN)
6933 word = (words - 1) - word;
6934 offset = word * UNITS_PER_WORD;
6935 if (BYTES_BIG_ENDIAN)
6936 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6937 else
6938 offset += byte % UNITS_PER_WORD;
6940 else
6941 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6942 ptr[offset] = value;
6944 return total_bytes;
6947 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6948 specified by EXPR into the buffer PTR of length LEN bytes.
6949 Return the number of bytes placed in the buffer, or zero
6950 upon failure. */
6952 static int
6953 native_encode_complex (tree expr, unsigned char *ptr, int len)
6955 int rsize, isize;
6956 tree part;
6958 part = TREE_REALPART (expr);
6959 rsize = native_encode_expr (part, ptr, len);
6960 if (rsize == 0)
6961 return 0;
6962 part = TREE_IMAGPART (expr);
6963 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6964 if (isize != rsize)
6965 return 0;
6966 return rsize + isize;
6970 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6971 specified by EXPR into the buffer PTR of length LEN bytes.
6972 Return the number of bytes placed in the buffer, or zero
6973 upon failure. */
6975 static int
6976 native_encode_vector (tree expr, unsigned char *ptr, int len)
6978 int i, size, offset, count;
6979 tree itype, elem, elements;
6981 offset = 0;
6982 elements = TREE_VECTOR_CST_ELTS (expr);
6983 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6984 itype = TREE_TYPE (TREE_TYPE (expr));
6985 size = GET_MODE_SIZE (TYPE_MODE (itype));
6986 for (i = 0; i < count; i++)
6988 if (elements)
6990 elem = TREE_VALUE (elements);
6991 elements = TREE_CHAIN (elements);
6993 else
6994 elem = NULL_TREE;
6996 if (elem)
6998 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6999 return 0;
7001 else
7003 if (offset + size > len)
7004 return 0;
7005 memset (ptr+offset, 0, size);
7007 offset += size;
7009 return offset;
7013 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7014 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7015 buffer PTR of length LEN bytes. Return the number of bytes
7016 placed in the buffer, or zero upon failure. */
7018 static int
7019 native_encode_expr (tree expr, unsigned char *ptr, int len)
7021 switch (TREE_CODE (expr))
7023 case INTEGER_CST:
7024 return native_encode_int (expr, ptr, len);
7026 case REAL_CST:
7027 return native_encode_real (expr, ptr, len);
7029 case COMPLEX_CST:
7030 return native_encode_complex (expr, ptr, len);
7032 case VECTOR_CST:
7033 return native_encode_vector (expr, ptr, len);
7035 default:
7036 return 0;
7041 /* Subroutine of native_interpret_expr. Interpret the contents of
7042 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7043 If the buffer cannot be interpreted, return NULL_TREE. */
7045 static tree
7046 native_interpret_int (tree type, unsigned char *ptr, int len)
7048 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7049 int byte, offset, word, words;
7050 unsigned char value;
7051 unsigned HOST_WIDE_INT lo = 0;
7052 HOST_WIDE_INT hi = 0;
7054 if (total_bytes > len)
7055 return NULL_TREE;
7056 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7057 return NULL_TREE;
7058 words = total_bytes / UNITS_PER_WORD;
7060 for (byte = 0; byte < total_bytes; byte++)
7062 int bitpos = byte * BITS_PER_UNIT;
7063 if (total_bytes > UNITS_PER_WORD)
7065 word = byte / UNITS_PER_WORD;
7066 if (WORDS_BIG_ENDIAN)
7067 word = (words - 1) - word;
7068 offset = word * UNITS_PER_WORD;
7069 if (BYTES_BIG_ENDIAN)
7070 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7071 else
7072 offset += byte % UNITS_PER_WORD;
7074 else
7075 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7076 value = ptr[offset];
7078 if (bitpos < HOST_BITS_PER_WIDE_INT)
7079 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7080 else
7081 hi |= (unsigned HOST_WIDE_INT) value
7082 << (bitpos - HOST_BITS_PER_WIDE_INT);
7085 return force_fit_type (build_int_cst_wide (type, lo, hi),
7086 0, false, false);
7090 /* Subroutine of native_interpret_expr. Interpret the contents of
7091 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7092 If the buffer cannot be interpreted, return NULL_TREE. */
7094 static tree
7095 native_interpret_real (tree type, unsigned char *ptr, int len)
7097 enum machine_mode mode = TYPE_MODE (type);
7098 int total_bytes = GET_MODE_SIZE (mode);
7099 int byte, offset, word, words;
7100 unsigned char value;
7101 /* There are always 32 bits in each long, no matter the size of
7102 the host's long. We handle floating point representations with
7103 up to 192 bits. */
7104 REAL_VALUE_TYPE r;
7105 long tmp[6];
7108 if (total_bytes > len || total_bytes > 24)
7109 return NULL_TREE;
7110 words = total_bytes / UNITS_PER_WORD;
7112 memset (tmp, 0, sizeof (tmp));
7113 for (byte = 0; byte < total_bytes; byte++)
7115 int bitpos = byte * BITS_PER_UNIT;
7116 if (total_bytes > UNITS_PER_WORD)
7118 word = byte / UNITS_PER_WORD;
7119 if (FLOAT_WORDS_BIG_ENDIAN)
7120 word = (words - 1) - word;
7121 offset = word * UNITS_PER_WORD;
7122 if (BYTES_BIG_ENDIAN)
7123 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7124 else
7125 offset += byte % UNITS_PER_WORD;
7127 else
7128 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7129 value = ptr[offset];
7131 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7134 real_from_target (&r, tmp, mode);
7135 return build_real (type, r);
7139 /* Subroutine of native_interpret_expr. Interpret the contents of
7140 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7141 If the buffer cannot be interpreted, return NULL_TREE. */
7143 static tree
7144 native_interpret_complex (tree type, unsigned char *ptr, int len)
7146 tree etype, rpart, ipart;
7147 int size;
7149 etype = TREE_TYPE (type);
7150 size = GET_MODE_SIZE (TYPE_MODE (etype));
7151 if (size * 2 > len)
7152 return NULL_TREE;
7153 rpart = native_interpret_expr (etype, ptr, size);
7154 if (!rpart)
7155 return NULL_TREE;
7156 ipart = native_interpret_expr (etype, ptr+size, size);
7157 if (!ipart)
7158 return NULL_TREE;
7159 return build_complex (type, rpart, ipart);
7163 /* Subroutine of native_interpret_expr. Interpret the contents of
7164 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7165 If the buffer cannot be interpreted, return NULL_TREE. */
7167 static tree
7168 native_interpret_vector (tree type, unsigned char *ptr, int len)
7170 tree etype, elem, elements;
7171 int i, size, count;
7173 etype = TREE_TYPE (type);
7174 size = GET_MODE_SIZE (TYPE_MODE (etype));
7175 count = TYPE_VECTOR_SUBPARTS (type);
7176 if (size * count > len)
7177 return NULL_TREE;
7179 elements = NULL_TREE;
7180 for (i = count - 1; i >= 0; i--)
7182 elem = native_interpret_expr (etype, ptr+(i*size), size);
7183 if (!elem)
7184 return NULL_TREE;
7185 elements = tree_cons (NULL_TREE, elem, elements);
7187 return build_vector (type, elements);
7191 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7192 the buffer PTR of length LEN as a constant of type TYPE. For
7193 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7194 we return a REAL_CST, etc. If the buffer cannot be interpreted,
7195 return NULL_TREE. */
7197 static tree
7198 native_interpret_expr (tree type, unsigned char *ptr, int len)
7200 switch (TREE_CODE (type))
7202 case INTEGER_TYPE:
7203 case ENUMERAL_TYPE:
7204 case BOOLEAN_TYPE:
7205 return native_interpret_int (type, ptr, len);
7207 case REAL_TYPE:
7208 return native_interpret_real (type, ptr, len);
7210 case COMPLEX_TYPE:
7211 return native_interpret_complex (type, ptr, len);
7213 case VECTOR_TYPE:
7214 return native_interpret_vector (type, ptr, len);
7216 default:
7217 return NULL_TREE;
7222 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7223 TYPE at compile-time. If we're unable to perform the conversion
7224 return NULL_TREE. */
7226 static tree
7227 fold_view_convert_expr (tree type, tree expr)
7229 /* We support up to 512-bit values (for V8DFmode). */
7230 unsigned char buffer[64];
7231 int len;
7233 /* Check that the host and target are sane. */
7234 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7235 return NULL_TREE;
7237 len = native_encode_expr (expr, buffer, sizeof (buffer));
7238 if (len == 0)
7239 return NULL_TREE;
7241 return native_interpret_expr (type, buffer, len);
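/* Illustrative example (added commentary, not part of the original
   source): VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216
   (0x3f800000), the bit pattern of the float 1.0 reinterpreted as a
   32-bit integer; the encode/interpret round trip uses the target's
   byte order, so the value does not depend on endianness.  */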
7245 /* Fold a unary expression of code CODE and type TYPE with operand
7246 OP0. Return the folded expression if folding is successful.
7247 Otherwise, return NULL_TREE. */
7249 tree
7250 fold_unary (enum tree_code code, tree type, tree op0)
7252 tree tem;
7253 tree arg0;
7254 enum tree_code_class kind = TREE_CODE_CLASS (code);
7256 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7257 && TREE_CODE_LENGTH (code) == 1);
7259 arg0 = op0;
7260 if (arg0)
7262 if (code == NOP_EXPR || code == CONVERT_EXPR
7263 || code == FLOAT_EXPR || code == ABS_EXPR)
7265 /* Don't use STRIP_NOPS, because signedness of argument type
7266 matters. */
7267 STRIP_SIGN_NOPS (arg0);
7269 else
7271 /* Strip any conversions that don't change the mode. This
7272 is safe for every expression, except for a comparison
7273 expression because its signedness is derived from its
7274 operands.
7276 Note that this is done as an internal manipulation within
7277 the constant folder, in order to find the simplest
7278 representation of the arguments so that their form can be
7279 studied. In any case, the appropriate type conversions
7280 should be put back in the tree that will get out of the
7281 constant folder. */
7282 STRIP_NOPS (arg0);
7286 if (TREE_CODE_CLASS (code) == tcc_unary)
7288 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7289 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7290 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7291 else if (TREE_CODE (arg0) == COND_EXPR)
7293 tree arg01 = TREE_OPERAND (arg0, 1);
7294 tree arg02 = TREE_OPERAND (arg0, 2);
7295 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7296 arg01 = fold_build1 (code, type, arg01);
7297 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7298 arg02 = fold_build1 (code, type, arg02);
7299 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7300 arg01, arg02);
7302 /* If this was a conversion, and all we did was to move it
7303 inside the COND_EXPR, bring it back out. But leave it if
7304 it is a conversion from integer to integer and the
7305 result precision is no wider than a word since such a
7306 conversion is cheap and may be optimized away by combine,
7307 while it couldn't if it were outside the COND_EXPR. Then return
7308 so we don't get into an infinite recursion loop taking the
7309 conversion out and then back in. */
7311 if ((code == NOP_EXPR || code == CONVERT_EXPR
7312 || code == NON_LVALUE_EXPR)
7313 && TREE_CODE (tem) == COND_EXPR
7314 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7315 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7316 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7317 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7318 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7319 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7320 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7321 && (INTEGRAL_TYPE_P
7322 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7323 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7324 || flag_syntax_only))
7325 tem = build1 (code, type,
7326 build3 (COND_EXPR,
7327 TREE_TYPE (TREE_OPERAND
7328 (TREE_OPERAND (tem, 1), 0)),
7329 TREE_OPERAND (tem, 0),
7330 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7331 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7332 return tem;
7334 else if (COMPARISON_CLASS_P (arg0))
7336 if (TREE_CODE (type) == BOOLEAN_TYPE)
7338 arg0 = copy_node (arg0);
7339 TREE_TYPE (arg0) = type;
7340 return arg0;
7342 else if (TREE_CODE (type) != INTEGER_TYPE)
7343 return fold_build3 (COND_EXPR, type, arg0,
7344 fold_build1 (code, type,
7345 integer_one_node),
7346 fold_build1 (code, type,
7347 integer_zero_node));
7351 switch (code)
7353 case NOP_EXPR:
7354 case FLOAT_EXPR:
7355 case CONVERT_EXPR:
7356 case FIX_TRUNC_EXPR:
7357 if (TREE_TYPE (op0) == type)
7358 return op0;
7360 /* If we have (type) (a CMP b) and type is an integral type, return
7361 new expression involving the new type. */
7362 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7363 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7364 TREE_OPERAND (op0, 1));
7366 /* Handle cases of two conversions in a row. */
7367 if (TREE_CODE (op0) == NOP_EXPR
7368 || TREE_CODE (op0) == CONVERT_EXPR)
7370 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7371 tree inter_type = TREE_TYPE (op0);
7372 int inside_int = INTEGRAL_TYPE_P (inside_type);
7373 int inside_ptr = POINTER_TYPE_P (inside_type);
7374 int inside_float = FLOAT_TYPE_P (inside_type);
7375 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7376 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7377 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7378 int inter_int = INTEGRAL_TYPE_P (inter_type);
7379 int inter_ptr = POINTER_TYPE_P (inter_type);
7380 int inter_float = FLOAT_TYPE_P (inter_type);
7381 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7382 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7383 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7384 int final_int = INTEGRAL_TYPE_P (type);
7385 int final_ptr = POINTER_TYPE_P (type);
7386 int final_float = FLOAT_TYPE_P (type);
7387 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7388 unsigned int final_prec = TYPE_PRECISION (type);
7389 int final_unsignedp = TYPE_UNSIGNED (type);
7391 /* In addition to the cases of two conversions in a row
7392 handled below, if we are converting something to its own
7393 type via an object of identical or wider precision, neither
7394 conversion is needed. */
7395 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7396 && (((inter_int || inter_ptr) && final_int)
7397 || (inter_float && final_float))
7398 && inter_prec >= final_prec)
7399 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7401 /* Likewise, if the intermediate and final types are either both
7402 float or both integer, we don't need the middle conversion if
7403 it is wider than the final type and doesn't change the signedness
7404 (for integers). Avoid this if the final type is a pointer
7405 since then we sometimes need the inner conversion. Likewise if
7406 the outer has a precision not equal to the size of its mode. */
7407 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7408 || (inter_float && inside_float)
7409 || (inter_vec && inside_vec))
7410 && inter_prec >= inside_prec
7411 && (inter_float || inter_vec
7412 || inter_unsignedp == inside_unsignedp)
7413 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7414 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7415 && ! final_ptr
7416 && (! final_vec || inter_prec == inside_prec))
7417 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
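/* Illustrative sketch (hypothetical variable): for
     short s;
   the widening-then-truncating pair (int) (long) s keeps the middle
   conversion only as dead work, so it folds to the direct (int) s.  */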
7419 /* If we have a sign-extension of a zero-extended value, we can
7420 replace that by a single zero-extension. */
7421 if (inside_int && inter_int && final_int
7422 && inside_prec < inter_prec && inter_prec < final_prec
7423 && inside_unsignedp && !inter_unsignedp)
7424 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
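/* Sketch of this case (hypothetical variable):
     unsigned char c;
   in (long) (int) c the inner cast zero-extends and the outer one
   sign-extends an already-nonnegative value, so the pair folds to
   the single zero-extension (long) c.  */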
7426 /* Two conversions in a row are not needed unless:
7427 - some conversion is floating-point (overstrict for now), or
7428 - some conversion is a vector (overstrict for now), or
7429 - the intermediate type is narrower than both initial and
7430 final, or
7431 - the intermediate type and innermost type differ in signedness,
7432 and the outermost type is wider than the intermediate, or
7433 - the initial type is a pointer type and the precisions of the
7434 intermediate and final types differ, or
7435 - the final type is a pointer type and the precisions of the
7436 initial and intermediate types differ, or
7437 - the final type is a pointer type and the initial type is not, or
7438 - the initial type is a pointer to an array and the final type
7439 is not. */
7440 if (! inside_float && ! inter_float && ! final_float
7441 && ! inside_vec && ! inter_vec && ! final_vec
7442 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7443 && ! (inside_int && inter_int
7444 && inter_unsignedp != inside_unsignedp
7445 && inter_prec < final_prec)
7446 && ((inter_unsignedp && inter_prec > inside_prec)
7447 == (final_unsignedp && final_prec > inter_prec))
7448 && ! (inside_ptr && inter_prec != final_prec)
7449 && ! (final_ptr && inside_prec != inter_prec)
7450 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7451 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7452 && final_ptr == inside_ptr
7453 && ! (inside_ptr
7454 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7455 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7456 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
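/* A concrete instance of the rule above (hypothetical variable):
     long l;
   the double narrowing (short) (int) l folds to the single
   truncation (short) l, because the intermediate type is at least
   as wide as the final one and no signedness trap applies.  */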
7459 /* Handle (T *)&A.B.C for A being of type T and B and C
7460 living at offset zero. This occurs frequently in
7461 C++ upcasting and then accessing the base. */
7462 if (TREE_CODE (op0) == ADDR_EXPR
7463 && POINTER_TYPE_P (type)
7464 && handled_component_p (TREE_OPERAND (op0, 0)))
7466 HOST_WIDE_INT bitsize, bitpos;
7467 tree offset;
7468 enum machine_mode mode;
7469 int unsignedp, volatilep;
7470 tree base = TREE_OPERAND (op0, 0);
7471 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7472 &mode, &unsignedp, &volatilep, false);
7473 /* If the reference was to a (constant) zero offset, we can use
7474 the address of the base if it has the same base type
7475 as the result type. */
7476 if (! offset && bitpos == 0
7477 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7478 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7479 return fold_convert (type, build_fold_addr_expr (base));
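/* Illustrative sketch with hypothetical types:
     struct C { int i; };
     struct B { struct C c; };
     struct T { struct B b; } a;
   here (struct T *) &a.b.c refers to offset zero of a and the cast
   target matches a's type, so the expression folds to &a.  */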
7482 if ((TREE_CODE (op0) == MODIFY_EXPR
7483 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7484 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7485 /* Detect assigning a bitfield. */
7486 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7487 && DECL_BIT_FIELD
7488 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7490 /* Don't leave an assignment inside a conversion
7491 unless assigning a bitfield. */
7492 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7493 /* First do the assignment, then return converted constant. */
7494 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7495 TREE_NO_WARNING (tem) = 1;
7496 TREE_USED (tem) = 1;
7497 return tem;
7500 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7501 constant (if x has signed type, the sign bit cannot be set
7502 in c). This folds extension into the BIT_AND_EXPR. */
7503 if (INTEGRAL_TYPE_P (type)
7504 && TREE_CODE (type) != BOOLEAN_TYPE
7505 && TREE_CODE (op0) == BIT_AND_EXPR
7506 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7508 tree and = op0;
7509 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7510 int change = 0;
7512 if (TYPE_UNSIGNED (TREE_TYPE (and))
7513 || (TYPE_PRECISION (type)
7514 <= TYPE_PRECISION (TREE_TYPE (and))))
7515 change = 1;
7516 else if (TYPE_PRECISION (TREE_TYPE (and1))
7517 <= HOST_BITS_PER_WIDE_INT
7518 && host_integerp (and1, 1))
7520 unsigned HOST_WIDE_INT cst;
7522 cst = tree_low_cst (and1, 1);
7523 cst &= (HOST_WIDE_INT) -1
7524 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7525 change = (cst == 0);
7526 #ifdef LOAD_EXTEND_OP
7527 if (change
7528 && !flag_syntax_only
7529 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7530 == ZERO_EXTEND))
7532 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7533 and0 = fold_convert (uns, and0);
7534 and1 = fold_convert (uns, and1);
7536 #endif
7538 if (change)
7540 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7541 TREE_INT_CST_HIGH (and1));
7542 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7543 TREE_CONSTANT_OVERFLOW (and1));
7544 return fold_build2 (BIT_AND_EXPR, type,
7545 fold_convert (type, and0), tem);
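/* E.g. (a sketch, hypothetical variable): for unsigned int u,
     (unsigned long) (u & 0xff)
   becomes
     (unsigned long) u & 0xffUL,
   folding the widening conversion into the mask.  */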
7549 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7550 T2 being pointers to types of the same size. */
7551 if (POINTER_TYPE_P (type)
7552 && BINARY_CLASS_P (arg0)
7553 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7554 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7556 tree arg00 = TREE_OPERAND (arg0, 0);
7557 tree t0 = type;
7558 tree t1 = TREE_TYPE (arg00);
7559 tree tt0 = TREE_TYPE (t0);
7560 tree tt1 = TREE_TYPE (t1);
7561 tree s0 = TYPE_SIZE (tt0);
7562 tree s1 = TYPE_SIZE (tt1);
7564 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7565 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7566 TREE_OPERAND (arg0, 1));
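/* Sketch (hypothetical variables; assumes int and unsigned int
   have equal size):
     float *f;
   (unsigned int *) ((int *) f + i) becomes (unsigned int *) f + i,
   dropping the inner pointer conversion.  */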
7569 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7570 of the same precision, and X is an integer type not narrower than
7571 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7572 if (INTEGRAL_TYPE_P (type)
7573 && TREE_CODE (op0) == BIT_NOT_EXPR
7574 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7575 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7576 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7577 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7579 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7580 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7581 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7582 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
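/* E.g. (a sketch): for unsigned int u,
     (unsigned int) ~((int) u)
   becomes ~u, since both casts are between same-precision integral
   types and merely reinterpret the bits that BIT_NOT flips.  */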
7585 tem = fold_convert_const (code, type, arg0);
7586 return tem ? tem : NULL_TREE;
7588 case VIEW_CONVERT_EXPR:
7589 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7590 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7591 return fold_view_convert_expr (type, op0);
7593 case NEGATE_EXPR:
7594 tem = fold_negate_expr (arg0);
7595 if (tem)
7596 return fold_convert (type, tem);
7597 return NULL_TREE;
7599 case ABS_EXPR:
7600 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7601 return fold_abs_const (arg0, type);
7602 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7603 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7604 /* Convert fabs((double)float) into (double)fabsf(float). */
7605 else if (TREE_CODE (arg0) == NOP_EXPR
7606 && TREE_CODE (type) == REAL_TYPE)
7608 tree targ0 = strip_float_extensions (arg0);
7609 if (targ0 != arg0)
7610 return fold_convert (type, fold_build1 (ABS_EXPR,
7611 TREE_TYPE (targ0),
7612 targ0));
7614 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7615 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7616 return arg0;
7618 /* Strip sign ops from argument. */
7619 if (TREE_CODE (type) == REAL_TYPE)
7621 tem = fold_strip_sign_ops (arg0);
7622 if (tem)
7623 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7625 return NULL_TREE;
7627 case CONJ_EXPR:
7628 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7629 return fold_convert (type, arg0);
7630 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7632 tree itype = TREE_TYPE (type);
7633 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7634 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7635 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7637 if (TREE_CODE (arg0) == COMPLEX_CST)
7639 tree itype = TREE_TYPE (type);
7640 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7641 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7642 return build_complex (type, rpart, negate_expr (ipart));
7644 if (TREE_CODE (arg0) == CONJ_EXPR)
7645 return fold_convert (type, TREE_OPERAND (arg0, 0));
7646 return NULL_TREE;
7648 case BIT_NOT_EXPR:
7649 if (TREE_CODE (arg0) == INTEGER_CST)
7650 return fold_not_const (arg0, type);
7651 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7652 return TREE_OPERAND (arg0, 0);
7653 /* Convert ~ (-A) to A - 1. */
7654 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7655 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7656 build_int_cst (type, 1));
7657 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7658 else if (INTEGRAL_TYPE_P (type)
7659 && ((TREE_CODE (arg0) == MINUS_EXPR
7660 && integer_onep (TREE_OPERAND (arg0, 1)))
7661 || (TREE_CODE (arg0) == PLUS_EXPR
7662 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7663 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
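/* Both rewrites follow from two's complement arithmetic, where
   -A == ~A + 1: hence ~(-A) == A - 1 and ~(A - 1) == -A.  */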
7664 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7665 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7666 && (tem = fold_unary (BIT_NOT_EXPR, type,
7667 fold_convert (type,
7668 TREE_OPERAND (arg0, 0)))))
7669 return fold_build2 (BIT_XOR_EXPR, type, tem,
7670 fold_convert (type, TREE_OPERAND (arg0, 1)));
7671 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7672 && (tem = fold_unary (BIT_NOT_EXPR, type,
7673 fold_convert (type,
7674 TREE_OPERAND (arg0, 1)))))
7675 return fold_build2 (BIT_XOR_EXPR, type,
7676 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7678 return NULL_TREE;
7680 case TRUTH_NOT_EXPR:
7681 /* The argument to invert_truthvalue must have Boolean type. */
7682 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7683 arg0 = fold_convert (boolean_type_node, arg0);
7685 /* Note that the operand of this must be an int
7686 and its values must be 0 or 1.
7687 ("true" is a fixed value perhaps depending on the language,
7688 but we don't handle values other than 1 correctly yet.) */
7689 tem = fold_truth_not_expr (arg0);
7690 if (!tem)
7691 return NULL_TREE;
7692 return fold_convert (type, tem);
7694 case REALPART_EXPR:
7695 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7696 return fold_convert (type, arg0);
7697 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7698 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7699 TREE_OPERAND (arg0, 1));
7700 if (TREE_CODE (arg0) == COMPLEX_CST)
7701 return fold_convert (type, TREE_REALPART (arg0));
7702 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7704 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7705 tem = fold_build2 (TREE_CODE (arg0), itype,
7706 fold_build1 (REALPART_EXPR, itype,
7707 TREE_OPERAND (arg0, 0)),
7708 fold_build1 (REALPART_EXPR, itype,
7709 TREE_OPERAND (arg0, 1)));
7710 return fold_convert (type, tem);
7712 if (TREE_CODE (arg0) == CONJ_EXPR)
7714 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7715 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7716 return fold_convert (type, tem);
7718 if (TREE_CODE (arg0) == CALL_EXPR)
7720 tree fn = get_callee_fndecl (arg0);
7721 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7722 switch (DECL_FUNCTION_CODE (fn))
7724 CASE_FLT_FN (BUILT_IN_CEXPI):
7725 fn = mathfn_built_in (type, BUILT_IN_COS);
7726 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7728 default:;
7731 return NULL_TREE;
7733 case IMAGPART_EXPR:
7734 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7735 return fold_convert (type, integer_zero_node);
7736 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7737 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7738 TREE_OPERAND (arg0, 0));
7739 if (TREE_CODE (arg0) == COMPLEX_CST)
7740 return fold_convert (type, TREE_IMAGPART (arg0));
7741 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7743 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7744 tem = fold_build2 (TREE_CODE (arg0), itype,
7745 fold_build1 (IMAGPART_EXPR, itype,
7746 TREE_OPERAND (arg0, 0)),
7747 fold_build1 (IMAGPART_EXPR, itype,
7748 TREE_OPERAND (arg0, 1)));
7749 return fold_convert (type, tem);
7751 if (TREE_CODE (arg0) == CONJ_EXPR)
7753 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7754 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7755 return fold_convert (type, negate_expr (tem));
7757 if (TREE_CODE (arg0) == CALL_EXPR)
7759 tree fn = get_callee_fndecl (arg0);
7760 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7761 switch (DECL_FUNCTION_CODE (fn))
7763 CASE_FLT_FN (BUILT_IN_CEXPI):
7764 fn = mathfn_built_in (type, BUILT_IN_SIN);
7765 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7767 default:;
7770 return NULL_TREE;
7772 default:
7773 return NULL_TREE;
7774 } /* switch (code) */
7777 /* Fold a binary expression of code CODE and type TYPE with operands
7778 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7779 Return the folded expression if folding is successful. Otherwise,
7780 return NULL_TREE. */
7782 static tree
7783 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7785 enum tree_code compl_code;
7787 if (code == MIN_EXPR)
7788 compl_code = MAX_EXPR;
7789 else if (code == MAX_EXPR)
7790 compl_code = MIN_EXPR;
7791 else
7792 gcc_unreachable ();
7794 /* MIN (MAX (a, b), b) == b. */
7795 if (TREE_CODE (op0) == compl_code
7796 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7797 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7799 /* MIN (MAX (b, a), b) == b. */
7800 if (TREE_CODE (op0) == compl_code
7801 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7802 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7803 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7805 /* MIN (a, MAX (a, b)) == a. */
7806 if (TREE_CODE (op1) == compl_code
7807 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7808 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7809 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7811 /* MIN (a, MAX (b, a)) == a. */
7812 if (TREE_CODE (op1) == compl_code
7813 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7814 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7815 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7817 return NULL_TREE;
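/* Usage sketch: for signed ints a and b, MIN_EXPR <MAX_EXPR <a, b>, b>
   matches the first pattern and folds to b; omit_one_operand keeps
   a only for its side effects, if any.  */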
7820 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7821 by changing CODE to reduce the magnitude of constants involved in
7822 ARG0 of the comparison.
7823 Returns a canonicalized comparison tree if a simplification was
7824 possible, otherwise returns NULL_TREE. */
7826 static tree
7827 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7828 tree arg0, tree arg1)
7830 enum tree_code code0 = TREE_CODE (arg0);
7831 tree t, cst0 = NULL_TREE;
7832 int sgn0;
7833 bool swap = false;
7835 /* Match A +- CST code arg1 and CST code arg1. */
7836 if (!(((code0 == MINUS_EXPR
7837 || code0 == PLUS_EXPR)
7838 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7839 || code0 == INTEGER_CST))
7840 return NULL_TREE;
7842 /* Identify the constant in arg0 and its sign. */
7843 if (code0 == INTEGER_CST)
7844 cst0 = arg0;
7845 else
7846 cst0 = TREE_OPERAND (arg0, 1);
7847 sgn0 = tree_int_cst_sgn (cst0);
7849 /* Overflowed constants and zero will cause problems. */
7850 if (integer_zerop (cst0)
7851 || TREE_OVERFLOW (cst0))
7852 return NULL_TREE;
7854 /* See if we can reduce the magnitude of the constant in
7855 arg0 by changing the comparison code. */
7856 if (code0 == INTEGER_CST)
7858 /* CST <= arg1 -> CST-1 < arg1. */
7859 if (code == LE_EXPR && sgn0 == 1)
7860 code = LT_EXPR;
7861 /* -CST < arg1 -> -CST-1 <= arg1. */
7862 else if (code == LT_EXPR && sgn0 == -1)
7863 code = LE_EXPR;
7864 /* CST > arg1 -> CST-1 >= arg1. */
7865 else if (code == GT_EXPR && sgn0 == 1)
7866 code = GE_EXPR;
7867 /* -CST >= arg1 -> -CST-1 > arg1. */
7868 else if (code == GE_EXPR && sgn0 == -1)
7869 code = GT_EXPR;
7870 else
7871 return NULL_TREE;
7872 /* arg1 code' CST' might be more canonical. */
7873 swap = true;
7875 else
7877 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7878 if (code == LT_EXPR
7879 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7880 code = LE_EXPR;
7881 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7882 else if (code == GT_EXPR
7883 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7884 code = GE_EXPR;
7885 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7886 else if (code == LE_EXPR
7887 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7888 code = LT_EXPR;
7889 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7890 else if (code == GE_EXPR
7891 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7892 code = GT_EXPR;
7893 else
7894 return NULL_TREE;
7897 /* Now build the constant reduced in magnitude. */
7898 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7899 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7900 if (code0 != INTEGER_CST)
7901 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7903 /* If swapping might yield a more canonical form, do so. */
7904 if (swap)
7905 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7906 else
7907 return fold_build2 (code, type, t, arg1);
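/* Sketch of the effect for signed int x and y (valid only because
   signed overflow is undefined here):
     x + 2 <= y   becomes   x + 1 < y
     5 <= y       becomes   y > 4   (constant moved to the right).  */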
7910 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7911 overflow further. Try to decrease the magnitude of constants involved
7912 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7913 and put sole constants at the second argument position.
7914 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7916 static tree
7917 maybe_canonicalize_comparison (enum tree_code code, tree type,
7918 tree arg0, tree arg1)
7920 tree t;
7922 /* In principle pointers also have undefined overflow behavior,
7923 but that causes problems elsewhere. */
7924 if ((flag_wrapv || flag_trapv)
7925 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7926 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7927 return NULL_TREE;
7929 /* Try canonicalization by simplifying arg0. */
7930 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7931 if (t)
7932 return t;
7934 /* Try canonicalization by simplifying arg1 using the swapped
7935 comparison. */
7936 code = swap_tree_comparison (code);
7937 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7940 /* Subroutine of fold_binary. This routine performs all of the
7941 transformations that are common to the equality/inequality
7942 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7943 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7944 fold_binary should not call this function directly. Fold a comparison with
7945 tree code CODE and type TYPE with operands OP0 and OP1. Return
7946 the folded comparison or NULL_TREE. */
7948 static tree
7949 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7951 tree arg0, arg1, tem;
7953 arg0 = op0;
7954 arg1 = op1;
7956 STRIP_SIGN_NOPS (arg0);
7957 STRIP_SIGN_NOPS (arg1);
7959 tem = fold_relational_const (code, type, arg0, arg1);
7960 if (tem != NULL_TREE)
7961 return tem;
7963 /* If one arg is a real or integer constant, put it last. */
7964 if (tree_swap_operands_p (arg0, arg1, true))
7965 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7967 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7968 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7969 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7970 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7971 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7972 && !(flag_wrapv || flag_trapv))
7973 && (TREE_CODE (arg1) == INTEGER_CST
7974 && !TREE_OVERFLOW (arg1)))
7976 tree const1 = TREE_OPERAND (arg0, 1);
7977 tree const2 = arg1;
7978 tree variable = TREE_OPERAND (arg0, 0);
7979 tree lhs;
7980 int lhs_add;
7981 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7983 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7984 TREE_TYPE (arg1), const2, const1);
7985 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7986 && (TREE_CODE (lhs) != INTEGER_CST
7987 || !TREE_OVERFLOW (lhs)))
7988 return fold_build2 (code, type, variable, lhs);
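/* E.g. for signed int x (a sketch, again relying on undefined
   signed overflow):
     x + 5 < 10   becomes   x < 5
     x - 3 == 7   becomes   x == 10.  */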
7991 /* For comparisons of pointers we can decompose them into a compile-time
7992 comparison of the base objects and the offsets into the object.
7993 This requires at least one operand being an ADDR_EXPR to do more
7994 than the operand_equal_p test below. */
7995 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7996 && (TREE_CODE (arg0) == ADDR_EXPR
7997 || TREE_CODE (arg1) == ADDR_EXPR))
7999 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8000 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8001 enum machine_mode mode;
8002 int volatilep, unsignedp;
8003 bool indirect_base0 = false;
8005 /* Get base and offset for the access. Strip ADDR_EXPR for
8006 get_inner_reference, but put it back by stripping INDIRECT_REF
8007 off the base object if possible. */
8008 base0 = arg0;
8009 if (TREE_CODE (arg0) == ADDR_EXPR)
8011 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8012 &bitsize, &bitpos0, &offset0, &mode,
8013 &unsignedp, &volatilep, false);
8014 if (TREE_CODE (base0) == INDIRECT_REF)
8015 base0 = TREE_OPERAND (base0, 0);
8016 else
8017 indirect_base0 = true;
8020 base1 = arg1;
8021 if (TREE_CODE (arg1) == ADDR_EXPR)
8023 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8024 &bitsize, &bitpos1, &offset1, &mode,
8025 &unsignedp, &volatilep, false);
8026 /* We have to make sure to have an indirect/non-indirect base1
8027 just the same as we did for base0. */
8028 if (TREE_CODE (base1) == INDIRECT_REF
8029 && !indirect_base0)
8030 base1 = TREE_OPERAND (base1, 0);
8031 else if (!indirect_base0)
8032 base1 = NULL_TREE;
8034 else if (indirect_base0)
8035 base1 = NULL_TREE;
8037 /* If we have equivalent bases we might be able to simplify. */
8038 if (base0 && base1
8039 && operand_equal_p (base0, base1, 0))
8041 /* We can fold this expression to a constant if the non-constant
8042 offset parts are equal. */
8043 if (offset0 == offset1
8044 || (offset0 && offset1
8045 && operand_equal_p (offset0, offset1, 0)))
8047 switch (code)
8049 case EQ_EXPR:
8050 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8051 case NE_EXPR:
8052 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8053 case LT_EXPR:
8054 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8055 case LE_EXPR:
8056 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8057 case GE_EXPR:
8058 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8059 case GT_EXPR:
8060 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8061 default:;
8064 /* We can simplify the comparison to a comparison of the variable
8065 offset parts if the constant offset parts are equal.
8066 Be careful to use signed size type here because otherwise we
8067 mess with array offsets in the wrong way. This is possible
8068 because pointer arithmetic is restricted to remain within an
8069 object and overflow on pointer differences is undefined as of
8070 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8071 else if (bitpos0 == bitpos1)
8073 tree signed_size_type_node;
8074 signed_size_type_node = signed_type_for (size_type_node);
8076 /* By converting to signed size type we cover middle-end pointer
8077 arithmetic which operates on unsigned pointer types of size
8078 type size and ARRAY_REF offsets which are properly sign or
8079 zero extended from their type in case it is narrower than
8080 size type. */
8081 if (offset0 == NULL_TREE)
8082 offset0 = build_int_cst (signed_size_type_node, 0);
8083 else
8084 offset0 = fold_convert (signed_size_type_node, offset0);
8085 if (offset1 == NULL_TREE)
8086 offset1 = build_int_cst (signed_size_type_node, 0);
8087 else
8088 offset1 = fold_convert (signed_size_type_node, offset1);
8090 return fold_build2 (code, type, offset0, offset1);
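/* Illustrative sketch (hypothetical aggregate): for
     struct { int a; int b; } s;
   &s.a == &s.b decomposes to the same base with bit positions 0 and
   32 (on a 32-bit int target), so it folds to false; when the
   constant parts match instead, the variable offsets are compared
   as above.  */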
8095 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8096 same object, then we can fold this to a comparison of the two offsets in
8097 signed size type. This is possible because pointer arithmetic is
8098 restricted to remain within an object and overflow on pointer differences
8099 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8100 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8101 && !flag_wrapv && !flag_trapv)
8103 tree base0, offset0, base1, offset1;
8105 if (extract_array_ref (arg0, &base0, &offset0)
8106 && extract_array_ref (arg1, &base1, &offset1)
8107 && operand_equal_p (base0, base1, 0))
8109 tree signed_size_type_node;
8110 signed_size_type_node = signed_type_for (size_type_node);
8112 /* By converting to signed size type we cover middle-end pointer
8113 arithmetic which operates on unsigned pointer types of size
8114 type size and ARRAY_REF offsets which are properly sign or
8115 zero extended from their type in case it is narrower than
8116 size type. */
8117 if (offset0 == NULL_TREE)
8118 offset0 = build_int_cst (signed_size_type_node, 0);
8119 else
8120 offset0 = fold_convert (signed_size_type_node, offset0);
8121 if (offset1 == NULL_TREE)
8122 offset1 = build_int_cst (signed_size_type_node, 0);
8123 else
8124 offset1 = fold_convert (signed_size_type_node, offset1);
8126 return fold_build2 (code, type, offset0, offset1);
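/* E.g. (a sketch): &a[i] < &a[j] with a common base a reduces to a
   comparison of the two element offsets in the signed size type,
   effectively i < j scaled by the element size.  */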
8130 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8131 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8132 the resulting offset is smaller in absolute value than the
8133 original one. */
8134 if (!(flag_wrapv || flag_trapv)
8135 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8136 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8137 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8138 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8139 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8140 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8141 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8143 tree const1 = TREE_OPERAND (arg0, 1);
8144 tree const2 = TREE_OPERAND (arg1, 1);
8145 tree variable1 = TREE_OPERAND (arg0, 0);
8146 tree variable2 = TREE_OPERAND (arg1, 0);
8147 tree cst;
8149 /* Put the constant on the side where it doesn't overflow and is
8150 of lower absolute value than before. */
8151 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8152 ? MINUS_EXPR : PLUS_EXPR,
8153 const2, const1, 0);
8154 if (!TREE_OVERFLOW (cst)
8155 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8156 return fold_build2 (code, type,
8157 variable1,
8158 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8159 variable2, cst));
8161 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8162 ? MINUS_EXPR : PLUS_EXPR,
8163 const1, const2, 0);
8164 if (!TREE_OVERFLOW (cst)
8165 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8166 return fold_build2 (code, type,
8167 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8168 variable1, cst),
8169 variable2);
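/* Sketch for signed int x and y:
     x + 7 < y + 3   becomes   x < y + (-4), i.e. x < y - 4,
   combining the two constants on one side without introducing
   overflow.  */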
8172 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8173 if (tem)
8174 return tem;
8176 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8178 tree targ0 = strip_float_extensions (arg0);
8179 tree targ1 = strip_float_extensions (arg1);
8180 tree newtype = TREE_TYPE (targ0);
8182 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8183 newtype = TREE_TYPE (targ1);
8185 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8186 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8187 return fold_build2 (code, type, fold_convert (newtype, targ0),
8188 fold_convert (newtype, targ1));
8190 /* (-a) CMP (-b) -> b CMP a */
8191 if (TREE_CODE (arg0) == NEGATE_EXPR
8192 && TREE_CODE (arg1) == NEGATE_EXPR)
8193 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8194 TREE_OPERAND (arg0, 0));
8196 if (TREE_CODE (arg1) == REAL_CST)
8198 REAL_VALUE_TYPE cst;
8199 cst = TREE_REAL_CST (arg1);
8201 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8202 if (TREE_CODE (arg0) == NEGATE_EXPR)
8203 return fold_build2 (swap_tree_comparison (code), type,
8204 TREE_OPERAND (arg0, 0),
8205 build_real (TREE_TYPE (arg1),
8206 REAL_VALUE_NEGATE (cst)));
8208 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8209 /* a CMP (-0) -> a CMP 0 */
8210 if (REAL_VALUE_MINUS_ZERO (cst))
8211 return fold_build2 (code, type, arg0,
8212 build_real (TREE_TYPE (arg1), dconst0));
8214 /* x != NaN is always true, other ops are always false. */
8215 if (REAL_VALUE_ISNAN (cst)
8216 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8218 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8219 return omit_one_operand (type, tem, arg0);
8222 /* Fold comparisons against infinity. */
8223 if (REAL_VALUE_ISINF (cst))
8225 tem = fold_inf_compare (code, type, arg0, arg1);
8226 if (tem != NULL_TREE)
8227 return tem;
8231 /* If this is a comparison of a real constant with a PLUS_EXPR
8232 or a MINUS_EXPR of a real constant, we can convert it into a
8233 comparison with a revised real constant as long as no overflow
8234 occurs when unsafe_math_optimizations are enabled. */
8235 if (flag_unsafe_math_optimizations
8236 && TREE_CODE (arg1) == REAL_CST
8237 && (TREE_CODE (arg0) == PLUS_EXPR
8238 || TREE_CODE (arg0) == MINUS_EXPR)
8239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8240 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8241 ? MINUS_EXPR : PLUS_EXPR,
8242 arg1, TREE_OPERAND (arg0, 1), 0))
8243 && ! TREE_CONSTANT_OVERFLOW (tem))
8244 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8246 /* Likewise, we can simplify a comparison of a real constant with
8247 a MINUS_EXPR whose first operand is also a real constant, i.e.
8248 (c1 - x) < c2 becomes x > c1-c2. */
8249 if (flag_unsafe_math_optimizations
8250 && TREE_CODE (arg1) == REAL_CST
8251 && TREE_CODE (arg0) == MINUS_EXPR
8252 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8253 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8254 arg1, 0))
8255 && ! TREE_CONSTANT_OVERFLOW (tem))
8256 return fold_build2 (swap_tree_comparison (code), type,
8257 TREE_OPERAND (arg0, 1), tem);
8259 /* Fold comparisons against built-in math functions. */
8260 if (TREE_CODE (arg1) == REAL_CST
8261 && flag_unsafe_math_optimizations
8262 && ! flag_errno_math)
8264 enum built_in_function fcode = builtin_mathfn_code (arg0);
8266 if (fcode != END_BUILTINS)
8268 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8269 if (tem != NULL_TREE)
8270 return tem;
8275 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8276 if (TREE_CONSTANT (arg1)
8277 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8278 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8279 /* This optimization is invalid for ordered comparisons
8280 if CONST+INCR overflows or if foo+incr might overflow.
8281 This optimization is invalid for floating point due to rounding.
8282 For pointer types we assume overflow doesn't happen. */
8283 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8284 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8285 && (code == EQ_EXPR || code == NE_EXPR))))
8287 tree varop, newconst;
8289 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8291 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8292 arg1, TREE_OPERAND (arg0, 1));
8293 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8294 TREE_OPERAND (arg0, 0),
8295 TREE_OPERAND (arg0, 1));
8297 else
8299 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8300 arg1, TREE_OPERAND (arg0, 1));
8301 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8302 TREE_OPERAND (arg0, 0),
8303 TREE_OPERAND (arg0, 1));
8307 /* If VAROP is a reference to a bitfield, we must mask
8308 the constant by the width of the field. */
8309 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8310 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8311 && host_integerp (DECL_SIZE (TREE_OPERAND
8312 (TREE_OPERAND (varop, 0), 1)), 1))
8314 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8315 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8316 tree folded_compare, shift;
8318 /* First check whether the comparison would come out
8319 always the same. If we don't do that we would
8320 change the meaning with the masking. */
8321 folded_compare = fold_build2 (code, type,
8322 TREE_OPERAND (varop, 0), arg1);
8323 if (TREE_CODE (folded_compare) == INTEGER_CST)
8324 return omit_one_operand (type, folded_compare, varop);
8326 shift = build_int_cst (NULL_TREE,
8327 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8328 shift = fold_convert (TREE_TYPE (varop), shift);
8329 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8330 newconst, shift);
8331 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8332 newconst, shift);
8335 return fold_build2 (code, type, varop, newconst);
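/* E.g. (a sketch, integral x):  x++ == 5  becomes  ++x == 6,
   letting the pre-increment form participate in further folding.  */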
8338 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8339 && (TREE_CODE (arg0) == NOP_EXPR
8340 || TREE_CODE (arg0) == CONVERT_EXPR))
8342 /* If we are widening one operand of an integer comparison,
8343 see if the other operand is similarly being widened. Perhaps we
8344 can do the comparison in the narrower type. */
8345 tem = fold_widened_comparison (code, type, arg0, arg1);
8346 if (tem)
8347 return tem;
8349 /* Or if we are changing signedness. */
8350 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8351 if (tem)
8352 return tem;
8355 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8356 constant, we can simplify it. */
8357 if (TREE_CODE (arg1) == INTEGER_CST
8358 && (TREE_CODE (arg0) == MIN_EXPR
8359 || TREE_CODE (arg0) == MAX_EXPR)
8360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8362 tem = optimize_minmax_comparison (code, type, op0, op1);
8363 if (tem)
8364 return tem;
8367 /* Simplify comparison of something with itself. (For IEEE
8368 floating-point, we can only do some of these simplifications.) */
8369 if (operand_equal_p (arg0, arg1, 0))
8371 switch (code)
8373 case EQ_EXPR:
8374 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8375 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8376 return constant_boolean_node (1, type);
8377 break;
8379 case GE_EXPR:
8380 case LE_EXPR:
8381 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8382 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8383 return constant_boolean_node (1, type);
8384 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8386 case NE_EXPR:
8387 /* For NE, we can only do this simplification if the operands
8388 are integral or we don't honor IEEE floating-point NaNs. */
8389 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8390 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8391 break;
8392 /* ... fall through ... */
8393 case GT_EXPR:
8394 case LT_EXPR:
8395 return constant_boolean_node (0, type);
8396 default:
8397 gcc_unreachable ();
8401 /* If we are comparing an expression that just has comparisons
8402 of two integer values, arithmetic expressions of those comparisons,
8403 and constants, we can simplify it. There are only three cases
8404 to check: the two values can either be equal, the first can be
8405 greater, or the second can be greater. Fold the expression for
8406 those three values. Since each value must be 0 or 1, we have
8407 eight possibilities, each of which corresponds to the constant 0
8408 or 1 or one of the six possible comparisons.
8410 This handles common cases like (a > b) == 0 but also handles
8411 expressions like ((x > y) - (y > x)) > 0, which supposedly
8412 occur in macroized code. */
8414 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8416 tree cval1 = 0, cval2 = 0;
8417 int save_p = 0;
8419 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8420 /* Don't handle degenerate cases here; they should already
8421 have been handled anyway. */
8422 && cval1 != 0 && cval2 != 0
8423 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8424 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8425 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8426 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8427 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8428 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8429 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8431 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8432 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8434 /* We can't just pass T to eval_subst in case cval1 or cval2
8435 was the same as ARG1. */
8437 tree high_result
8438 = fold_build2 (code, type,
8439 eval_subst (arg0, cval1, maxval,
8440 cval2, minval),
8441 arg1);
8442 tree equal_result
8443 = fold_build2 (code, type,
8444 eval_subst (arg0, cval1, maxval,
8445 cval2, maxval),
8446 arg1);
8447 tree low_result
8448 = fold_build2 (code, type,
8449 eval_subst (arg0, cval1, minval,
8450 cval2, maxval),
8451 arg1);
8453 /* All three of these results should be 0 or 1. Confirm they are.
8454 Then use those values to select the proper code to use. */
8456 if (TREE_CODE (high_result) == INTEGER_CST
8457 && TREE_CODE (equal_result) == INTEGER_CST
8458 && TREE_CODE (low_result) == INTEGER_CST)
8460 /* Make a 3-bit mask with the high-order bit being the
8461 value for `>', the next for `=', and the low for `<'. */
8462 switch ((integer_onep (high_result) * 4)
8463 + (integer_onep (equal_result) * 2)
8464 + integer_onep (low_result))
8466 case 0:
8467 /* Always false. */
8468 return omit_one_operand (type, integer_zero_node, arg0);
8469 case 1:
8470 code = LT_EXPR;
8471 break;
8472 case 2:
8473 code = EQ_EXPR;
8474 break;
8475 case 3:
8476 code = LE_EXPR;
8477 break;
8478 case 4:
8479 code = GT_EXPR;
8480 break;
8481 case 5:
8482 code = NE_EXPR;
8483 break;
8484 case 6:
8485 code = GE_EXPR;
8486 break;
8487 case 7:
8488 /* Always true. */
8489 return omit_one_operand (type, integer_one_node, arg0);
8492 if (save_p)
8493 return save_expr (build2 (code, type, cval1, cval2));
8494 return fold_build2 (code, type, cval1, cval2);
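/* Worked instance of the mask: for ((x > y) - (y > x)) > 0,
   substituting the three orderings of x and y yields 1, 0 and 0,
   giving mask 4, so the whole expression folds to the single
   comparison x > y.  */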
8499 /* Fold a comparison of the address of COMPONENT_REFs with the same
8500 type and component to a comparison of the address of the base
8501 object. In short, &x->a OP &y->a becomes x OP y and
8502 &x->a OP &y.a becomes x OP &y. */
8503 if (TREE_CODE (arg0) == ADDR_EXPR
8504 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8505 && TREE_CODE (arg1) == ADDR_EXPR
8506 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8508 tree cref0 = TREE_OPERAND (arg0, 0);
8509 tree cref1 = TREE_OPERAND (arg1, 0);
8510 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8512 tree op0 = TREE_OPERAND (cref0, 0);
8513 tree op1 = TREE_OPERAND (cref1, 0);
8514 return fold_build2 (code, type,
8515 build_fold_addr_expr (op0),
8516 build_fold_addr_expr (op1));
8520 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8521 into a single range test. */
8522 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8523 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8524 && TREE_CODE (arg1) == INTEGER_CST
8525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8526 && !integer_zerop (TREE_OPERAND (arg0, 1))
8527 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8528 && !TREE_OVERFLOW (arg1))
8530 tem = fold_div_compare (code, type, arg0, arg1);
8531 if (tem != NULL_TREE)
8532 return tem;
8535 /* Fold ~X op ~Y as Y op X. */
8536 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8537 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8538 return fold_build2 (code, type,
8539 TREE_OPERAND (arg1, 0),
8540 TREE_OPERAND (arg0, 0));
8542 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8543 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8544 && TREE_CODE (arg1) == INTEGER_CST)
8545 return fold_build2 (swap_tree_comparison (code), type,
8546 TREE_OPERAND (arg0, 0),
8547 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
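/* E.g. (a sketch): ~x < 5 becomes x > ~5, i.e. x > -6 in two's
   complement, with the comparison direction swapped.  */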
8549 return NULL_TREE;
8553 /* Subroutine of fold_binary. Optimize complex multiplications of the
8554 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8555 argument EXPR represents the expression "z" of type TYPE. */
8557 static tree
8558 fold_mult_zconjz (tree type, tree expr)
8560 tree itype = TREE_TYPE (type);
8561 tree rpart, ipart, tem;
8563 if (TREE_CODE (expr) == COMPLEX_EXPR)
8565 rpart = TREE_OPERAND (expr, 0);
8566 ipart = TREE_OPERAND (expr, 1);
8568 else if (TREE_CODE (expr) == COMPLEX_CST)
8570 rpart = TREE_REALPART (expr);
8571 ipart = TREE_IMAGPART (expr);
8573 else
8575 expr = save_expr (expr);
8576 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8577 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8580 rpart = save_expr (rpart);
8581 ipart = save_expr (ipart);
8582 tem = fold_build2 (PLUS_EXPR, itype,
8583 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8584 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8585 return fold_build2 (COMPLEX_EXPR, type, tem,
8586 fold_convert (itype, integer_zero_node));
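/* The identity used above, as a sketch: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part, which is exactly the COMPLEX_EXPR built here.  */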
8590 /* Fold a binary expression of code CODE and type TYPE with operands
8591 OP0 and OP1. Return the folded expression if folding is
8592 successful. Otherwise, return NULL_TREE. */
8594 tree
8595 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8597 enum tree_code_class kind = TREE_CODE_CLASS (code);
8598 tree arg0, arg1, tem;
8599 tree t1 = NULL_TREE;
8601 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8602 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8603 && TREE_CODE_LENGTH (code) == 2
8604 && op0 != NULL_TREE
8605 && op1 != NULL_TREE);
8607 arg0 = op0;
8608 arg1 = op1;
8610 /* Strip any conversions that don't change the mode. This is
8611 safe for every expression, except for a comparison expression
8612 because its signedness is derived from its operands. So, in
8613 the latter case, only strip conversions that don't change the
8614 signedness.
8616 Note that this is done as an internal manipulation within the
8617 constant folder, in order to find the simplest representation
8618 of the arguments so that their form can be studied. In any
8619 case, the appropriate type conversions should be put back in
8620 the tree that will get out of the constant folder. */
8622 if (kind == tcc_comparison)
8624 STRIP_SIGN_NOPS (arg0);
8625 STRIP_SIGN_NOPS (arg1);
8627 else
8629 STRIP_NOPS (arg0);
8630 STRIP_NOPS (arg1);
8633 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8634 constant but we can't do arithmetic on them. */
8635 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8636 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8637 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8638 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8640 if (kind == tcc_binary)
8641 tem = const_binop (code, arg0, arg1, 0);
8642 else if (kind == tcc_comparison)
8643 tem = fold_relational_const (code, type, arg0, arg1);
8644 else
8645 tem = NULL_TREE;
8647 if (tem != NULL_TREE)
8649 if (TREE_TYPE (tem) != type)
8650 tem = fold_convert (type, tem);
8651 return tem;
8655 /* If this is a commutative operation, and ARG0 is a constant, move it
8656 to ARG1 to reduce the number of tests below. */
8657 if (commutative_tree_code (code)
8658 && tree_swap_operands_p (arg0, arg1, true))
8659 return fold_build2 (code, type, op1, op0);
8661 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8663 First check for cases where an arithmetic operation is applied to a
8664 compound, conditional, or comparison operation. Push the arithmetic
8665 operation inside the compound or conditional to see if any folding
8666 can then be done. Convert comparison to conditional for this purpose.
8667 This also optimizes non-constant cases that used to be done in
8668 expand_expr.
8670 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
8671 one of the operands is a comparison and the other is a comparison, a
8672 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8673 code below would make the expression more complex. Change it to a
8674 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8675 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8677 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8678 || code == EQ_EXPR || code == NE_EXPR)
8679 && ((truth_value_p (TREE_CODE (arg0))
8680 && (truth_value_p (TREE_CODE (arg1))
8681 || (TREE_CODE (arg1) == BIT_AND_EXPR
8682 && integer_onep (TREE_OPERAND (arg1, 1)))))
8683 || (truth_value_p (TREE_CODE (arg1))
8684 && (truth_value_p (TREE_CODE (arg0))
8685 || (TREE_CODE (arg0) == BIT_AND_EXPR
8686 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8688 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8689 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8690 : TRUTH_XOR_EXPR,
8691 boolean_type_node,
8692 fold_convert (boolean_type_node, arg0),
8693 fold_convert (boolean_type_node, arg1));
8695 if (code == EQ_EXPR)
8696 tem = invert_truthvalue (tem);
8698 return fold_convert (type, tem);
8701 if (TREE_CODE_CLASS (code) == tcc_binary
8702 || TREE_CODE_CLASS (code) == tcc_comparison)
8704 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8705 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8706 fold_build2 (code, type,
8707 TREE_OPERAND (arg0, 1), op1));
8708 if (TREE_CODE (arg1) == COMPOUND_EXPR
8709 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8710 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8711 fold_build2 (code, type,
8712 op0, TREE_OPERAND (arg1, 1)));
8714 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8716 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8717 arg0, arg1,
8718 /*cond_first_p=*/1);
8719 if (tem != NULL_TREE)
8720 return tem;
8723 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8725 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8726 arg1, arg0,
8727 /*cond_first_p=*/0);
8728 if (tem != NULL_TREE)
8729 return tem;
8733 switch (code)
8735 case PLUS_EXPR:
8736 /* A + (-B) -> A - B */
8737 if (TREE_CODE (arg1) == NEGATE_EXPR)
8738 return fold_build2 (MINUS_EXPR, type,
8739 fold_convert (type, arg0),
8740 fold_convert (type, TREE_OPERAND (arg1, 0)));
8741 /* (-A) + B -> B - A */
8742 if (TREE_CODE (arg0) == NEGATE_EXPR
8743 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8744 return fold_build2 (MINUS_EXPR, type,
8745 fold_convert (type, arg1),
8746 fold_convert (type, TREE_OPERAND (arg0, 0)));
8747 /* Convert ~A + 1 to -A. */
8748 if (INTEGRAL_TYPE_P (type)
8749 && TREE_CODE (arg0) == BIT_NOT_EXPR
8750 && integer_onep (arg1))
8751 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8753 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8754 same or one. */
8755 if ((TREE_CODE (arg0) == MULT_EXPR
8756 || TREE_CODE (arg1) == MULT_EXPR)
8757 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8759 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8760 if (tem)
8761 return tem;
8764 if (! FLOAT_TYPE_P (type))
8766 if (integer_zerop (arg1))
8767 return non_lvalue (fold_convert (type, arg0));
8769 /* ~X + X is -1. */
8770 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8771 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8772 && !TYPE_TRAP_SIGNED (type))
8774 t1 = build_int_cst (type, -1);
8775 t1 = force_fit_type (t1, 0, false, false);
8776 return omit_one_operand (type, t1, arg1);
8779 /* X + ~X is -1. */
8780 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8781 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8782 && !TYPE_TRAP_SIGNED (type))
8784 t1 = build_int_cst (type, -1);
8785 t1 = force_fit_type (t1, 0, false, false);
8786 return omit_one_operand (type, t1, arg0);
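/* Both cases follow from ~X == -X - 1 in two's complement, so
   X + ~X == -1 independently of the value of X.  */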
8789 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8790 with a constant, and the two constants have no bits in common,
8791 we should treat this as a BIT_IOR_EXPR since this may produce more
8792 simplifications. */
8793 if (TREE_CODE (arg0) == BIT_AND_EXPR
8794 && TREE_CODE (arg1) == BIT_AND_EXPR
8795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8796 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8797 && integer_zerop (const_binop (BIT_AND_EXPR,
8798 TREE_OPERAND (arg0, 1),
8799 TREE_OPERAND (arg1, 1), 0)))
8801 code = BIT_IOR_EXPR;
8802 goto bit_ior;
8805 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8806 (plus (plus (mult) (mult)) (foo)) so that we can
8807 take advantage of the factoring cases below. */
8808 if (((TREE_CODE (arg0) == PLUS_EXPR
8809 || TREE_CODE (arg0) == MINUS_EXPR)
8810 && TREE_CODE (arg1) == MULT_EXPR)
8811 || ((TREE_CODE (arg1) == PLUS_EXPR
8812 || TREE_CODE (arg1) == MINUS_EXPR)
8813 && TREE_CODE (arg0) == MULT_EXPR))
8815 tree parg0, parg1, parg, marg;
8816 enum tree_code pcode;
8818 if (TREE_CODE (arg1) == MULT_EXPR)
8819 parg = arg0, marg = arg1;
8820 else
8821 parg = arg1, marg = arg0;
8822 pcode = TREE_CODE (parg);
8823 parg0 = TREE_OPERAND (parg, 0);
8824 parg1 = TREE_OPERAND (parg, 1);
8825 STRIP_NOPS (parg0);
8826 STRIP_NOPS (parg1);
8828 if (TREE_CODE (parg0) == MULT_EXPR
8829 && TREE_CODE (parg1) != MULT_EXPR)
8830 return fold_build2 (pcode, type,
8831 fold_build2 (PLUS_EXPR, type,
8832 fold_convert (type, parg0),
8833 fold_convert (type, marg)),
8834 fold_convert (type, parg1));
8835 if (TREE_CODE (parg0) != MULT_EXPR
8836 && TREE_CODE (parg1) == MULT_EXPR)
8837 return fold_build2 (PLUS_EXPR, type,
8838 fold_convert (type, parg0),
8839 fold_build2 (pcode, type,
8840 fold_convert (type, marg),
8841 fold_convert (type,
8842 parg1)));
8845 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8846 of the array. The loop optimizer sometimes produces this kind of
8847 expression. */
8848 if (TREE_CODE (arg0) == ADDR_EXPR)
8850 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8851 if (tem)
8852 return fold_convert (type, tem);
8854 else if (TREE_CODE (arg1) == ADDR_EXPR)
8856 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8857 if (tem)
8858 return fold_convert (type, tem);
8861 else
8863 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8864 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8865 return non_lvalue (fold_convert (type, arg0));
8867 /* Likewise if the operands are reversed. */
8868 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8869 return non_lvalue (fold_convert (type, arg1));
8871 /* Convert X + -C into X - C. */
8872 if (TREE_CODE (arg1) == REAL_CST
8873 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8875 tem = fold_negate_const (arg1, type);
8876 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8877 return fold_build2 (MINUS_EXPR, type,
8878 fold_convert (type, arg0),
8879 fold_convert (type, tem));
8882 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8883 to __complex__ ( x, y ). This is not the same for SNaNs or
8884 if signed zeros are involved. */
8885 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8886 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8887 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8889 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8890 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8891 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8892 bool arg0rz = false, arg0iz = false;
8893 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8894 || (arg0i && (arg0iz = real_zerop (arg0i))))
8896 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8897 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8898 if (arg0rz && arg1i && real_zerop (arg1i))
8900 tree rp = arg1r ? arg1r
8901 : build1 (REALPART_EXPR, rtype, arg1);
8902 tree ip = arg0i ? arg0i
8903 : build1 (IMAGPART_EXPR, rtype, arg0);
8904 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8906 else if (arg0iz && arg1r && real_zerop (arg1r))
8908 tree rp = arg0r ? arg0r
8909 : build1 (REALPART_EXPR, rtype, arg0);
8910 tree ip = arg1i ? arg1i
8911 : build1 (IMAGPART_EXPR, rtype, arg1);
8912 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8917 if (flag_unsafe_math_optimizations
8918 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8919 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8920 && (tem = distribute_real_division (code, type, arg0, arg1)))
8921 return tem;
8923 /* Convert x+x into x*2.0. */
8924 if (operand_equal_p (arg0, arg1, 0)
8925 && SCALAR_FLOAT_TYPE_P (type))
8926 return fold_build2 (MULT_EXPR, type, arg0,
8927 build_real (type, dconst2));
8929 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8930 if (flag_unsafe_math_optimizations
8931 && TREE_CODE (arg1) == PLUS_EXPR
8932 && TREE_CODE (arg0) != MULT_EXPR)
8934 tree tree10 = TREE_OPERAND (arg1, 0);
8935 tree tree11 = TREE_OPERAND (arg1, 1);
8936 if (TREE_CODE (tree11) == MULT_EXPR
8937 && TREE_CODE (tree10) == MULT_EXPR)
8939 tree tree0;
8940 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8941 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8944 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8945 if (flag_unsafe_math_optimizations
8946 && TREE_CODE (arg0) == PLUS_EXPR
8947 && TREE_CODE (arg1) != MULT_EXPR)
8949 tree tree00 = TREE_OPERAND (arg0, 0);
8950 tree tree01 = TREE_OPERAND (arg0, 1);
8951 if (TREE_CODE (tree01) == MULT_EXPR
8952 && TREE_CODE (tree00) == MULT_EXPR)
8954 tree tree0;
8955 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8956 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8961 bit_rotate:
8962 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
8963 is a rotate of A by C1 bits. */
8964 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
8965 is a rotate of A by B bits. */
8967 enum tree_code code0, code1;
8968 code0 = TREE_CODE (arg0);
8969 code1 = TREE_CODE (arg1);
8970 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8971 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8972 && operand_equal_p (TREE_OPERAND (arg0, 0),
8973 TREE_OPERAND (arg1, 0), 0)
8974 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8976 tree tree01, tree11;
8977 enum tree_code code01, code11;
8979 tree01 = TREE_OPERAND (arg0, 1);
8980 tree11 = TREE_OPERAND (arg1, 1);
8981 STRIP_NOPS (tree01);
8982 STRIP_NOPS (tree11);
8983 code01 = TREE_CODE (tree01);
8984 code11 = TREE_CODE (tree11);
8985 if (code01 == INTEGER_CST
8986 && code11 == INTEGER_CST
8987 && TREE_INT_CST_HIGH (tree01) == 0
8988 && TREE_INT_CST_HIGH (tree11) == 0
8989 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8990 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8991 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8992 code0 == LSHIFT_EXPR ? tree01 : tree11);
8993 else if (code11 == MINUS_EXPR)
8995 tree tree110, tree111;
8996 tree110 = TREE_OPERAND (tree11, 0);
8997 tree111 = TREE_OPERAND (tree11, 1);
8998 STRIP_NOPS (tree110);
8999 STRIP_NOPS (tree111);
9000 if (TREE_CODE (tree110) == INTEGER_CST
9001 && 0 == compare_tree_int (tree110,
9002 TYPE_PRECISION
9003 (TREE_TYPE (TREE_OPERAND
9004 (arg0, 0))))
9005 && operand_equal_p (tree01, tree111, 0))
9006 return build2 ((code0 == LSHIFT_EXPR
9007 ? LROTATE_EXPR
9008 : RROTATE_EXPR),
9009 type, TREE_OPERAND (arg0, 0), tree01);
9011 else if (code01 == MINUS_EXPR)
9013 tree tree010, tree011;
9014 tree010 = TREE_OPERAND (tree01, 0);
9015 tree011 = TREE_OPERAND (tree01, 1);
9016 STRIP_NOPS (tree010);
9017 STRIP_NOPS (tree011);
9018 if (TREE_CODE (tree010) == INTEGER_CST
9019 && 0 == compare_tree_int (tree010,
9020 TYPE_PRECISION
9021 (TREE_TYPE (TREE_OPERAND
9022 (arg0, 0))))
9023 && operand_equal_p (tree11, tree011, 0))
9024 return build2 ((code0 != LSHIFT_EXPR
9025 ? LROTATE_EXPR
9026 : RROTATE_EXPR),
9027 type, TREE_OPERAND (arg0, 0), tree11);
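/* Illustrative sketch (32-bit unsigned x):
     (x << 3) + (x >> 29)        and
     (x << n) + (x >> (32 - n))
   are both recognized here and rewritten as left-rotates of x.  */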
9032 associate:
9033 /* In most languages, we can't associate operations on floats through
9034 parentheses. Rather than remember where the parentheses were, we
9035 don't associate floats at all, unless the user has specified
9036 -funsafe-math-optimizations. */
9038 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9040 tree var0, con0, lit0, minus_lit0;
9041 tree var1, con1, lit1, minus_lit1;
9043 /* Split both trees into variables, constants, and literals. Then
9044 associate each group together, the constants with literals,
9045 then the result with variables. This increases the chances of
9046 literals being recombined later and of generating relocatable
9047 expressions for the sum of a constant and literal. */
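/* Illustrative example: (x + 4) + (y + 5) splits into variables x, y
   and literals 4, 5; the re-association below produces (x + y) + 9,
   so the two literals collapse into a single constant.  */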
9048 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9049 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9050 code == MINUS_EXPR);
9052 /* Only do something if we found more than two objects. Otherwise,
9053 nothing has changed and we risk infinite recursion. */
9054 if (2 < ((var0 != 0) + (var1 != 0)
9055 + (con0 != 0) + (con1 != 0)
9056 + (lit0 != 0) + (lit1 != 0)
9057 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9059 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9060 if (code == MINUS_EXPR)
9061 code = PLUS_EXPR;
9063 var0 = associate_trees (var0, var1, code, type);
9064 con0 = associate_trees (con0, con1, code, type);
9065 lit0 = associate_trees (lit0, lit1, code, type);
9066 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9068 /* Preserve the MINUS_EXPR if the negative part of the literal is
9069 greater than the positive part. Otherwise, the multiplicative
9070 folding code (i.e. extract_muldiv) may be fooled when
9071 unsigned constants are subtracted, as in the following
9072 example: ((X*2 + 4) - 8U)/2. */
9073 if (minus_lit0 && lit0)
9075 if (TREE_CODE (lit0) == INTEGER_CST
9076 && TREE_CODE (minus_lit0) == INTEGER_CST
9077 && tree_int_cst_lt (lit0, minus_lit0))
9079 minus_lit0 = associate_trees (minus_lit0, lit0,
9080 MINUS_EXPR, type);
9081 lit0 = 0;
9083 else
9085 lit0 = associate_trees (lit0, minus_lit0,
9086 MINUS_EXPR, type);
9087 minus_lit0 = 0;
9090 if (minus_lit0)
9092 if (con0 == 0)
9093 return fold_convert (type,
9094 associate_trees (var0, minus_lit0,
9095 MINUS_EXPR, type));
9096 else
9098 con0 = associate_trees (con0, minus_lit0,
9099 MINUS_EXPR, type);
9100 return fold_convert (type,
9101 associate_trees (var0, con0,
9102 PLUS_EXPR, type));
9106 con0 = associate_trees (con0, lit0, code, type);
9107 return fold_convert (type, associate_trees (var0, con0,
9108 code, type));
9112 return NULL_TREE;
9114 case MINUS_EXPR:
9115 /* A - (-B) -> A + B */
9116 if (TREE_CODE (arg1) == NEGATE_EXPR)
9117 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9118 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9119 if (TREE_CODE (arg0) == NEGATE_EXPR
9120 && (FLOAT_TYPE_P (type)
9121 || INTEGRAL_TYPE_P (type))
9122 && negate_expr_p (arg1)
9123 && reorder_operands_p (arg0, arg1))
9124 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9125 TREE_OPERAND (arg0, 0));
9126 /* Convert -A - 1 to ~A. */
9127 if (INTEGRAL_TYPE_P (type)
9128 && TREE_CODE (arg0) == NEGATE_EXPR
9129 && integer_onep (arg1)
9130 && !TYPE_TRAP_SIGNED (type))
9131 return fold_build1 (BIT_NOT_EXPR, type,
9132 fold_convert (type, TREE_OPERAND (arg0, 0)));
9134 /* Convert -1 - A to ~A. */
9135 if (INTEGRAL_TYPE_P (type)
9136 && integer_all_onesp (arg0))
9137 return fold_build1 (BIT_NOT_EXPR, type, op1);
9139 if (! FLOAT_TYPE_P (type))
9141 if (integer_zerop (arg0))
9142 return negate_expr (fold_convert (type, arg1));
9143 if (integer_zerop (arg1))
9144 return non_lvalue (fold_convert (type, arg0));
9146 /* Fold A - (A & B) into ~B & A. */
9147 if (!TREE_SIDE_EFFECTS (arg0)
9148 && TREE_CODE (arg1) == BIT_AND_EXPR)
9150 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9151 return fold_build2 (BIT_AND_EXPR, type,
9152 fold_build1 (BIT_NOT_EXPR, type,
9153 TREE_OPERAND (arg1, 0)),
9154 arg0);
9155 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9156 return fold_build2 (BIT_AND_EXPR, type,
9157 fold_build1 (BIT_NOT_EXPR, type,
9158 TREE_OPERAND (arg1, 1)),
9159 arg0);
9162 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9163 any power of 2 minus 1. */
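/* Illustrative check with B == 7, A == 13: (13 & ~7) - (13 & 7) is
   8 - 5 == 3, and (13 ^ 7) - 7 is 10 - 7 == 3, so the two forms
   agree.  */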
9164 if (TREE_CODE (arg0) == BIT_AND_EXPR
9165 && TREE_CODE (arg1) == BIT_AND_EXPR
9166 && operand_equal_p (TREE_OPERAND (arg0, 0),
9167 TREE_OPERAND (arg1, 0), 0))
9169 tree mask0 = TREE_OPERAND (arg0, 1);
9170 tree mask1 = TREE_OPERAND (arg1, 1);
9171 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9173 if (operand_equal_p (tem, mask1, 0))
9175 tem = fold_build2 (BIT_XOR_EXPR, type,
9176 TREE_OPERAND (arg0, 0), mask1);
9177 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9182 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9183 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9184 return non_lvalue (fold_convert (type, arg0));
9186 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9187 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9188 (-ARG1 + ARG0) reduces to -ARG1. */
9189 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9190 return negate_expr (fold_convert (type, arg1));
9192 /* Fold &x - &x. This can happen from &x.foo - &x.
9193 This is unsafe for certain floats even in non-IEEE formats.
9194 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9195 Also note that operand_equal_p is always false if an operand
9196 is volatile. */
9198 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9199 && operand_equal_p (arg0, arg1, 0))
9200 return fold_convert (type, integer_zero_node);
9202 /* A - B -> A + (-B) if B is easily negatable. */
9203 if (negate_expr_p (arg1)
9204 && ((FLOAT_TYPE_P (type)
9205 /* Avoid this transformation if B is a positive REAL_CST. */
9206 && (TREE_CODE (arg1) != REAL_CST
9207 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9208 || INTEGRAL_TYPE_P (type)))
9209 return fold_build2 (PLUS_EXPR, type,
9210 fold_convert (type, arg0),
9211 fold_convert (type, negate_expr (arg1)));
9213 /* Try folding difference of addresses. */
9215 HOST_WIDE_INT diff;
9217 if ((TREE_CODE (arg0) == ADDR_EXPR
9218 || TREE_CODE (arg1) == ADDR_EXPR)
9219 && ptr_difference_const (arg0, arg1, &diff))
9220 return build_int_cst_type (type, diff);
9223 /* Fold &a[i] - &a[j] to (i-j) scaled by the element size of a. */
9224 if (TREE_CODE (arg0) == ADDR_EXPR
9225 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9226 && TREE_CODE (arg1) == ADDR_EXPR
9227 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9229 tree aref0 = TREE_OPERAND (arg0, 0);
9230 tree aref1 = TREE_OPERAND (arg1, 0);
9231 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9232 TREE_OPERAND (aref1, 0), 0))
9234 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9235 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9236 tree esz = array_ref_element_size (aref0);
9237 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9238 return fold_build2 (MULT_EXPR, type, diff,
9239 fold_convert (type, esz));
9244 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9245 of the array. The loop optimizer sometimes produces this type of
9246 expression. */
9247 if (TREE_CODE (arg0) == ADDR_EXPR)
9249 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9250 if (tem)
9251 return fold_convert (type, tem);
9254 if (flag_unsafe_math_optimizations
9255 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9256 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9257 && (tem = distribute_real_division (code, type, arg0, arg1)))
9258 return tem;
9260 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9261 same or one. */
9262 if ((TREE_CODE (arg0) == MULT_EXPR
9263 || TREE_CODE (arg1) == MULT_EXPR)
9264 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9266 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9267 if (tem)
9268 return tem;
9271 goto associate;
9273 case MULT_EXPR:
9274 /* (-A) * (-B) -> A * B */
9275 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9276 return fold_build2 (MULT_EXPR, type,
9277 fold_convert (type, TREE_OPERAND (arg0, 0)),
9278 fold_convert (type, negate_expr (arg1)));
9279 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9280 return fold_build2 (MULT_EXPR, type,
9281 fold_convert (type, negate_expr (arg0)),
9282 fold_convert (type, TREE_OPERAND (arg1, 0)));
9284 if (! FLOAT_TYPE_P (type))
9286 if (integer_zerop (arg1))
9287 return omit_one_operand (type, arg1, arg0);
9288 if (integer_onep (arg1))
9289 return non_lvalue (fold_convert (type, arg0));
9290 /* Transform x * -1 into -x. */
9291 if (integer_all_onesp (arg1))
9292 return fold_convert (type, negate_expr (arg0));
9293 /* Transform x * -C into -x * C if x is easily negatable. */
9294 if (TREE_CODE (arg1) == INTEGER_CST
9295 && tree_int_cst_sgn (arg1) == -1
9296 && negate_expr_p (arg0)
9297 && (tem = negate_expr (arg1)) != arg1
9298 && !TREE_OVERFLOW (tem))
9299 return fold_build2 (MULT_EXPR, type,
9300 negate_expr (arg0), tem);
9302 /* (a * (1 << b)) is (a << b) */
9303 if (TREE_CODE (arg1) == LSHIFT_EXPR
9304 && integer_onep (TREE_OPERAND (arg1, 0)))
9305 return fold_build2 (LSHIFT_EXPR, type, arg0,
9306 TREE_OPERAND (arg1, 1));
9307 if (TREE_CODE (arg0) == LSHIFT_EXPR
9308 && integer_onep (TREE_OPERAND (arg0, 0)))
9309 return fold_build2 (LSHIFT_EXPR, type, arg1,
9310 TREE_OPERAND (arg0, 1));
9312 if (TREE_CODE (arg1) == INTEGER_CST
9313 && 0 != (tem = extract_muldiv (op0,
9314 fold_convert (type, arg1),
9315 code, NULL_TREE)))
9316 return fold_convert (type, tem);
9318 /* Optimize z * conj(z) for integer complex numbers. */
9319 if (TREE_CODE (arg0) == CONJ_EXPR
9320 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9321 return fold_mult_zconjz (type, arg1);
9322 if (TREE_CODE (arg1) == CONJ_EXPR
9323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9324 return fold_mult_zconjz (type, arg0);
9326 else
9328 /* Maybe fold x * 0 to 0. The expressions aren't the same
9329 when x is NaN, since x * 0 is also NaN. Nor are they the
9330 same in modes with signed zeros, since multiplying a
9331 negative value by 0 gives -0, not +0. */
9332 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9333 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9334 && real_zerop (arg1))
9335 return omit_one_operand (type, arg1, arg0);
9336 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9337 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9338 && real_onep (arg1))
9339 return non_lvalue (fold_convert (type, arg0));
9341 /* Transform x * -1.0 into -x. */
9342 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9343 && real_minus_onep (arg1))
9344 return fold_convert (type, negate_expr (arg0));
9346 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9347 if (flag_unsafe_math_optimizations
9348 && TREE_CODE (arg0) == RDIV_EXPR
9349 && TREE_CODE (arg1) == REAL_CST
9350 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9352 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9353 arg1, 0);
9354 if (tem)
9355 return fold_build2 (RDIV_EXPR, type, tem,
9356 TREE_OPERAND (arg0, 1));
9359 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9360 if (operand_equal_p (arg0, arg1, 0))
9362 tree tem = fold_strip_sign_ops (arg0);
9363 if (tem != NULL_TREE)
9365 tem = fold_convert (type, tem);
9366 return fold_build2 (MULT_EXPR, type, tem, tem);
9370 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9371 This is not the same for NaNs or if signed zeros are
9372 involved. */
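/* Illustratively, (a + b*i) * i == -b + a*i and (a + b*i) * -i ==
   b - a*i, which is why the code below swaps the real and imaginary
   parts and negates one of them.  */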
9373 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9374 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9375 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9376 && TREE_CODE (arg1) == COMPLEX_CST
9377 && real_zerop (TREE_REALPART (arg1)))
9379 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9380 if (real_onep (TREE_IMAGPART (arg1)))
9381 return fold_build2 (COMPLEX_EXPR, type,
9382 negate_expr (fold_build1 (IMAGPART_EXPR,
9383 rtype, arg0)),
9384 fold_build1 (REALPART_EXPR, rtype, arg0));
9385 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9386 return fold_build2 (COMPLEX_EXPR, type,
9387 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9388 negate_expr (fold_build1 (REALPART_EXPR,
9389 rtype, arg0)));
9392 /* Optimize z * conj(z) for floating point complex numbers.
9393 Guarded by flag_unsafe_math_optimizations as non-finite
9394 imaginary components don't produce scalar results. */
9395 if (flag_unsafe_math_optimizations
9396 && TREE_CODE (arg0) == CONJ_EXPR
9397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9398 return fold_mult_zconjz (type, arg1);
9399 if (flag_unsafe_math_optimizations
9400 && TREE_CODE (arg1) == CONJ_EXPR
9401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9402 return fold_mult_zconjz (type, arg0);
9404 if (flag_unsafe_math_optimizations)
9406 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9407 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9409 /* Optimizations of root(...)*root(...). */
9410 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9412 tree rootfn, arg, arglist;
9413 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9414 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9416 /* Optimize sqrt(x)*sqrt(x) as x. */
9417 if (BUILTIN_SQRT_P (fcode0)
9418 && operand_equal_p (arg00, arg10, 0)
9419 && ! HONOR_SNANS (TYPE_MODE (type)))
9420 return arg00;
9422 /* Optimize root(x)*root(y) as root(x*y). */
9423 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9424 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9425 arglist = build_tree_list (NULL_TREE, arg);
9426 return build_function_call_expr (rootfn, arglist);
9429 /* Optimize expN(x)*expN(y) as expN(x+y). */
9430 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9432 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9433 tree arg = fold_build2 (PLUS_EXPR, type,
9434 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9435 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9436 tree arglist = build_tree_list (NULL_TREE, arg);
9437 return build_function_call_expr (expfn, arglist);
9440 /* Optimizations of pow(...)*pow(...). */
9441 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9442 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9443 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9445 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9446 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9447 1)));
9448 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9449 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9450 1)));
9452 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9453 if (operand_equal_p (arg01, arg11, 0))
9455 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9456 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9457 tree arglist = tree_cons (NULL_TREE, arg,
9458 build_tree_list (NULL_TREE,
9459 arg01));
9460 return build_function_call_expr (powfn, arglist);
9463 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9464 if (operand_equal_p (arg00, arg10, 0))
9466 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9467 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9468 tree arglist = tree_cons (NULL_TREE, arg00,
9469 build_tree_list (NULL_TREE,
9470 arg));
9471 return build_function_call_expr (powfn, arglist);
9475 /* Optimize tan(x)*cos(x) as sin(x). */
9476 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9477 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9478 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9479 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9480 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9481 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9482 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9483 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9485 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9487 if (sinfn != NULL_TREE)
9488 return build_function_call_expr (sinfn,
9489 TREE_OPERAND (arg0, 1));
9492 /* Optimize x*pow(x,c) as pow(x,c+1). */
9493 if (fcode1 == BUILT_IN_POW
9494 || fcode1 == BUILT_IN_POWF
9495 || fcode1 == BUILT_IN_POWL)
9497 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9498 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9499 1)));
9500 if (TREE_CODE (arg11) == REAL_CST
9501 && ! TREE_CONSTANT_OVERFLOW (arg11)
9502 && operand_equal_p (arg0, arg10, 0))
9504 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9505 REAL_VALUE_TYPE c;
9506 tree arg, arglist;
9508 c = TREE_REAL_CST (arg11);
9509 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9510 arg = build_real (type, c);
9511 arglist = build_tree_list (NULL_TREE, arg);
9512 arglist = tree_cons (NULL_TREE, arg0, arglist);
9513 return build_function_call_expr (powfn, arglist);
9517 /* Optimize pow(x,c)*x as pow(x,c+1). */
9518 if (fcode0 == BUILT_IN_POW
9519 || fcode0 == BUILT_IN_POWF
9520 || fcode0 == BUILT_IN_POWL)
9522 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9523 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9524 1)));
9525 if (TREE_CODE (arg01) == REAL_CST
9526 && ! TREE_CONSTANT_OVERFLOW (arg01)
9527 && operand_equal_p (arg1, arg00, 0))
9529 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9530 REAL_VALUE_TYPE c;
9531 tree arg, arglist;
9533 c = TREE_REAL_CST (arg01);
9534 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9535 arg = build_real (type, c);
9536 arglist = build_tree_list (NULL_TREE, arg);
9537 arglist = tree_cons (NULL_TREE, arg1, arglist);
9538 return build_function_call_expr (powfn, arglist);
9542 /* Optimize x*x as pow(x,2.0), which is later expanded back into x*x,
so the rewrite costs nothing and exposes the pow folds above. */
9543 if (! optimize_size
9544 && operand_equal_p (arg0, arg1, 0))
9546 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9548 if (powfn)
9550 tree arg = build_real (type, dconst2);
9551 tree arglist = build_tree_list (NULL_TREE, arg);
9552 arglist = tree_cons (NULL_TREE, arg0, arglist);
9553 return build_function_call_expr (powfn, arglist);
9558 goto associate;
9560 case BIT_IOR_EXPR:
9561 bit_ior:
9562 if (integer_all_onesp (arg1))
9563 return omit_one_operand (type, arg1, arg0);
9564 if (integer_zerop (arg1))
9565 return non_lvalue (fold_convert (type, arg0));
9566 if (operand_equal_p (arg0, arg1, 0))
9567 return non_lvalue (fold_convert (type, arg0));
9569 /* ~X | X is -1. */
9570 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9571 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9573 t1 = build_int_cst (type, -1);
9574 t1 = force_fit_type (t1, 0, false, false);
9575 return omit_one_operand (type, t1, arg1);
9578 /* X | ~X is -1. */
9579 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9580 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9582 t1 = build_int_cst (type, -1);
9583 t1 = force_fit_type (t1, 0, false, false);
9584 return omit_one_operand (type, t1, arg0);
9587 /* Canonicalize (X & C1) | C2. */
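/* Illustrative examples on an 8-bit type: (X & 0x0F) | 0xFF becomes
   just 0xFF (preserving X's side effects); (X & 0xF0) | 0x0F becomes
   X | 0x0F since C1|C2 covers every bit; and (X & 0xFF) | 0x0F is
   canonicalized to (X & 0xF0) | 0x0F.  */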
9588 if (TREE_CODE (arg0) == BIT_AND_EXPR
9589 && TREE_CODE (arg1) == INTEGER_CST
9590 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9592 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9593 int width = TYPE_PRECISION (type);
9594 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9595 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9596 hi2 = TREE_INT_CST_HIGH (arg1);
9597 lo2 = TREE_INT_CST_LOW (arg1);
9599 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9600 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9601 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9603 if (width > HOST_BITS_PER_WIDE_INT)
9605 mhi = (unsigned HOST_WIDE_INT) -1
9606 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9607 mlo = -1;
9609 else
9611 mhi = 0;
9612 mlo = (unsigned HOST_WIDE_INT) -1
9613 >> (HOST_BITS_PER_WIDE_INT - width);
9616 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9617 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9618 return fold_build2 (BIT_IOR_EXPR, type,
9619 TREE_OPERAND (arg0, 0), arg1);
9621 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9622 hi1 &= mhi;
9623 lo1 &= mlo;
9624 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9625 return fold_build2 (BIT_IOR_EXPR, type,
9626 fold_build2 (BIT_AND_EXPR, type,
9627 TREE_OPERAND (arg0, 0),
9628 build_int_cst_wide (type,
9629 lo1 & ~lo2,
9630 hi1 & ~hi2)),
9631 arg1);
9634 /* (X & Y) | Y is (X, Y). */
9635 if (TREE_CODE (arg0) == BIT_AND_EXPR
9636 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9637 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9638 /* (X & Y) | X is (Y, X). */
9639 if (TREE_CODE (arg0) == BIT_AND_EXPR
9640 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9641 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9642 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9643 /* X | (X & Y) is (Y, X). */
9644 if (TREE_CODE (arg1) == BIT_AND_EXPR
9645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9646 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9647 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9648 /* X | (Y & X) is (Y, X). */
9649 if (TREE_CODE (arg1) == BIT_AND_EXPR
9650 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9651 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9652 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9654 t1 = distribute_bit_expr (code, type, arg0, arg1);
9655 if (t1 != NULL_TREE)
9656 return t1;
9658 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9660 This results in more efficient code for machines without a NAND
9661 instruction. Combine will canonicalize to the first form
9662 which will allow use of NAND instructions provided by the
9663 backend if they exist. */
9664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9665 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9667 return fold_build1 (BIT_NOT_EXPR, type,
9668 build2 (BIT_AND_EXPR, type,
9669 TREE_OPERAND (arg0, 0),
9670 TREE_OPERAND (arg1, 0)));
9673 /* See if this can be simplified into a rotate first. If that
9674 is unsuccessful continue in the association code. */
9675 goto bit_rotate;
9677 case BIT_XOR_EXPR:
9678 if (integer_zerop (arg1))
9679 return non_lvalue (fold_convert (type, arg0));
9680 if (integer_all_onesp (arg1))
9681 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9682 if (operand_equal_p (arg0, arg1, 0))
9683 return omit_one_operand (type, integer_zero_node, arg0);
9685 /* ~X ^ X is -1. */
9686 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9689 t1 = build_int_cst (type, -1);
9690 t1 = force_fit_type (t1, 0, false, false);
9691 return omit_one_operand (type, t1, arg1);
9694 /* X ^ ~X is -1. */
9695 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9696 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9698 t1 = build_int_cst (type, -1);
9699 t1 = force_fit_type (t1, 0, false, false);
9700 return omit_one_operand (type, t1, arg0);
9703 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9704 with a constant, and the two constants have no bits in common,
9705 we should treat this as a BIT_IOR_EXPR since this may produce more
9706 simplifications. */
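/* Illustrative example: (X & 0xF0) ^ (Y & 0x0F) can set any given
   bit in at most one operand, so it is equivalent to
   (X & 0xF0) | (Y & 0x0F).  */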
9707 if (TREE_CODE (arg0) == BIT_AND_EXPR
9708 && TREE_CODE (arg1) == BIT_AND_EXPR
9709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9710 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9711 && integer_zerop (const_binop (BIT_AND_EXPR,
9712 TREE_OPERAND (arg0, 1),
9713 TREE_OPERAND (arg1, 1), 0)))
9715 code = BIT_IOR_EXPR;
9716 goto bit_ior;
9719 /* (X | Y) ^ X -> Y & ~X. */
9720 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9723 tree t2 = TREE_OPERAND (arg0, 1);
9724 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9725 arg1);
9726 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9727 fold_convert (type, t1));
9728 return t1;
9731 /* (Y | X) ^ X -> Y & ~X. */
9732 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9733 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9735 tree t2 = TREE_OPERAND (arg0, 0);
9736 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9737 arg1);
9738 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9739 fold_convert (type, t1));
9740 return t1;
9743 /* X ^ (X | Y) -> Y & ~X. */
9744 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9745 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9747 tree t2 = TREE_OPERAND (arg1, 1);
9748 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9749 arg0);
9750 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9751 fold_convert (type, t1));
9752 return t1;
9755 /* X ^ (Y | X) -> Y & ~X. */
9756 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9757 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9759 tree t2 = TREE_OPERAND (arg1, 0);
9760 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9761 arg0);
9762 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9763 fold_convert (type, t1));
9764 return t1;
9767 /* Convert ~X ^ ~Y to X ^ Y. */
9768 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9769 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9770 return fold_build2 (code, type,
9771 fold_convert (type, TREE_OPERAND (arg0, 0)),
9772 fold_convert (type, TREE_OPERAND (arg1, 0)));
9774 /* Convert ~X ^ C to X ^ ~C. */
9775 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9776 && TREE_CODE (arg1) == INTEGER_CST)
9777 return fold_build2 (code, type,
9778 fold_convert (type, TREE_OPERAND (arg0, 0)),
9779 fold_build1 (BIT_NOT_EXPR, type, arg1));
9781 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9782 if (TREE_CODE (arg0) == BIT_AND_EXPR
9783 && integer_onep (TREE_OPERAND (arg0, 1))
9784 && integer_onep (arg1))
9785 return fold_build2 (EQ_EXPR, type, arg0,
9786 build_int_cst (TREE_TYPE (arg0), 0));
9788 /* Fold (X & Y) ^ Y as ~X & Y. */
9789 if (TREE_CODE (arg0) == BIT_AND_EXPR
9790 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9792 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9793 return fold_build2 (BIT_AND_EXPR, type,
9794 fold_build1 (BIT_NOT_EXPR, type, tem),
9795 fold_convert (type, arg1));
9797 /* Fold (X & Y) ^ X as ~Y & X. */
9798 if (TREE_CODE (arg0) == BIT_AND_EXPR
9799 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9800 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9802 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9803 return fold_build2 (BIT_AND_EXPR, type,
9804 fold_build1 (BIT_NOT_EXPR, type, tem),
9805 fold_convert (type, arg1));
9807 /* Fold X ^ (X & Y) as X & ~Y. */
9808 if (TREE_CODE (arg1) == BIT_AND_EXPR
9809 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9811 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9812 return fold_build2 (BIT_AND_EXPR, type,
9813 fold_convert (type, arg0),
9814 fold_build1 (BIT_NOT_EXPR, type, tem));
9816 /* Fold X ^ (Y & X) as ~Y & X. */
9817 if (TREE_CODE (arg1) == BIT_AND_EXPR
9818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9819 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9821 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9822 return fold_build2 (BIT_AND_EXPR, type,
9823 fold_build1 (BIT_NOT_EXPR, type, tem),
9824 fold_convert (type, arg0));
9827 /* See if this can be simplified into a rotate first. If that
9828 is unsuccessful continue in the association code. */
9829 goto bit_rotate;
9831 case BIT_AND_EXPR:
9832 if (integer_all_onesp (arg1))
9833 return non_lvalue (fold_convert (type, arg0));
9834 if (integer_zerop (arg1))
9835 return omit_one_operand (type, arg1, arg0);
9836 if (operand_equal_p (arg0, arg1, 0))
9837 return non_lvalue (fold_convert (type, arg0));
9839 /* ~X & X is always zero. */
9840 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9841 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9842 return omit_one_operand (type, integer_zero_node, arg1);
9844 /* X & ~X is always zero. */
9845 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9846 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9847 return omit_one_operand (type, integer_zero_node, arg0);
9849 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9850 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9851 && TREE_CODE (arg1) == INTEGER_CST
9852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9853 return fold_build2 (BIT_IOR_EXPR, type,
9854 fold_build2 (BIT_AND_EXPR, type,
9855 TREE_OPERAND (arg0, 0), arg1),
9856 fold_build2 (BIT_AND_EXPR, type,
9857 TREE_OPERAND (arg0, 1), arg1));
9859 /* (X | Y) & Y is (X, Y). */
9860 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9861 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9862 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9863 /* (X | Y) & X is (Y, X). */
9864 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9865 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9866 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9867 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9868 /* X & (X | Y) is (Y, X). */
9869 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9870 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9871 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9872 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9873 /* X & (Y | X) is (Y, X). */
9874 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9876 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9877 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9879 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9880 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9881 && integer_onep (TREE_OPERAND (arg0, 1))
9882 && integer_onep (arg1))
9884 tem = TREE_OPERAND (arg0, 0);
9885 return fold_build2 (EQ_EXPR, type,
9886 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9887 build_int_cst (TREE_TYPE (tem), 1)),
9888 build_int_cst (TREE_TYPE (tem), 0));
9890 /* Fold ~X & 1 as (X & 1) == 0. */
9891 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9892 && integer_onep (arg1))
9894 tem = TREE_OPERAND (arg0, 0);
9895 return fold_build2 (EQ_EXPR, type,
9896 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9897 build_int_cst (TREE_TYPE (tem), 1)),
9898 build_int_cst (TREE_TYPE (tem), 0));
9901 /* Fold (X ^ Y) & Y as ~X & Y. */
9902 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9903 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9905 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9906 return fold_build2 (BIT_AND_EXPR, type,
9907 fold_build1 (BIT_NOT_EXPR, type, tem),
9908 fold_convert (type, arg1));
9910 /* Fold (X ^ Y) & X as ~Y & X. */
9911 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9912 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9913 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9915 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9916 return fold_build2 (BIT_AND_EXPR, type,
9917 fold_build1 (BIT_NOT_EXPR, type, tem),
9918 fold_convert (type, arg1));
9920 /* Fold X & (X ^ Y) as X & ~Y. */
9921 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9922 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9924 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9925 return fold_build2 (BIT_AND_EXPR, type,
9926 fold_convert (type, arg0),
9927 fold_build1 (BIT_NOT_EXPR, type, tem));
9929 /* Fold X & (Y ^ X) as ~Y & X. */
9930 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9931 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9932 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9934 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9935 return fold_build2 (BIT_AND_EXPR, type,
9936 fold_build1 (BIT_NOT_EXPR, type, tem),
9937 fold_convert (type, arg0));
9940 t1 = distribute_bit_expr (code, type, arg0, arg1);
9941 if (t1 != NULL_TREE)
9942 return t1;
9943 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9944 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9945 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9947 unsigned int prec
9948 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9950 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9951 && (~TREE_INT_CST_LOW (arg1)
9952 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9953 return fold_convert (type, TREE_OPERAND (arg0, 0));
9956 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9958 This results in more efficient code for machines without a NOR
9959 instruction. Combine will canonicalize to the first form
9960 which will allow use of NOR instructions provided by the
9961 backend if they exist. */
9962 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9963 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9965 return fold_build1 (BIT_NOT_EXPR, type,
9966 build2 (BIT_IOR_EXPR, type,
9967 TREE_OPERAND (arg0, 0),
9968 TREE_OPERAND (arg1, 0)));
9971 goto associate;
9973 case RDIV_EXPR:
9974 /* Don't touch a floating-point divide by zero unless the mode
9975 of the constant can represent infinity. */
9976 if (TREE_CODE (arg1) == REAL_CST
9977 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9978 && real_zerop (arg1))
9979 return NULL_TREE;
9981 /* Optimize A / A to 1.0 if we don't care about
9982 NaNs or Infinities. Skip the transformation
9983 for non-real operands. */
9984 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9985 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9986 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9987 && operand_equal_p (arg0, arg1, 0))
9989 tree r = build_real (TREE_TYPE (arg0), dconst1);
9991 return omit_two_operands (type, r, arg0, arg1);
9994 /* The complex version of the above A / A optimization. */
9995 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9996 && operand_equal_p (arg0, arg1, 0))
9998 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9999 if (! HONOR_NANS (TYPE_MODE (elem_type))
10000 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10002 tree r = build_real (elem_type, dconst1);
10003 /* omit_two_operands will call fold_convert for us. */
10004 return omit_two_operands (type, r, arg0, arg1);
10008 /* (-A) / (-B) -> A / B */
10009 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10010 return fold_build2 (RDIV_EXPR, type,
10011 TREE_OPERAND (arg0, 0),
10012 negate_expr (arg1));
10013 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10014 return fold_build2 (RDIV_EXPR, type,
10015 negate_expr (arg0),
10016 TREE_OPERAND (arg1, 0));
10018 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10019 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10020 && real_onep (arg1))
10021 return non_lvalue (fold_convert (type, arg0));
10023 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10024 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10025 && real_minus_onep (arg1))
10026 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10028 /* If ARG1 is a constant, we can convert this to a multiply by the
10029 reciprocal. This does not have the same rounding properties,
10030 so only do this if -funsafe-math-optimizations. We can actually
10031 always safely do it if ARG1 is a power of two, but it's hard to
10032 tell if it is or not in a portable manner. */
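/* Illustrative example: x / 3.0 becomes x * (1.0/3.0) only under
   -funsafe-math-optimizations, whereas the exact_real_inverse path
   below turns x / 4.0 into x * 0.25 whenever the reciprocal is
   exactly representable.  */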
10033 if (TREE_CODE (arg1) == REAL_CST)
10035 if (flag_unsafe_math_optimizations
10036 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10037 arg1, 0)))
10038 return fold_build2 (MULT_EXPR, type, arg0, tem);
10039 /* Find the reciprocal if optimizing and the result is exact. */
10040 if (optimize)
10042 REAL_VALUE_TYPE r;
10043 r = TREE_REAL_CST (arg1);
10044 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10046 tem = build_real (type, r);
10047 return fold_build2 (MULT_EXPR, type,
10048 fold_convert (type, arg0), tem);
10052 /* Convert A/B/C to A/(B*C). */
10053 if (flag_unsafe_math_optimizations
10054 && TREE_CODE (arg0) == RDIV_EXPR)
10055 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10056 fold_build2 (MULT_EXPR, type,
10057 TREE_OPERAND (arg0, 1), arg1));
10059 /* Convert A/(B/C) to (A/B)*C. */
10060 if (flag_unsafe_math_optimizations
10061 && TREE_CODE (arg1) == RDIV_EXPR)
10062 return fold_build2 (MULT_EXPR, type,
10063 fold_build2 (RDIV_EXPR, type, arg0,
10064 TREE_OPERAND (arg1, 0)),
10065 TREE_OPERAND (arg1, 1));
10067 /* Convert C1/(X*C2) into (C1/C2)/X. */
10068 if (flag_unsafe_math_optimizations
10069 && TREE_CODE (arg1) == MULT_EXPR
10070 && TREE_CODE (arg0) == REAL_CST
10071 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10073 tree tem = const_binop (RDIV_EXPR, arg0,
10074 TREE_OPERAND (arg1, 1), 0);
10075 if (tem)
10076 return fold_build2 (RDIV_EXPR, type, tem,
10077 TREE_OPERAND (arg1, 0));
10080 if (flag_unsafe_math_optimizations)
10082 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10083 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10085 /* Optimize sin(x)/cos(x) as tan(x). */
10086 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10087 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10088 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10089 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10090 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10092 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10094 if (tanfn != NULL_TREE)
10095 return build_function_call_expr (tanfn,
10096 TREE_OPERAND (arg0, 1));
10099 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10100 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10101 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10102 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10103 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10104 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10106 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10108 if (tanfn != NULL_TREE)
10110 tree tmp = TREE_OPERAND (arg0, 1);
10111 tmp = build_function_call_expr (tanfn, tmp);
10112 return fold_build2 (RDIV_EXPR, type,
10113 build_real (type, dconst1), tmp);
10117 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10118 NaNs or Infinities. */
10119 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10120 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10121 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10123 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10124 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10126 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10127 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10128 && operand_equal_p (arg00, arg01, 0))
10130 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10132 if (cosfn != NULL_TREE)
10133 return build_function_call_expr (cosfn,
10134 TREE_OPERAND (arg0, 1));
10138 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10139 NaNs or Infinities. */
10140 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10141 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10142 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10144 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10145 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10147 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10148 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10149 && operand_equal_p (arg00, arg01, 0))
10151 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10153 if (cosfn != NULL_TREE)
10155 tree tmp = TREE_OPERAND (arg0, 1);
10156 tmp = build_function_call_expr (cosfn, tmp);
10157 return fold_build2 (RDIV_EXPR, type,
10158 build_real (type, dconst1),
10159 tmp);
10164 /* Optimize pow(x,c)/x as pow(x,c-1). */
10165 if (fcode0 == BUILT_IN_POW
10166 || fcode0 == BUILT_IN_POWF
10167 || fcode0 == BUILT_IN_POWL)
10169 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10170 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10171 if (TREE_CODE (arg01) == REAL_CST
10172 && ! TREE_CONSTANT_OVERFLOW (arg01)
10173 && operand_equal_p (arg1, arg00, 0))
10175 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10176 REAL_VALUE_TYPE c;
10177 tree arg, arglist;
10179 c = TREE_REAL_CST (arg01);
10180 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10181 arg = build_real (type, c);
10182 arglist = build_tree_list (NULL_TREE, arg);
10183 arglist = tree_cons (NULL_TREE, arg1, arglist);
10184 return build_function_call_expr (powfn, arglist);
10188 /* Optimize x/expN(y) into x*expN(-y). */
10189 if (BUILTIN_EXPONENT_P (fcode1))
10191 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10192 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10193 tree arglist = build_tree_list (NULL_TREE,
10194 fold_convert (type, arg));
10195 arg1 = build_function_call_expr (expfn, arglist);
10196 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10199 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10200 if (fcode1 == BUILT_IN_POW
10201 || fcode1 == BUILT_IN_POWF
10202 || fcode1 == BUILT_IN_POWL)
10204 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10205 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10206 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10207 tree neg11 = fold_convert (type, negate_expr (arg11));
10208 tree arglist = tree_cons(NULL_TREE, arg10,
10209 build_tree_list (NULL_TREE, neg11));
10210 arg1 = build_function_call_expr (powfn, arglist);
10211 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10214 return NULL_TREE;
10216 case TRUNC_DIV_EXPR:
10217 case FLOOR_DIV_EXPR:
10218 /* Simplify A / (B << N) where A and B are positive and B is
10219 a power of 2, to A >> (N + log2(B)). */
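/* Illustrative example: for unsigned A, A / (4 << N) becomes
   A >> (N + 2), since log2 (4) == 2.  */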
10220 if (TREE_CODE (arg1) == LSHIFT_EXPR
10221 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10223 tree sval = TREE_OPERAND (arg1, 0);
10224 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10226 tree sh_cnt = TREE_OPERAND (arg1, 1);
10227 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10229 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10230 sh_cnt, build_int_cst (NULL_TREE, pow2));
10231 return fold_build2 (RSHIFT_EXPR, type,
10232 fold_convert (type, arg0), sh_cnt);
10235 /* Fall thru */
10237 case ROUND_DIV_EXPR:
10238 case CEIL_DIV_EXPR:
10239 case EXACT_DIV_EXPR:
10240 if (integer_onep (arg1))
10241 return non_lvalue (fold_convert (type, arg0));
10242 if (integer_zerop (arg1))
10243 return NULL_TREE;
10244 /* X / -1 is -X. */
10245 if (!TYPE_UNSIGNED (type)
10246 && TREE_CODE (arg1) == INTEGER_CST
10247 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10248 && TREE_INT_CST_HIGH (arg1) == -1)
10249 return fold_convert (type, negate_expr (arg0));
10251 /* Convert -A / -B to A / B when the type is signed and overflow is
10252 undefined. */
10253 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10254 && TREE_CODE (arg0) == NEGATE_EXPR
10255 && negate_expr_p (arg1))
10256 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10257 negate_expr (arg1));
10258 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10259 && TREE_CODE (arg1) == NEGATE_EXPR
10260 && negate_expr_p (arg0))
10261 return fold_build2 (code, type, negate_expr (arg0),
10262 TREE_OPERAND (arg1, 0));
10264 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10265 operation, EXACT_DIV_EXPR.
10267 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10268 At one time others generated faster code; it's not clear whether they
10269 still do after the last round of changes to the DIV code in expmed.c. */
10270 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10271 && multiple_of_p (type, arg0, arg1))
10272 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10274 if (TREE_CODE (arg1) == INTEGER_CST
10275 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10276 return fold_convert (type, tem);
10278 return NULL_TREE;
10280 case CEIL_MOD_EXPR:
10281 case FLOOR_MOD_EXPR:
10282 case ROUND_MOD_EXPR:
10283 case TRUNC_MOD_EXPR:
10284 /* X % 1 is always zero, but be sure to preserve any side
10285 effects in X. */
10286 if (integer_onep (arg1))
10287 return omit_one_operand (type, integer_zero_node, arg0);
10289 /* For X % 0, return X % 0 unchanged so that we can get the
10290 proper warnings and errors. */
10291 if (integer_zerop (arg1))
10292 return NULL_TREE;
10294 /* 0 % X is always zero, but be sure to preserve any side
10295 effects in X. Place this after checking for X == 0. */
10296 if (integer_zerop (arg0))
10297 return omit_one_operand (type, integer_zero_node, arg1);
10299 /* X % -1 is zero. */
10300 if (!TYPE_UNSIGNED (type)
10301 && TREE_CODE (arg1) == INTEGER_CST
10302 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10303 && TREE_INT_CST_HIGH (arg1) == -1)
10304 return omit_one_operand (type, integer_zero_node, arg0);
10306 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10307 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
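/* Illustrative example: for unsigned X, X % 8 becomes X & 7, and
   X % (4 << N) becomes X & ((4 << N) - 1).  */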
10308 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10309 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10311 tree c = arg1;
10312 /* Also optimize A % (C << N) where C is a power of 2,
10313 to A & ((C << N) - 1). */
10314 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10315 c = TREE_OPERAND (arg1, 0);
10317 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10319 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10320 build_int_cst (TREE_TYPE (arg1), 1));
10321 return fold_build2 (BIT_AND_EXPR, type,
10322 fold_convert (type, arg0),
10323 fold_convert (type, mask));
10327 /* X % -C is the same as X % C. */
10328 if (code == TRUNC_MOD_EXPR
10329 && !TYPE_UNSIGNED (type)
10330 && TREE_CODE (arg1) == INTEGER_CST
10331 && !TREE_CONSTANT_OVERFLOW (arg1)
10332 && TREE_INT_CST_HIGH (arg1) < 0
10333 && !flag_trapv
10334 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10335 && !sign_bit_p (arg1, arg1))
10336 return fold_build2 (code, type, fold_convert (type, arg0),
10337 fold_convert (type, negate_expr (arg1)));
10339 /* X % -Y is the same as X % Y. */
10340 if (code == TRUNC_MOD_EXPR
10341 && !TYPE_UNSIGNED (type)
10342 && TREE_CODE (arg1) == NEGATE_EXPR
10343 && !flag_trapv)
10344 return fold_build2 (code, type, fold_convert (type, arg0),
10345 fold_convert (type, TREE_OPERAND (arg1, 0)));
10347 if (TREE_CODE (arg1) == INTEGER_CST
10348 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10349 return fold_convert (type, tem);
10351 return NULL_TREE;
10353 case LROTATE_EXPR:
10354 case RROTATE_EXPR:
10355 if (integer_all_onesp (arg0))
10356 return omit_one_operand (type, arg0, arg1);
10357 goto shift;
10359 case RSHIFT_EXPR:
10360 /* Optimize -1 >> x for arithmetic right shifts. */
10361 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10362 return omit_one_operand (type, arg0, arg1);
10363 /* ... fall through ... */
10365 case LSHIFT_EXPR:
10366 shift:
10367 if (integer_zerop (arg1))
10368 return non_lvalue (fold_convert (type, arg0));
10369 if (integer_zerop (arg0))
10370 return omit_one_operand (type, arg0, arg1);
10372 /* Since a negative shift count is not well-defined,
10373 don't try to compute it in the compiler. */
10374 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10375 return NULL_TREE;
10377 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10378 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10379 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10380 && host_integerp (TREE_OPERAND (arg0, 1), false)
10381 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10383 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10384 + TREE_INT_CST_LOW (arg1));
10386 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10387 being well defined. */
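/* Illustratively, on a 32-bit type (x << 20) << 16 has a combined
   count of 36 >= 32 and folds to 0; a signed right shift saturates
   the count to 31; rotate counts are reduced modulo the precision.  */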
10388 if (low >= TYPE_PRECISION (type))
10390 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10391 low = low % TYPE_PRECISION (type);
10392 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10393 return build_int_cst (type, 0);
10394 else
10395 low = TYPE_PRECISION (type) - 1;
10398 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10399 build_int_cst (type, low));
10402 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10403 into x & ((unsigned)-1 >> c) for unsigned types. */
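/* Illustrative example with c == 4 on a 32-bit type: (x >> 4) << 4
   becomes x & 0xfffffff0, and for unsigned x, (x << 4) >> 4 becomes
   x & 0x0fffffff.  */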
10404 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10405 || (TYPE_UNSIGNED (type)
10406 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10407 && host_integerp (arg1, false)
10408 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10409 && host_integerp (TREE_OPERAND (arg0, 1), false)
10410 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10412 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10413 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10414 tree lshift;
10415 tree arg00;
10417 if (low0 == low1)
10419 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10421 lshift = build_int_cst (type, -1);
10422 lshift = int_const_binop (code, lshift, arg1, 0);
10424 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10428 /* Rewrite an LROTATE_EXPR by a constant into an
10429 RROTATE_EXPR by a new constant. */
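/* Illustrative example: on a 32-bit type, rotating left by 8 is
   rewritten as rotating right by 32 - 8 == 24.  */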
10430 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10432 tree tem = build_int_cst (TREE_TYPE (arg1),
10433 GET_MODE_BITSIZE (TYPE_MODE (type)));
10434 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10435 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10438 /* If we have a rotate of a bit operation with the rotate count and
10439 the second operand of the bit operation both constant,
10440 permute the two operations. */
10441 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10442 && (TREE_CODE (arg0) == BIT_AND_EXPR
10443 || TREE_CODE (arg0) == BIT_IOR_EXPR
10444 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10445 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10446 return fold_build2 (TREE_CODE (arg0), type,
10447 fold_build2 (code, type,
10448 TREE_OPERAND (arg0, 0), arg1),
10449 fold_build2 (code, type,
10450 TREE_OPERAND (arg0, 1), arg1));
10452 /* Two consecutive rotates adding up to the width of the mode can
10453 be ignored. */
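/* Illustrative example: on a 32-bit type, (x rotated right by 10)
   rotated right by 22 is x unchanged, since 10 + 22 == 32.  */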
10454 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10455 && TREE_CODE (arg0) == RROTATE_EXPR
10456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10457 && TREE_INT_CST_HIGH (arg1) == 0
10458 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10459 && ((TREE_INT_CST_LOW (arg1)
10460 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10461 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10462 return TREE_OPERAND (arg0, 0);
10464 return NULL_TREE;
10466 case MIN_EXPR:
10467 if (operand_equal_p (arg0, arg1, 0))
10468 return omit_one_operand (type, arg0, arg1);
10469 if (INTEGRAL_TYPE_P (type)
10470 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10471 return omit_one_operand (type, arg1, arg0);
10472 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10473 if (tem)
10474 return tem;
10475 goto associate;
10477 case MAX_EXPR:
10478 if (operand_equal_p (arg0, arg1, 0))
10479 return omit_one_operand (type, arg0, arg1);
10480 if (INTEGRAL_TYPE_P (type)
10481 && TYPE_MAX_VALUE (type)
10482 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10483 return omit_one_operand (type, arg1, arg0);
10484 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10485 if (tem)
10486 return tem;
10487 goto associate;
10489 case TRUTH_ANDIF_EXPR:
10490 /* Note that the operands of this must be ints
10491 and their values must be 0 or 1.
10492 ("true" is a fixed value perhaps depending on the language.) */
10493 /* If first arg is constant zero, return it. */
10494 if (integer_zerop (arg0))
10495 return fold_convert (type, arg0);
10496 case TRUTH_AND_EXPR:
10497 /* If either arg is constant true, drop it. */
10498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10499 return non_lvalue (fold_convert (type, arg1));
10500 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10501 /* Preserve sequence points. */
10502 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10503 return non_lvalue (fold_convert (type, arg0));
10504 /* If second arg is constant zero, result is zero, but first arg
10505 must be evaluated. */
10506 if (integer_zerop (arg1))
10507 return omit_one_operand (type, arg1, arg0);
10508 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10509 case will be handled here. */
10510 if (integer_zerop (arg0))
10511 return omit_one_operand (type, arg0, arg1);
10513 /* !X && X is always false. */
10514 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10516 return omit_one_operand (type, integer_zero_node, arg1);
10517 /* X && !X is always false. */
10518 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10520 return omit_one_operand (type, integer_zero_node, arg0);
10522 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10523 means A >= Y && A != MAX, but in this case we know that
10524 A < X <= MAX. */
10526 if (!TREE_SIDE_EFFECTS (arg0)
10527 && !TREE_SIDE_EFFECTS (arg1))
10529 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10530 if (tem && !operand_equal_p (tem, arg0, 0))
10531 return fold_build2 (code, type, tem, arg1);
10533 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10534 if (tem && !operand_equal_p (tem, arg1, 0))
10535 return fold_build2 (code, type, arg0, tem);
10538 truth_andor:
10539 /* We only do these simplifications if we are optimizing. */
10540 if (!optimize)
10541 return NULL_TREE;
10543 /* Check for things like (A || B) && (A || C). We can convert this
10544 to A || (B && C). Note that either operator can be any of the four
10545 truth and/or operations and the transformation will still be
10546 valid. Also note that we only care about order for the
10547 ANDIF and ORIF operators. If B contains side effects, this
10548 might change the truth-value of A. */
10549 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10550 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10551 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10552 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10553 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10554 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10556 tree a00 = TREE_OPERAND (arg0, 0);
10557 tree a01 = TREE_OPERAND (arg0, 1);
10558 tree a10 = TREE_OPERAND (arg1, 0);
10559 tree a11 = TREE_OPERAND (arg1, 1);
10560 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10561 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10562 && (code == TRUTH_AND_EXPR
10563 || code == TRUTH_OR_EXPR));
10565 if (operand_equal_p (a00, a10, 0))
10566 return fold_build2 (TREE_CODE (arg0), type, a00,
10567 fold_build2 (code, type, a01, a11));
10568 else if (commutative && operand_equal_p (a00, a11, 0))
10569 return fold_build2 (TREE_CODE (arg0), type, a00,
10570 fold_build2 (code, type, a01, a10));
10571 else if (commutative && operand_equal_p (a01, a10, 0))
10572 return fold_build2 (TREE_CODE (arg0), type, a01,
10573 fold_build2 (code, type, a00, a11));
10575 /* This case is tricky because we must either have commutative
10576 operators or else A10 must not have side-effects. */
10578 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10579 && operand_equal_p (a01, a11, 0))
10580 return fold_build2 (TREE_CODE (arg0), type,
10581 fold_build2 (code, type, a00, a10),
10582 a01);
10585 /* See if we can build a range comparison. */
10586 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10587 return tem;
10589 /* Check for the possibility of merging component references. If our
10590 lhs is another similar operation, try to merge its rhs with our
10591 rhs. Then try to merge our lhs and rhs. */
10592 if (TREE_CODE (arg0) == code
10593 && 0 != (tem = fold_truthop (code, type,
10594 TREE_OPERAND (arg0, 1), arg1)))
10595 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10597 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10598 return tem;
10600 return NULL_TREE;
10602 case TRUTH_ORIF_EXPR:
10603 /* Note that the operands of this must be ints
10604 and their values must be 0 or 1.
10605 ("true" is a fixed value perhaps depending on the language.) */
10606 /* If first arg is constant true, return it. */
10607 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10608 return fold_convert (type, arg0);
10609 case TRUTH_OR_EXPR:
10610 /* If either arg is constant zero, drop it. */
10611 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10612 return non_lvalue (fold_convert (type, arg1));
10613 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10614 /* Preserve sequence points. */
10615 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10616 return non_lvalue (fold_convert (type, arg0));
10617 /* If second arg is constant true, result is true, but we must
10618 evaluate first arg. */
10619 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10620 return omit_one_operand (type, arg1, arg0);
10621 /* Likewise for first arg, but note this only occurs here for
10622 TRUTH_OR_EXPR. */
10623 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10624 return omit_one_operand (type, arg0, arg1);
10626 /* !X || X is always true. */
10627 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10628 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10629 return omit_one_operand (type, integer_one_node, arg1);
10630 /* X || !X is always true. */
10631 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10632 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10633 return omit_one_operand (type, integer_one_node, arg0);
10635 goto truth_andor;
10637 case TRUTH_XOR_EXPR:
10638 /* If the second arg is constant zero, drop it. */
10639 if (integer_zerop (arg1))
10640 return non_lvalue (fold_convert (type, arg0));
10641 /* If the second arg is constant true, this is a logical inversion. */
10642 if (integer_onep (arg1))
10644 /* Only call invert_truthvalue if operand is a truth value. */
10645 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10646 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10647 else
10648 tem = invert_truthvalue (arg0);
10649 return non_lvalue (fold_convert (type, tem));
10651 /* Identical arguments cancel to zero. */
10652 if (operand_equal_p (arg0, arg1, 0))
10653 return omit_one_operand (type, integer_zero_node, arg0);
10655 /* !X ^ X is always true. */
10656 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10657 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10658 return omit_one_operand (type, integer_one_node, arg1);
10660 /* X ^ !X is always true. */
10661 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10662 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10663 return omit_one_operand (type, integer_one_node, arg0);
10665 return NULL_TREE;
10667 case EQ_EXPR:
10668 case NE_EXPR:
10669 tem = fold_comparison (code, type, op0, op1);
10670 if (tem != NULL_TREE)
10671 return tem;
10673 /* bool_var != 0 becomes bool_var. */
10674 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10675 && code == NE_EXPR)
10676 return non_lvalue (fold_convert (type, arg0));
10678 /* bool_var == 1 becomes bool_var. */
10679 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10680 && code == EQ_EXPR)
10681 return non_lvalue (fold_convert (type, arg0));
10683 /* bool_var != 1 becomes !bool_var. */
10684 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10685 && code == NE_EXPR)
10686 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10688 /* bool_var == 0 becomes !bool_var. */
10689 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10690 && code == EQ_EXPR)
10691 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10693 /* If this is an equality comparison of the address of a non-weak
10694 object against zero, then we know the result. */
10695 if (TREE_CODE (arg0) == ADDR_EXPR
10696 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10697 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10698 && integer_zerop (arg1))
10699 return constant_boolean_node (code != EQ_EXPR, type);
10701 /* If this is an equality comparison of the address of two non-weak,
10702 unaliased symbols, neither of which is extern (since we do not
10703 have access to attributes for externs), then we know the result. */
10704 if (TREE_CODE (arg0) == ADDR_EXPR
10705 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10706 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10707 && ! lookup_attribute ("alias",
10708 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10709 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10710 && TREE_CODE (arg1) == ADDR_EXPR
10711 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10712 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10713 && ! lookup_attribute ("alias",
10714 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10715 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10717 /* We know that we're looking at the address of two
10718 non-weak, unaliased, static _DECL nodes.
10720 It is both wasteful and incorrect to call operand_equal_p
10721 to compare the two ADDR_EXPR nodes. It is wasteful in that
10722 all we need to do is test pointer equality for the arguments
10723 to the two ADDR_EXPR nodes. It is incorrect to use
10724 operand_equal_p as that function is NOT equivalent to a
10725 C equality test. It can in fact return false for two
10726 objects which would test as equal using the C equality
10727 operator. */
10728 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10729 return constant_boolean_node (equal
10730 ? code == EQ_EXPR : code != EQ_EXPR,
10731 type);
10734 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10735 a MINUS_EXPR of a constant, we can convert it into a comparison with
10736 a revised constant as long as no overflow occurs. */
10737 if (TREE_CODE (arg1) == INTEGER_CST
10738 && (TREE_CODE (arg0) == PLUS_EXPR
10739 || TREE_CODE (arg0) == MINUS_EXPR)
10740 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10741 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10742 ? MINUS_EXPR : PLUS_EXPR,
10743 fold_convert (TREE_TYPE (arg0), arg1),
10744 TREE_OPERAND (arg0, 1), 0))
10745 && ! TREE_CONSTANT_OVERFLOW (tem))
10746 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
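/* For example, the transformation above rewrites
     (X + 3) == 7  =>  X == 4      (X - 5) != 2  =>  X != 7
   as long as computing the adjusted constant does not overflow.  */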
10748 /* Similarly for a NEGATE_EXPR. */
10749 if (TREE_CODE (arg0) == NEGATE_EXPR
10750 && TREE_CODE (arg1) == INTEGER_CST
10751 && 0 != (tem = negate_expr (arg1))
10752 && TREE_CODE (tem) == INTEGER_CST
10753 && ! TREE_CONSTANT_OVERFLOW (tem))
10754 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10756 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10757 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10758 && TREE_CODE (arg1) == INTEGER_CST
10759 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10760 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10761 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10762 fold_convert (TREE_TYPE (arg0), arg1),
10763 TREE_OPERAND (arg0, 1)));
10765 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10766 for !=. Don't do this for ordered comparisons due to overflow. */
10767 if (TREE_CODE (arg0) == MINUS_EXPR
10768 && integer_zerop (arg1))
10769 return fold_build2 (code, type,
10770 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10772 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10773 if (TREE_CODE (arg0) == ABS_EXPR
10774 && (integer_zerop (arg1) || real_zerop (arg1)))
10775 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10777 /* If this is an EQ or NE comparison with zero and ARG0 is
10778 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10779 two operations, but the latter can be done in one less insn
10780 on machines that have only two-operand insns or on which a
10781 constant cannot be the first operand. */
10782 if (TREE_CODE (arg0) == BIT_AND_EXPR
10783 && integer_zerop (arg1))
10785 tree arg00 = TREE_OPERAND (arg0, 0);
10786 tree arg01 = TREE_OPERAND (arg0, 1);
10787 if (TREE_CODE (arg00) == LSHIFT_EXPR
10788 && integer_onep (TREE_OPERAND (arg00, 0)))
10789 return
10790 fold_build2 (code, type,
10791 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10792 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10793 arg01, TREE_OPERAND (arg00, 1)),
10794 fold_convert (TREE_TYPE (arg0),
10795 integer_one_node)),
10796 arg1);
10797 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10798 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10799 return
10800 fold_build2 (code, type,
10801 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10802 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10803 arg00, TREE_OPERAND (arg01, 1)),
10804 fold_convert (TREE_TYPE (arg0),
10805 integer_one_node)),
10806 arg1);
10809 /* If this is an NE or EQ comparison of zero against the result of a
10810 signed MOD operation whose second operand is a power of 2, make
10811 the MOD operation unsigned since it is simpler and equivalent. */
10812 if (integer_zerop (arg1)
10813 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10814 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10815 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10816 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10817 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10818 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10820 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10821 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10822 fold_convert (newtype,
10823 TREE_OPERAND (arg0, 0)),
10824 fold_convert (newtype,
10825 TREE_OPERAND (arg0, 1)));
10827 return fold_build2 (code, type, newmod,
10828 fold_convert (newtype, arg1));
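/* For example, with signed X the test
     (X % 4) == 0  =>  ((unsigned) X % 4) == 0
   since a comparison against zero of a modulus by a power of two
   depends only on the low-order bits; the unsigned form typically
   reduces to a simple mask test.  */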
10831 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10832 C1 is a valid shift constant, and C2 is a power of two, i.e.
10833 a single bit. */
10834 if (TREE_CODE (arg0) == BIT_AND_EXPR
10835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10836 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10837 == INTEGER_CST
10838 && integer_pow2p (TREE_OPERAND (arg0, 1))
10839 && integer_zerop (arg1))
10841 tree itype = TREE_TYPE (arg0);
10842 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10843 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10845 /* Check for a valid shift count. */
10846 if (TREE_INT_CST_HIGH (arg001) == 0
10847 && TREE_INT_CST_LOW (arg001) < prec)
10849 tree arg01 = TREE_OPERAND (arg0, 1);
10850 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10851 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10852 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10853 can be rewritten as (X & (C2 << C1)) != 0. */
10854 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10856 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10857 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10858 return fold_build2 (code, type, tem, arg1);
10860 /* Otherwise, for signed (arithmetic) shifts,
10861 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10862 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10863 else if (!TYPE_UNSIGNED (itype))
10864 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10865 arg000, build_int_cst (itype, 0));
10866 /* Otherwise, for unsigned (logical) shifts,
10867 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10868 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10869 else
10870 return omit_one_operand (type,
10871 code == EQ_EXPR ? integer_one_node
10872 : integer_zero_node,
10873 arg000);
10877 /* If this is an NE comparison of zero with an AND of one, remove the
10878 comparison since the AND will give the correct value. */
10879 if (code == NE_EXPR
10880 && integer_zerop (arg1)
10881 && TREE_CODE (arg0) == BIT_AND_EXPR
10882 && integer_onep (TREE_OPERAND (arg0, 1)))
10883 return fold_convert (type, arg0);
10885 /* If we have (A & C) == C where C is a power of 2, convert this into
10886 (A & C) != 0. Similarly for NE_EXPR. */
10887 if (TREE_CODE (arg0) == BIT_AND_EXPR
10888 && integer_pow2p (TREE_OPERAND (arg0, 1))
10889 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10890 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10891 arg0, fold_convert (TREE_TYPE (arg0),
10892 integer_zero_node));
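/* Testing a single bit for equality against that same bit is just
   a nonzero test; e.g.  (A & 8) == 8  =>  (A & 8) != 0.  */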
10894 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10895 bit, then fold the expression into A < 0 or A >= 0. */
10896 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10897 if (tem)
10898 return tem;
10900 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10901 Similarly for NE_EXPR. */
10902 if (TREE_CODE (arg0) == BIT_AND_EXPR
10903 && TREE_CODE (arg1) == INTEGER_CST
10904 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10906 tree notc = fold_build1 (BIT_NOT_EXPR,
10907 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10908 TREE_OPERAND (arg0, 1));
10909 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10910 arg1, notc);
10911 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10912 if (integer_nonzerop (dandnotc))
10913 return omit_one_operand (type, rslt, arg0);
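/* For example, (A & 4) == 3 can never hold, since 3 has bits
   outside the mask 4; it folds to 0 (and the != form to 1).  */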
10916 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10917 Similarly for NE_EXPR. */
10918 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10919 && TREE_CODE (arg1) == INTEGER_CST
10920 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10922 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10923 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10924 TREE_OPERAND (arg0, 1), notd);
10925 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10926 if (integer_nonzerop (candnotd))
10927 return omit_one_operand (type, rslt, arg0);
10930 /* If this is a comparison of a field, we may be able to simplify it. */
10931 if (((TREE_CODE (arg0) == COMPONENT_REF
10932 && lang_hooks.can_use_bit_fields_p ())
10933 || TREE_CODE (arg0) == BIT_FIELD_REF)
10934 /* Handle the constant case even without -O
10935 to make sure the warnings are given. */
10936 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10938 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10939 if (t1)
10940 return t1;
10943 /* Optimize comparisons of strlen vs zero to a compare of the
10944 first character of the string vs zero. To wit,
10945 strlen(ptr) == 0 => *ptr == 0
10946 strlen(ptr) != 0 => *ptr != 0
10947 Other cases should reduce to one of these two (or a constant)
10948 due to the return value of strlen being unsigned. */
10949 if (TREE_CODE (arg0) == CALL_EXPR
10950 && integer_zerop (arg1))
10952 tree fndecl = get_callee_fndecl (arg0);
10953 tree arglist;
10955 if (fndecl
10956 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10957 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10958 && (arglist = TREE_OPERAND (arg0, 1))
10959 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10960 && ! TREE_CHAIN (arglist))
10962 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10963 return fold_build2 (code, type, iref,
10964 build_int_cst (TREE_TYPE (iref), 0));
10968 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10969 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10970 if (TREE_CODE (arg0) == RSHIFT_EXPR
10971 && integer_zerop (arg1)
10972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10974 tree arg00 = TREE_OPERAND (arg0, 0);
10975 tree arg01 = TREE_OPERAND (arg0, 1);
10976 tree itype = TREE_TYPE (arg00);
10977 if (TREE_INT_CST_HIGH (arg01) == 0
10978 && TREE_INT_CST_LOW (arg01)
10979 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10981 if (TYPE_UNSIGNED (itype))
10983 itype = lang_hooks.types.signed_type (itype);
10984 arg00 = fold_convert (itype, arg00);
10986 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10987 type, arg00, build_int_cst (itype, 0));
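/* For example, assuming a 32-bit int X:
     (X >> 31) != 0  =>  X < 0      (X >> 31) == 0  =>  X >= 0
   because the shift leaves only the sign bit.  */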
10991 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10992 if (integer_zerop (arg1)
10993 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10994 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10995 TREE_OPERAND (arg0, 1));
10997 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10998 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10999 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11000 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11001 build_int_cst (TREE_TYPE (arg1), 0));
11002 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11003 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11004 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11005 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11006 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11007 build_int_cst (TREE_TYPE (arg1), 0));
11009 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11010 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11011 && TREE_CODE (arg1) == INTEGER_CST
11012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11013 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11014 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11015 TREE_OPERAND (arg0, 1), arg1));
11017 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11018 (X & C) == 0 when C is a single bit. */
11019 if (TREE_CODE (arg0) == BIT_AND_EXPR
11020 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11021 && integer_zerop (arg1)
11022 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11024 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11025 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11026 TREE_OPERAND (arg0, 1));
11027 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11028 type, tem, arg1);
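/* E.g. with the single-bit constant 16:
     (~X & 16) == 0  =>  (X & 16) != 0
   since bit 4 of ~X is clear exactly when bit 4 of X is set.  */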
11031 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11032 constant C is a power of two, i.e. a single bit. */
11033 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11034 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11035 && integer_zerop (arg1)
11036 && integer_pow2p (TREE_OPERAND (arg0, 1))
11037 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11038 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11040 tree arg00 = TREE_OPERAND (arg0, 0);
11041 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11042 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11045 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11046 when C is a power of two, i.e. a single bit. */
11047 if (TREE_CODE (arg0) == BIT_AND_EXPR
11048 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11049 && integer_zerop (arg1)
11050 && integer_pow2p (TREE_OPERAND (arg0, 1))
11051 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11052 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11054 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11055 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11056 arg000, TREE_OPERAND (arg0, 1));
11057 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11058 tem, build_int_cst (TREE_TYPE (tem), 0));
11061 if (integer_zerop (arg1)
11062 && tree_expr_nonzero_p (arg0))
11064 tree res = constant_boolean_node (code==NE_EXPR, type);
11065 return omit_one_operand (type, res, arg0);
11068 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11069 if (TREE_CODE (arg0) == NEGATE_EXPR
11070 && TREE_CODE (arg1) == NEGATE_EXPR)
11071 return fold_build2 (code, type,
11072 TREE_OPERAND (arg0, 0),
11073 TREE_OPERAND (arg1, 0));
11075 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11076 if (TREE_CODE (arg0) == BIT_AND_EXPR
11077 && TREE_CODE (arg1) == BIT_AND_EXPR)
11079 tree arg00 = TREE_OPERAND (arg0, 0);
11080 tree arg01 = TREE_OPERAND (arg0, 1);
11081 tree arg10 = TREE_OPERAND (arg1, 0);
11082 tree arg11 = TREE_OPERAND (arg1, 1);
11083 tree itype = TREE_TYPE (arg0);
11085 if (operand_equal_p (arg01, arg11, 0))
11086 return fold_build2 (code, type,
11087 fold_build2 (BIT_AND_EXPR, itype,
11088 fold_build2 (BIT_XOR_EXPR, itype,
11089 arg00, arg10),
11090 arg01),
11091 build_int_cst (itype, 0));
11093 if (operand_equal_p (arg01, arg10, 0))
11094 return fold_build2 (code, type,
11095 fold_build2 (BIT_AND_EXPR, itype,
11096 fold_build2 (BIT_XOR_EXPR, itype,
11097 arg00, arg11),
11098 arg01),
11099 build_int_cst (itype, 0));
11101 if (operand_equal_p (arg00, arg11, 0))
11102 return fold_build2 (code, type,
11103 fold_build2 (BIT_AND_EXPR, itype,
11104 fold_build2 (BIT_XOR_EXPR, itype,
11105 arg01, arg10),
11106 arg00),
11107 build_int_cst (itype, 0));
11109 if (operand_equal_p (arg00, arg10, 0))
11110 return fold_build2 (code, type,
11111 fold_build2 (BIT_AND_EXPR, itype,
11112 fold_build2 (BIT_XOR_EXPR, itype,
11113 arg01, arg11),
11114 arg00),
11115 build_int_cst (itype, 0));
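/* As a concrete sketch of the masked comparisons above:
     (X & 0xff) == (Y & 0xff)  =>  ((X ^ Y) & 0xff) == 0
   i.e. the masked operands are equal iff their XOR has no bits
   set inside the mask.  */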
11118 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11119 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11121 tree arg00 = TREE_OPERAND (arg0, 0);
11122 tree arg01 = TREE_OPERAND (arg0, 1);
11123 tree arg10 = TREE_OPERAND (arg1, 0);
11124 tree arg11 = TREE_OPERAND (arg1, 1);
11125 tree itype = TREE_TYPE (arg0);
11127 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11128 operand_equal_p guarantees no side-effects so we don't need
11129 to use omit_one_operand on Z. */
11130 if (operand_equal_p (arg01, arg11, 0))
11131 return fold_build2 (code, type, arg00, arg10);
11132 if (operand_equal_p (arg01, arg10, 0))
11133 return fold_build2 (code, type, arg00, arg11);
11134 if (operand_equal_p (arg00, arg11, 0))
11135 return fold_build2 (code, type, arg01, arg10);
11136 if (operand_equal_p (arg00, arg10, 0))
11137 return fold_build2 (code, type, arg01, arg11);
11139 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11140 if (TREE_CODE (arg01) == INTEGER_CST
11141 && TREE_CODE (arg11) == INTEGER_CST)
11142 return fold_build2 (code, type,
11143 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11144 fold_build2 (BIT_XOR_EXPR, itype,
11145 arg01, arg11)),
11146 arg10);
11148 return NULL_TREE;
11150 case LT_EXPR:
11151 case GT_EXPR:
11152 case LE_EXPR:
11153 case GE_EXPR:
11154 tem = fold_comparison (code, type, op0, op1);
11155 if (tem != NULL_TREE)
11156 return tem;
11158 /* Transform comparisons of the form X +- C CMP X. */
11159 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11160 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11161 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11162 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11163 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11164 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11165 && !(flag_wrapv || flag_trapv))))
11167 tree arg01 = TREE_OPERAND (arg0, 1);
11168 enum tree_code code0 = TREE_CODE (arg0);
11169 int is_positive;
11171 if (TREE_CODE (arg01) == REAL_CST)
11172 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11173 else
11174 is_positive = tree_int_cst_sgn (arg01);
11176 /* (X - c) > X becomes false. */
11177 if (code == GT_EXPR
11178 && ((code0 == MINUS_EXPR && is_positive >= 0)
11179 || (code0 == PLUS_EXPR && is_positive <= 0)))
11180 return constant_boolean_node (0, type);
11182 /* Likewise (X + c) < X becomes false. */
11183 if (code == LT_EXPR
11184 && ((code0 == PLUS_EXPR && is_positive >= 0)
11185 || (code0 == MINUS_EXPR && is_positive <= 0)))
11186 return constant_boolean_node (0, type);
11188 /* Convert (X - c) <= X to true. */
11189 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11190 && code == LE_EXPR
11191 && ((code0 == MINUS_EXPR && is_positive >= 0)
11192 || (code0 == PLUS_EXPR && is_positive <= 0)))
11193 return constant_boolean_node (1, type);
11195 /* Convert (X + c) >= X to true. */
11196 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11197 && code == GE_EXPR
11198 && ((code0 == PLUS_EXPR && is_positive >= 0)
11199 || (code0 == MINUS_EXPR && is_positive <= 0)))
11200 return constant_boolean_node (1, type);
11202 if (TREE_CODE (arg01) == INTEGER_CST)
11204 /* Convert X + c > X and X - c < X to true for integers. */
11205 if (code == GT_EXPR
11206 && ((code0 == PLUS_EXPR && is_positive > 0)
11207 || (code0 == MINUS_EXPR && is_positive < 0)))
11208 return constant_boolean_node (1, type);
11210 if (code == LT_EXPR
11211 && ((code0 == MINUS_EXPR && is_positive > 0)
11212 || (code0 == PLUS_EXPR && is_positive < 0)))
11213 return constant_boolean_node (1, type);
11215 /* Convert X + c <= X and X - c >= X to false for integers. */
11216 if (code == LE_EXPR
11217 && ((code0 == PLUS_EXPR && is_positive > 0)
11218 || (code0 == MINUS_EXPR && is_positive < 0)))
11219 return constant_boolean_node (0, type);
11221 if (code == GE_EXPR
11222 && ((code0 == MINUS_EXPR && is_positive > 0)
11223 || (code0 == PLUS_EXPR && is_positive < 0)))
11224 return constant_boolean_node (0, type);
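/* For example, for signed X when neither -fwrapv nor -ftrapv is in
   effect (so signed overflow is assumed not to wrap or trap):
     X + 1 > X   =>  true       X - 1 < X   =>  true
     X + 1 <= X  =>  false      X - 1 >= X  =>  false  */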
11228 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11229 This transformation affects the cases which are handled in later
11230 optimizations involving comparisons with non-negative constants. */
11231 if (TREE_CODE (arg1) == INTEGER_CST
11232 && TREE_CODE (arg0) != INTEGER_CST
11233 && tree_int_cst_sgn (arg1) > 0)
11235 if (code == GE_EXPR)
11237 arg1 = const_binop (MINUS_EXPR, arg1,
11238 build_int_cst (TREE_TYPE (arg1), 1), 0);
11239 return fold_build2 (GT_EXPR, type, arg0,
11240 fold_convert (TREE_TYPE (arg0), arg1));
11242 if (code == LT_EXPR)
11244 arg1 = const_binop (MINUS_EXPR, arg1,
11245 build_int_cst (TREE_TYPE (arg1), 1), 0);
11246 return fold_build2 (LE_EXPR, type, arg0,
11247 fold_convert (TREE_TYPE (arg0), arg1));
11251 /* Comparisons with the highest or lowest possible integer of
11252 the specified precision will have known values. */
11254 tree arg1_type = TREE_TYPE (arg1);
11255 unsigned int width = TYPE_PRECISION (arg1_type);
11257 if (TREE_CODE (arg1) == INTEGER_CST
11258 && ! TREE_CONSTANT_OVERFLOW (arg1)
11259 && width <= 2 * HOST_BITS_PER_WIDE_INT
11260 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11262 HOST_WIDE_INT signed_max_hi;
11263 unsigned HOST_WIDE_INT signed_max_lo;
11264 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11266 if (width <= HOST_BITS_PER_WIDE_INT)
11268 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11269 - 1;
11270 signed_max_hi = 0;
11271 max_hi = 0;
11273 if (TYPE_UNSIGNED (arg1_type))
11275 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11276 min_lo = 0;
11277 min_hi = 0;
11279 else
11281 max_lo = signed_max_lo;
11282 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11283 min_hi = -1;
11286 else
11288 width -= HOST_BITS_PER_WIDE_INT;
11289 signed_max_lo = -1;
11290 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11291 - 1;
11292 max_lo = -1;
11293 min_lo = 0;
11295 if (TYPE_UNSIGNED (arg1_type))
11297 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11298 min_hi = 0;
11300 else
11302 max_hi = signed_max_hi;
11303 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11307 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11308 && TREE_INT_CST_LOW (arg1) == max_lo)
11309 switch (code)
11311 case GT_EXPR:
11312 return omit_one_operand (type, integer_zero_node, arg0);
11314 case GE_EXPR:
11315 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11317 case LE_EXPR:
11318 return omit_one_operand (type, integer_one_node, arg0);
11320 case LT_EXPR:
11321 return fold_build2 (NE_EXPR, type, arg0, arg1);
11323 /* The GE_EXPR and LT_EXPR cases above are not normally
11324 reached because of previous transformations. */
11326 default:
11327 break;
11329 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11330 == max_hi
11331 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11332 switch (code)
11334 case GT_EXPR:
11335 arg1 = const_binop (PLUS_EXPR, arg1,
11336 build_int_cst (TREE_TYPE (arg1), 1), 0);
11337 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11338 case LE_EXPR:
11339 arg1 = const_binop (PLUS_EXPR, arg1,
11340 build_int_cst (TREE_TYPE (arg1), 1), 0);
11341 return fold_build2 (NE_EXPR, type, arg0, arg1);
11342 default:
11343 break;
11345 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11346 == min_hi
11347 && TREE_INT_CST_LOW (arg1) == min_lo)
11348 switch (code)
11350 case LT_EXPR:
11351 return omit_one_operand (type, integer_zero_node, arg0);
11353 case LE_EXPR:
11354 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11356 case GE_EXPR:
11357 return omit_one_operand (type, integer_one_node, arg0);
11359 case GT_EXPR:
11360 return fold_build2 (NE_EXPR, type, op0, op1);
11362 default:
11363 break;
11365 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11366 == min_hi
11367 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11368 switch (code)
11370 case GE_EXPR:
11371 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11372 return fold_build2 (NE_EXPR, type, arg0, arg1);
11373 case LT_EXPR:
11374 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11375 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11376 default:
11377 break;
11380 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11381 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11382 && TYPE_UNSIGNED (arg1_type)
11383 /* We will flip the signedness of the comparison operator
11384 associated with the mode of arg1, so the sign bit is
11385 specified by this mode. Check that arg1 is the signed
11386 max associated with this sign bit. */
11387 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11388 /* signed_type does not work on pointer types. */
11389 && INTEGRAL_TYPE_P (arg1_type))
11391 /* The following case also applies to X < signed_max+1
11392 and X >= signed_max+1 because of previous transformations. */
11393 if (code == LE_EXPR || code == GT_EXPR)
11395 tree st0, st1;
11396 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11397 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11398 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11399 type, fold_convert (st0, arg0),
11400 build_int_cst (st1, 0));
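/* For example, assuming 32-bit unsigned X:
     X <= 0x7fffffff  =>  (int) X >= 0
     X >  0x7fffffff  =>  (int) X < 0
   i.e. comparing against the signed maximum is a sign-bit test.  */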
11406 /* If we are comparing an ABS_EXPR with a constant, we can
11407 convert all the cases into explicit comparisons, but they may
11408 well not be faster than doing the ABS and one comparison.
11409 But ABS (X) <= C is a range comparison, which becomes a subtraction
11410 and a comparison, and is probably faster. */
11411 if (code == LE_EXPR
11412 && TREE_CODE (arg1) == INTEGER_CST
11413 && TREE_CODE (arg0) == ABS_EXPR
11414 && ! TREE_SIDE_EFFECTS (arg0)
11415 && (0 != (tem = negate_expr (arg1)))
11416 && TREE_CODE (tem) == INTEGER_CST
11417 && ! TREE_CONSTANT_OVERFLOW (tem))
11418 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11419 build2 (GE_EXPR, type,
11420 TREE_OPERAND (arg0, 0), tem),
11421 build2 (LE_EXPR, type,
11422 TREE_OPERAND (arg0, 0), arg1));
11424 /* Convert ABS_EXPR<x> >= 0 to true. */
11425 if (code == GE_EXPR
11426 && tree_expr_nonnegative_p (arg0)
11427 && (integer_zerop (arg1)
11428 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11429 && real_zerop (arg1))))
11430 return omit_one_operand (type, integer_one_node, arg0);
11432 /* Convert ABS_EXPR<x> < 0 to false. */
11433 if (code == LT_EXPR
11434 && tree_expr_nonnegative_p (arg0)
11435 && (integer_zerop (arg1) || real_zerop (arg1)))
11436 return omit_one_operand (type, integer_zero_node, arg0);
11438 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11439 and similarly for >= into !=. */
11440 if ((code == LT_EXPR || code == GE_EXPR)
11441 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11442 && TREE_CODE (arg1) == LSHIFT_EXPR
11443 && integer_onep (TREE_OPERAND (arg1, 0)))
11444 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11445 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11446 TREE_OPERAND (arg1, 1)),
11447 build_int_cst (TREE_TYPE (arg0), 0));
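/* E.g. for unsigned X:  X < (1 << Y)  =>  (X >> Y) == 0, since X is
   below 2**Y exactly when no bit at position Y or above is set.  */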
11449 if ((code == LT_EXPR || code == GE_EXPR)
11450 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11451 && (TREE_CODE (arg1) == NOP_EXPR
11452 || TREE_CODE (arg1) == CONVERT_EXPR)
11453 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11454 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11455 return
11456 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11457 fold_convert (TREE_TYPE (arg0),
11458 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11459 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11460 1))),
11461 build_int_cst (TREE_TYPE (arg0), 0));
11463 return NULL_TREE;
11465 case UNORDERED_EXPR:
11466 case ORDERED_EXPR:
11467 case UNLT_EXPR:
11468 case UNLE_EXPR:
11469 case UNGT_EXPR:
11470 case UNGE_EXPR:
11471 case UNEQ_EXPR:
11472 case LTGT_EXPR:
11473 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11475 t1 = fold_relational_const (code, type, arg0, arg1);
11476 if (t1 != NULL_TREE)
11477 return t1;
11480 /* If the first operand is NaN, the result is constant. */
11481 if (TREE_CODE (arg0) == REAL_CST
11482 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11483 && (code != LTGT_EXPR || ! flag_trapping_math))
11485 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11486 ? integer_zero_node
11487 : integer_one_node;
11488 return omit_one_operand (type, t1, arg1);
11491 /* If the second operand is NaN, the result is constant. */
11492 if (TREE_CODE (arg1) == REAL_CST
11493 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11494 && (code != LTGT_EXPR || ! flag_trapping_math))
11496 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11497 ? integer_zero_node
11498 : integer_one_node;
11499 return omit_one_operand (type, t1, arg0);
11502 /* Simplify unordered comparison of something with itself. */
11503 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11504 && operand_equal_p (arg0, arg1, 0))
11505 return constant_boolean_node (1, type);
11507 if (code == LTGT_EXPR
11508 && !flag_trapping_math
11509 && operand_equal_p (arg0, arg1, 0))
11510 return constant_boolean_node (0, type);
11512 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11514 tree targ0 = strip_float_extensions (arg0);
11515 tree targ1 = strip_float_extensions (arg1);
11516 tree newtype = TREE_TYPE (targ0);
11518 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11519 newtype = TREE_TYPE (targ1);
11521 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11522 return fold_build2 (code, type, fold_convert (newtype, targ0),
11523 fold_convert (newtype, targ1));
11526 return NULL_TREE;
11528 case COMPOUND_EXPR:
11529 /* When pedantic, a compound expression can be neither an lvalue
11530 nor an integer constant expression. */
11531 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11532 return NULL_TREE;
11534 /* Don't let (0, 0) be a null pointer constant. */
11534 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11535 : fold_convert (type, arg1);
11536 return pedantic_non_lvalue (tem);
11538 case COMPLEX_EXPR:
11539 if ((TREE_CODE (arg0) == REAL_CST
11540 && TREE_CODE (arg1) == REAL_CST)
11541 || (TREE_CODE (arg0) == INTEGER_CST
11542 && TREE_CODE (arg1) == INTEGER_CST))
11543 return build_complex (type, arg0, arg1);
11544 return NULL_TREE;
11546 case ASSERT_EXPR:
11547 /* An ASSERT_EXPR should never be passed to fold_binary. */
11548 gcc_unreachable ();
11550 default:
11551 return NULL_TREE;
11552 } /* switch (code) */
11555 /* Callback for walk_tree, looking for LABEL_EXPR.
11556 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
11557 Do not check the sub-tree of GOTO_EXPR. */
11559 static tree
11560 contains_label_1 (tree *tp,
11561 int *walk_subtrees,
11562 void *data ATTRIBUTE_UNUSED)
11564 switch (TREE_CODE (*tp))
11566 case LABEL_EXPR:
11567 return *tp;
11568 case GOTO_EXPR:
11569 *walk_subtrees = 0;
11570 /* no break */
11571 default:
11572 return NULL_TREE;
11576 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11577 accessible from outside the sub-tree. Returns false if no
11578 addressable label is found. */
11580 static bool
11581 contains_label_p (tree st)
11583 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11586 /* Fold a ternary expression of code CODE and type TYPE with operands
11587 OP0, OP1, and OP2. Return the folded expression if folding is
11588 successful. Otherwise, return NULL_TREE. */
11590 tree
11591 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11593 tree tem;
11594 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11595 enum tree_code_class kind = TREE_CODE_CLASS (code);
11597 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11598 && TREE_CODE_LENGTH (code) == 3);
11600 /* Strip any conversions that don't change the mode. This is safe
11601 for every expression, except for a comparison expression because
11602 its signedness is derived from its operands. So, in the latter
11603 case, only strip conversions that don't change the signedness.
11605 Note that this is done as an internal manipulation within the
11606 constant folder, in order to find the simplest representation of
11607 the arguments so that their form can be studied. In any case,
11608 the appropriate type conversions should be put back in the tree
11609 that will get out of the constant folder. */
11610 if (op0)
11612 arg0 = op0;
11613 STRIP_NOPS (arg0);
11616 if (op1)
11618 arg1 = op1;
11619 STRIP_NOPS (arg1);
11622 switch (code)
11624 case COMPONENT_REF:
11625 if (TREE_CODE (arg0) == CONSTRUCTOR
11626 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11628 unsigned HOST_WIDE_INT idx;
11629 tree field, value;
11630 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11631 if (field == arg1)
11632 return value;
11634 return NULL_TREE;
11636 case COND_EXPR:
11637 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11638 so all simple results must be passed through pedantic_non_lvalue. */
11639 if (TREE_CODE (arg0) == INTEGER_CST)
11641 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11642 tem = integer_zerop (arg0) ? op2 : op1;
11643 /* Only optimize constant conditions when the selected branch
11644 has the same type as the COND_EXPR. This avoids optimizing
11645 away "c ? x : throw", where the throw has a void type.
11646 Avoid throwing away an operand that contains a label. */
11647 if ((!TREE_SIDE_EFFECTS (unused_op)
11648 || !contains_label_p (unused_op))
11649 && (! VOID_TYPE_P (TREE_TYPE (tem))
11650 || VOID_TYPE_P (type)))
11651 return pedantic_non_lvalue (tem);
11652 return NULL_TREE;
11654 if (operand_equal_p (arg1, op2, 0))
11655 return pedantic_omit_one_operand (type, arg1, arg0);
11657 /* If we have A op B ? A : C, we may be able to convert this to a
11658 simpler expression, depending on the operation and the values
11659 of B and C. Signed zeros prevent all of these transformations,
11660 for reasons given above each one.
11662 Also try swapping the arguments and inverting the conditional. */
11663 if (COMPARISON_CLASS_P (arg0)
11664 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11665 arg1, TREE_OPERAND (arg0, 1))
11666 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11668 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11669 if (tem)
11670 return tem;
11673 if (COMPARISON_CLASS_P (arg0)
11674 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11675 op2,
11676 TREE_OPERAND (arg0, 1))
11677 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11679 tem = fold_truth_not_expr (arg0);
11680 if (tem && COMPARISON_CLASS_P (tem))
11682 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11683 if (tem)
11684 return tem;
11688 /* If the second operand is simpler than the third, swap them
11689 since that produces better jump optimization results. */
11690 if (truth_value_p (TREE_CODE (arg0))
11691 && tree_swap_operands_p (op1, op2, false))
11693 /* See if this can be inverted. If it can't, possibly because
11694 it was a floating-point inequality comparison, don't do
11695 anything. */
11696 tem = fold_truth_not_expr (arg0);
11697 if (tem)
11698 return fold_build3 (code, type, tem, op2, op1);
11701 /* Convert A ? 1 : 0 to simply A. */
11702 if (integer_onep (op1)
11703 && integer_zerop (op2)
11704 /* If we try to convert OP0 to our type, the
11705 call to fold will try to move the conversion inside
11706 a COND, which will recurse. In that case, the COND_EXPR
11707 is probably the best choice, so leave it alone. */
11708 && type == TREE_TYPE (arg0))
11709 return pedantic_non_lvalue (arg0);
11711 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11712 over COND_EXPR in cases such as floating point comparisons. */
11713 if (integer_zerop (op1)
11714 && integer_onep (op2)
11715 && truth_value_p (TREE_CODE (arg0)))
11716 return pedantic_non_lvalue (fold_convert (type,
11717 invert_truthvalue (arg0)));
11719 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11720 if (TREE_CODE (arg0) == LT_EXPR
11721 && integer_zerop (TREE_OPERAND (arg0, 1))
11722 && integer_zerop (op2)
11723 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11725 /* sign_bit_p only checks ARG1 bits within A's precision.
11726 If <sign bit of A> has wider type than A, bits outside
11727 of A's precision in <sign bit of A> need to be checked.
11728 If they are all 0, this optimization needs to be done
11729 in unsigned A's type; if they are all 1, in signed A's type;
11730 otherwise this can't be done. */
11731 if (TYPE_PRECISION (TREE_TYPE (tem))
11732 < TYPE_PRECISION (TREE_TYPE (arg1))
11733 && TYPE_PRECISION (TREE_TYPE (tem))
11734 < TYPE_PRECISION (type))
11736 unsigned HOST_WIDE_INT mask_lo;
11737 HOST_WIDE_INT mask_hi;
11738 int inner_width, outer_width;
11739 tree tem_type;
11741 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11742 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11743 if (outer_width > TYPE_PRECISION (type))
11744 outer_width = TYPE_PRECISION (type);
11746 if (outer_width > HOST_BITS_PER_WIDE_INT)
11748 mask_hi = ((unsigned HOST_WIDE_INT) -1
11749 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11750 mask_lo = -1;
11752 else
11754 mask_hi = 0;
11755 mask_lo = ((unsigned HOST_WIDE_INT) -1
11756 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11758 if (inner_width > HOST_BITS_PER_WIDE_INT)
11760 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11761 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11762 mask_lo = 0;
11764 else
11765 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11766 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11768 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11769 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11771 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11772 tem = fold_convert (tem_type, tem);
11774 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11775 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11777 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11778 tem = fold_convert (tem_type, tem);
11780 else
11781 tem = NULL;
11784 if (tem)
11785 return fold_convert (type,
11786 fold_build2 (BIT_AND_EXPR,
11787 TREE_TYPE (tem), tem,
11788 fold_convert (TREE_TYPE (tem),
11789 arg1)));
11792 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11793 already handled above. */
11794 if (TREE_CODE (arg0) == BIT_AND_EXPR
11795 && integer_onep (TREE_OPERAND (arg0, 1))
11796 && integer_zerop (op2)
11797 && integer_pow2p (arg1))
11799 tree tem = TREE_OPERAND (arg0, 0);
11800 STRIP_NOPS (tem);
11801 if (TREE_CODE (tem) == RSHIFT_EXPR
11802 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11803 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11804 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11805 return fold_build2 (BIT_AND_EXPR, type,
11806 TREE_OPERAND (tem, 0), arg1);
11809 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11810 is probably obsolete because the first operand should be a
11811 truth value (that's why we have the two cases above), but let's
11812 leave it in until we can confirm this for all front-ends. */
11813 if (integer_zerop (op2)
11814 && TREE_CODE (arg0) == NE_EXPR
11815 && integer_zerop (TREE_OPERAND (arg0, 1))
11816 && integer_pow2p (arg1)
11817 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11818 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11819 arg1, OEP_ONLY_CONST))
11820 return pedantic_non_lvalue (fold_convert (type,
11821 TREE_OPERAND (arg0, 0)));
11823 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11824 if (integer_zerop (op2)
11825 && truth_value_p (TREE_CODE (arg0))
11826 && truth_value_p (TREE_CODE (arg1)))
11827 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11828 fold_convert (type, arg0),
11829 arg1);
11831 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11832 if (integer_onep (op2)
11833 && truth_value_p (TREE_CODE (arg0))
11834 && truth_value_p (TREE_CODE (arg1)))
11836 /* Only perform transformation if ARG0 is easily inverted. */
11837 tem = fold_truth_not_expr (arg0);
11838 if (tem)
11839 return fold_build2 (TRUTH_ORIF_EXPR, type,
11840 fold_convert (type, tem),
11841 arg1);
11844 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11845 if (integer_zerop (arg1)
11846 && truth_value_p (TREE_CODE (arg0))
11847 && truth_value_p (TREE_CODE (op2)))
11849 /* Only perform transformation if ARG0 is easily inverted. */
11850 tem = fold_truth_not_expr (arg0);
11851 if (tem)
11852 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11853 fold_convert (type, tem),
11854 op2);
11857 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11858 if (integer_onep (arg1)
11859 && truth_value_p (TREE_CODE (arg0))
11860 && truth_value_p (TREE_CODE (op2)))
11861 return fold_build2 (TRUTH_ORIF_EXPR, type,
11862 fold_convert (type, arg0),
11863 op2);
11865 return NULL_TREE;
11867 case CALL_EXPR:
11868 /* Check for a built-in function. */
11869 if (TREE_CODE (op0) == ADDR_EXPR
11870 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11871 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11872 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11873 return NULL_TREE;
11875 case BIT_FIELD_REF:
11876 if (TREE_CODE (arg0) == VECTOR_CST
11877 && type == TREE_TYPE (TREE_TYPE (arg0))
11878 && host_integerp (arg1, 1)
11879 && host_integerp (op2, 1))
11881 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11882 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11884 if (width != 0
11885 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11886 && (idx % width) == 0
11887 && (idx = idx / width)
11888 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11890 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11891 while (idx-- > 0 && elements)
11892 elements = TREE_CHAIN (elements);
11893 if (elements)
11894 return TREE_VALUE (elements);
11895 else
11896 return fold_convert (type, integer_zero_node);
11899 return NULL_TREE;
11901 default:
11902 return NULL_TREE;
11903 } /* switch (code) */
11906 /* Perform constant folding and related simplification of EXPR.
11907 The related simplifications include x*1 => x, x*0 => 0, etc.,
11908 and application of the associative law.
11909 NOP_EXPR conversions may be removed freely (as long as we
11910 are careful not to change the type of the overall expression).
11911 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11912 but we can constant-fold them if they have constant operands. */
11914 #ifdef ENABLE_FOLD_CHECKING
11915 # define fold(x) fold_1 (x)
11916 static tree fold_1 (tree);
11917 static
11918 #endif
11919 tree
11920 fold (tree expr)
11922 const tree t = expr;
11923 enum tree_code code = TREE_CODE (t);
11924 enum tree_code_class kind = TREE_CODE_CLASS (code);
11925 tree tem;
11927 /* Return right away if a constant. */
11928 if (kind == tcc_constant)
11929 return t;
11931 if (IS_EXPR_CODE_CLASS (kind)
11932 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11934 tree type = TREE_TYPE (t);
11935 tree op0, op1, op2;
11937 switch (TREE_CODE_LENGTH (code))
11939 case 1:
11940 op0 = TREE_OPERAND (t, 0);
11941 tem = fold_unary (code, type, op0);
11942 return tem ? tem : expr;
11943 case 2:
11944 op0 = TREE_OPERAND (t, 0);
11945 op1 = TREE_OPERAND (t, 1);
11946 tem = fold_binary (code, type, op0, op1);
11947 return tem ? tem : expr;
11948 case 3:
11949 op0 = TREE_OPERAND (t, 0);
11950 op1 = TREE_OPERAND (t, 1);
11951 op2 = TREE_OPERAND (t, 2);
11952 tem = fold_ternary (code, type, op0, op1, op2);
11953 return tem ? tem : expr;
11954 default:
11955 break;
11959 switch (code)
11961 case CONST_DECL:
11962 return fold (DECL_INITIAL (t));
11964 default:
11965 return t;
11966 } /* switch (code) */
11969 #ifdef ENABLE_FOLD_CHECKING
11970 #undef fold
11972 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11973 static void fold_check_failed (tree, tree);
11974 void print_fold_checksum (tree);
11976 /* When --enable-checking=fold, compute a digest of expr before
11977 and after the actual fold call to verify that fold did not
11978 accidentally change the original expr. */
11980 tree
11981 fold (tree expr)
11983 tree ret;
11984 struct md5_ctx ctx;
11985 unsigned char checksum_before[16], checksum_after[16];
11986 htab_t ht;
11988 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11989 md5_init_ctx (&ctx);
11990 fold_checksum_tree (expr, &ctx, ht);
11991 md5_finish_ctx (&ctx, checksum_before);
11992 htab_empty (ht);
11994 ret = fold_1 (expr);
11996 md5_init_ctx (&ctx);
11997 fold_checksum_tree (expr, &ctx, ht);
11998 md5_finish_ctx (&ctx, checksum_after);
11999 htab_delete (ht);
12001 if (memcmp (checksum_before, checksum_after, 16))
12002 fold_check_failed (expr, ret);
12004 return ret;
12007 void
12008 print_fold_checksum (tree expr)
12010 struct md5_ctx ctx;
12011 unsigned char checksum[16], cnt;
12012 htab_t ht;
12014 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12015 md5_init_ctx (&ctx);
12016 fold_checksum_tree (expr, &ctx, ht);
12017 md5_finish_ctx (&ctx, checksum);
12018 htab_delete (ht);
12019 for (cnt = 0; cnt < 16; ++cnt)
12020 fprintf (stderr, "%02x", checksum[cnt]);
12021 putc ('\n', stderr);
12024 static void
12025 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12027 internal_error ("fold check: original tree changed by fold");
12030 static void
12031 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12033 void **slot;
12034 enum tree_code code;
12035 struct tree_function_decl buf;
12036 int i, len;
12038 recursive_label:
12040 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12041 <= sizeof (struct tree_function_decl))
12042 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12043 if (expr == NULL)
12044 return;
12045 slot = htab_find_slot (ht, expr, INSERT);
12046 if (*slot != NULL)
12047 return;
12048 *slot = expr;
12049 code = TREE_CODE (expr);
12050 if (TREE_CODE_CLASS (code) == tcc_declaration
12051 && DECL_ASSEMBLER_NAME_SET_P (expr))
12053 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12054 memcpy ((char *) &buf, expr, tree_size (expr));
12055 expr = (tree) &buf;
12056 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12058 else if (TREE_CODE_CLASS (code) == tcc_type
12059 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12060 || TYPE_CACHED_VALUES_P (expr)
12061 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12063 /* Allow these fields to be modified. */
12064 memcpy ((char *) &buf, expr, tree_size (expr));
12065 expr = (tree) &buf;
12066 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12067 TYPE_POINTER_TO (expr) = NULL;
12068 TYPE_REFERENCE_TO (expr) = NULL;
12069 if (TYPE_CACHED_VALUES_P (expr))
12071 TYPE_CACHED_VALUES_P (expr) = 0;
12072 TYPE_CACHED_VALUES (expr) = NULL;
12075 md5_process_bytes (expr, tree_size (expr), ctx);
12076 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12077 if (TREE_CODE_CLASS (code) != tcc_type
12078 && TREE_CODE_CLASS (code) != tcc_declaration
12079 && code != TREE_LIST)
12080 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12081 switch (TREE_CODE_CLASS (code))
12083 case tcc_constant:
12084 switch (code)
12086 case STRING_CST:
12087 md5_process_bytes (TREE_STRING_POINTER (expr),
12088 TREE_STRING_LENGTH (expr), ctx);
12089 break;
12090 case COMPLEX_CST:
12091 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12092 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12093 break;
12094 case VECTOR_CST:
12095 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12096 break;
12097 default:
12098 break;
12100 break;
12101 case tcc_exceptional:
12102 switch (code)
12104 case TREE_LIST:
12105 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12106 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12107 expr = TREE_CHAIN (expr);
12108 goto recursive_label;
12109 break;
12110 case TREE_VEC:
12111 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12112 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12113 break;
12114 default:
12115 break;
12117 break;
12118 case tcc_expression:
12119 case tcc_reference:
12120 case tcc_comparison:
12121 case tcc_unary:
12122 case tcc_binary:
12123 case tcc_statement:
12124 len = TREE_CODE_LENGTH (code);
12125 for (i = 0; i < len; ++i)
12126 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12127 break;
12128 case tcc_declaration:
12129 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12130 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12131 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12133 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12134 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12135 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12136 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12137 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12139 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12140 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12142 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12144 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12145 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12146 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12148 break;
12149 case tcc_type:
12150 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12151 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12152 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12153 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12154 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12155 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12156 if (INTEGRAL_TYPE_P (expr)
12157 || SCALAR_FLOAT_TYPE_P (expr))
12159 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12160 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12162 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12163 if (TREE_CODE (expr) == RECORD_TYPE
12164 || TREE_CODE (expr) == UNION_TYPE
12165 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12166 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12167 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12168 break;
12169 default:
12170 break;
12174 #endif
12176 /* Fold a unary tree expression with code CODE of type TYPE with an
12177 operand OP0. Return a folded expression if successful. Otherwise,
12178 return a tree expression with code CODE of type TYPE with an
12179 operand OP0. */
12181 tree
12182 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12184 tree tem;
12185 #ifdef ENABLE_FOLD_CHECKING
12186 unsigned char checksum_before[16], checksum_after[16];
12187 struct md5_ctx ctx;
12188 htab_t ht;
12190 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12191 md5_init_ctx (&ctx);
12192 fold_checksum_tree (op0, &ctx, ht);
12193 md5_finish_ctx (&ctx, checksum_before);
12194 htab_empty (ht);
12195 #endif
12197 tem = fold_unary (code, type, op0);
12198 if (!tem)
12199 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12201 #ifdef ENABLE_FOLD_CHECKING
12202 md5_init_ctx (&ctx);
12203 fold_checksum_tree (op0, &ctx, ht);
12204 md5_finish_ctx (&ctx, checksum_after);
12205 htab_delete (ht);
12207 if (memcmp (checksum_before, checksum_after, 16))
12208 fold_check_failed (op0, tem);
12209 #endif
12210 return tem;
12213 /* Fold a binary tree expression with code CODE of type TYPE with
12214 operands OP0 and OP1. Return a folded expression if successful.
12215 Otherwise, return a tree expression with code CODE of type TYPE
12216 with operands OP0 and OP1. */
12218 tree
12219 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12220 MEM_STAT_DECL)
12222 tree tem;
12223 #ifdef ENABLE_FOLD_CHECKING
12224 unsigned char checksum_before_op0[16],
12225 checksum_before_op1[16],
12226 checksum_after_op0[16],
12227 checksum_after_op1[16];
12228 struct md5_ctx ctx;
12229 htab_t ht;
12231 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12232 md5_init_ctx (&ctx);
12233 fold_checksum_tree (op0, &ctx, ht);
12234 md5_finish_ctx (&ctx, checksum_before_op0);
12235 htab_empty (ht);
12237 md5_init_ctx (&ctx);
12238 fold_checksum_tree (op1, &ctx, ht);
12239 md5_finish_ctx (&ctx, checksum_before_op1);
12240 htab_empty (ht);
12241 #endif
12243 tem = fold_binary (code, type, op0, op1);
12244 if (!tem)
12245 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12247 #ifdef ENABLE_FOLD_CHECKING
12248 md5_init_ctx (&ctx);
12249 fold_checksum_tree (op0, &ctx, ht);
12250 md5_finish_ctx (&ctx, checksum_after_op0);
12251 htab_empty (ht);
12253 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12254 fold_check_failed (op0, tem);
12256 md5_init_ctx (&ctx);
12257 fold_checksum_tree (op1, &ctx, ht);
12258 md5_finish_ctx (&ctx, checksum_after_op1);
12259 htab_delete (ht);
12261 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12262 fold_check_failed (op1, tem);
12263 #endif
12264 return tem;
12267 /* Fold a ternary tree expression with code CODE of type TYPE with
12268 operands OP0, OP1, and OP2. Return a folded expression if
12269 successful. Otherwise, return a tree expression with code CODE of
12270 type TYPE with operands OP0, OP1, and OP2. */
12272 tree
12273 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12274 MEM_STAT_DECL)
12276 tree tem;
12277 #ifdef ENABLE_FOLD_CHECKING
12278 unsigned char checksum_before_op0[16],
12279 checksum_before_op1[16],
12280 checksum_before_op2[16],
12281 checksum_after_op0[16],
12282 checksum_after_op1[16],
12283 checksum_after_op2[16];
12284 struct md5_ctx ctx;
12285 htab_t ht;
12287 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12288 md5_init_ctx (&ctx);
12289 fold_checksum_tree (op0, &ctx, ht);
12290 md5_finish_ctx (&ctx, checksum_before_op0);
12291 htab_empty (ht);
12293 md5_init_ctx (&ctx);
12294 fold_checksum_tree (op1, &ctx, ht);
12295 md5_finish_ctx (&ctx, checksum_before_op1);
12296 htab_empty (ht);
12298 md5_init_ctx (&ctx);
12299 fold_checksum_tree (op2, &ctx, ht);
12300 md5_finish_ctx (&ctx, checksum_before_op2);
12301 htab_empty (ht);
12302 #endif
12304 tem = fold_ternary (code, type, op0, op1, op2);
12305 if (!tem)
12306 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12308 #ifdef ENABLE_FOLD_CHECKING
12309 md5_init_ctx (&ctx);
12310 fold_checksum_tree (op0, &ctx, ht);
12311 md5_finish_ctx (&ctx, checksum_after_op0);
12312 htab_empty (ht);
12314 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12315 fold_check_failed (op0, tem);
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (op1, &ctx, ht);
12319 md5_finish_ctx (&ctx, checksum_after_op1);
12320 htab_empty (ht);
12322 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12323 fold_check_failed (op1, tem);
12325 md5_init_ctx (&ctx);
12326 fold_checksum_tree (op2, &ctx, ht);
12327 md5_finish_ctx (&ctx, checksum_after_op2);
12328 htab_delete (ht);
12330 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12331 fold_check_failed (op2, tem);
12332 #endif
12333 return tem;
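/* Illustrative sketch of the ENABLE_FOLD_CHECKING idiom above: hash the
   input before and after the operation under test and compare digests to
   catch accidental modification.  The real code hashes trees with
   fold_checksum_tree; this hypothetical helper hashes raw bytes to show
   the same before/after structure.  */

static void
example_check_unmodified (const void *buf, size_t len, void (*fn) (void))
{
  unsigned char before[16], after[16];
  struct md5_ctx ctx;

  md5_init_ctx (&ctx);
  md5_process_bytes (buf, len, &ctx);
  md5_finish_ctx (&ctx, before);

  fn ();  /* The operation that must not modify BUF.  */

  md5_init_ctx (&ctx);
  md5_process_bytes (buf, len, &ctx);
  md5_finish_ctx (&ctx, after);

  if (memcmp (before, after, 16))
    abort ();  /* Analogous to fold_check_failed above.  */
}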
12336 /* Perform constant folding and related simplification of initializer
12337 expression EXPR. These behave identically to "fold_buildN" but ignore
12338 potential run-time traps and exceptions that fold must preserve. */
12340 #define START_FOLD_INIT \
12341 int saved_signaling_nans = flag_signaling_nans;\
12342 int saved_trapping_math = flag_trapping_math;\
12343 int saved_rounding_math = flag_rounding_math;\
12344 int saved_trapv = flag_trapv;\
12345 int saved_folding_initializer = folding_initializer;\
12346 flag_signaling_nans = 0;\
12347 flag_trapping_math = 0;\
12348 flag_rounding_math = 0;\
12349 flag_trapv = 0;\
12350 folding_initializer = 1;
12352 #define END_FOLD_INIT \
12353 flag_signaling_nans = saved_signaling_nans;\
12354 flag_trapping_math = saved_trapping_math;\
12355 flag_rounding_math = saved_rounding_math;\
12356 flag_trapv = saved_trapv;\
12357 folding_initializer = saved_folding_initializer;
12359 tree
12360 fold_build1_initializer (enum tree_code code, tree type, tree op)
12362 tree result;
12363 START_FOLD_INIT;
12365 result = fold_build1 (code, type, op);
12367 END_FOLD_INIT;
12368 return result;
12371 tree
12372 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12374 tree result;
12375 START_FOLD_INIT;
12377 result = fold_build2 (code, type, op0, op1);
12379 END_FOLD_INIT;
12380 return result;
12383 tree
12384 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12385 tree op2)
12387 tree result;
12388 START_FOLD_INIT;
12390 result = fold_build3 (code, type, op0, op1, op2);
12392 END_FOLD_INIT;
12393 return result;
12396 #undef START_FOLD_INIT
12397 #undef END_FOLD_INIT
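/* A hedged usage sketch (hypothetical helper and variable names): when
   folding constant arithmetic inside a static initializer, a caller can
   use the _initializer variants so that -ftrapping-math, -frounding-math
   and friends do not block the fold, which would be unsafe elsewhere.  */

static tree
example_fold_initializer_div (tree lhs_type, tree num, tree den)
{
  /* Unlike fold_build2, this ignores run-time traps and exceptions,
     which is only valid in an initializer context.  */
  return fold_build2_initializer (RDIV_EXPR, lhs_type, num, den);
}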
12399 /* Determine if first argument is a multiple of second argument. Return 0 if
12400 it is not, or we cannot easily determine it to be.
12402 An example of the sort of thing we care about (at this point; this routine
12403 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12404 fold cases do now) is discovering that
12406 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12408 is a multiple of
12410 SAVE_EXPR (J * 8)
12412 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12414 This code also handles discovering that
12416 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12418 is a multiple of 8 so we don't have to worry about dealing with a
12419 possible remainder.
12421 Note that we *look* inside a SAVE_EXPR only to determine how it was
12422 calculated; it is not safe for fold to do much of anything else with the
12423 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12424 at run time. For example, the latter example above *cannot* be implemented
12425 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12426 evaluation time of the original SAVE_EXPR is not necessarily the same at
12427 the time the new expression is evaluated. The only optimization of this
12428 sort that would be valid is changing
12430 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12432 divided by 8 to
12434 SAVE_EXPR (I) * SAVE_EXPR (J)
12436 (where the same SAVE_EXPR (J) is used in the original and the
12437 transformed version). */
12439 static int
12440 multiple_of_p (tree type, tree top, tree bottom)
12442 if (operand_equal_p (top, bottom, 0))
12443 return 1;
12445 if (TREE_CODE (type) != INTEGER_TYPE)
12446 return 0;
12448 switch (TREE_CODE (top))
12450 case BIT_AND_EXPR:
12451 /* A bitwise AND is handled only for a power-of-two BOTTOM: if either
12452 operand (e.g. the mask) is a multiple of BOTTOM, then TOP is a multiple of BOTTOM. */
12453 if (!integer_pow2p (bottom))
12454 return 0;
12455 /* FALLTHRU */
12457 case MULT_EXPR:
12458 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12459 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12461 case PLUS_EXPR:
12462 case MINUS_EXPR:
12463 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12464 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12466 case LSHIFT_EXPR:
12467 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12469 tree op1, t1;
12471 op1 = TREE_OPERAND (top, 1);
12472 /* const_binop may not detect overflow correctly,
12473 so check for it explicitly here. */
12474 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12475 > TREE_INT_CST_LOW (op1)
12476 && TREE_INT_CST_HIGH (op1) == 0
12477 && 0 != (t1 = fold_convert (type,
12478 const_binop (LSHIFT_EXPR,
12479 size_one_node,
12480 op1, 0)))
12481 && ! TREE_OVERFLOW (t1))
12482 return multiple_of_p (type, t1, bottom);
12484 return 0;
12486 case NOP_EXPR:
12487 /* Can't handle conversions from non-integral or wider integral type. */
12488 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12489 || (TYPE_PRECISION (type)
12490 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12491 return 0;
12493 /* ... fall through ... */
12495 case SAVE_EXPR:
12496 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12498 case INTEGER_CST:
12499 if (TREE_CODE (bottom) != INTEGER_CST
12500 || (TYPE_UNSIGNED (type)
12501 && (tree_int_cst_sgn (top) < 0
12502 || tree_int_cst_sgn (bottom) < 0)))
12503 return 0;
12504 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12505 top, bottom, 0));
12507 default:
12508 return 0;
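/* A small usage sketch (hypothetical helper): given the trees built
   below, multiple_of_p reports that J * 8 is a multiple of 8 via the
   MULT_EXPR case above, without needing to know anything about J.  */

static int
example_multiple_of_8 (tree j)
{
  tree eight = build_int_cst (sizetype, 8);
  tree prod = fold_build2 (MULT_EXPR, sizetype, j, eight);
  return multiple_of_p (sizetype, prod, eight);
}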
12512 /* Return true if `t' is known to be non-negative. */
12514 bool
12515 tree_expr_nonnegative_p (tree t)
12517 if (t == error_mark_node)
12518 return false;
12520 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12521 return true;
12523 switch (TREE_CODE (t))
12525 case SSA_NAME:
12526 /* Query VRP to see if it has recorded any information about
12527 the range of this object. */
12528 return ssa_name_nonnegative_p (t);
12530 case ABS_EXPR:
12531 /* We can't return 1 if flag_wrapv is set because
12532 ABS_EXPR<INT_MIN> = INT_MIN. */
12533 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12534 return true;
12535 break;
12537 case INTEGER_CST:
12538 return tree_int_cst_sgn (t) >= 0;
12540 case REAL_CST:
12541 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12543 case PLUS_EXPR:
12544 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12545 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12546 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12548 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12549 both unsigned and at least 2 bits shorter than the result. */
12550 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12551 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12552 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12554 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12555 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12556 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12557 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12559 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12560 TYPE_PRECISION (inner2)) + 1;
12561 return prec < TYPE_PRECISION (TREE_TYPE (t));
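	    /* Worked example: two zero-extended 8-bit values added in a
	       32-bit type range over [0, 510], and MAX (8, 8) + 1 = 9 < 32,
	       so the sum can never reach the sign bit.  */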
12564 break;
12566 case MULT_EXPR:
12567 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12569 /* x * x for floating point x is always non-negative. */
12570 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12571 return true;
12572 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12573 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12576 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12577 both unsigned and the total of their bits is less than that of the result. */
12578 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12579 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12580 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12582 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12583 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12584 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12585 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12586 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12587 < TYPE_PRECISION (TREE_TYPE (t));
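	    /* Worked example: 8-bit * 8-bit operands in a 32-bit type give
	       at most 255 * 255 = 65025 < 2^16, and 8 + 8 = 16 < 32, so the
	       product cannot set the sign bit.  */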
12589 return false;
12591 case BIT_AND_EXPR:
12592 case MAX_EXPR:
12593 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12594 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12596 case BIT_IOR_EXPR:
12597 case BIT_XOR_EXPR:
12598 case MIN_EXPR:
12599 case RDIV_EXPR:
12600 case TRUNC_DIV_EXPR:
12601 case CEIL_DIV_EXPR:
12602 case FLOOR_DIV_EXPR:
12603 case ROUND_DIV_EXPR:
12604 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12605 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12607 case TRUNC_MOD_EXPR:
12608 case CEIL_MOD_EXPR:
12609 case FLOOR_MOD_EXPR:
12610 case ROUND_MOD_EXPR:
12611 case SAVE_EXPR:
12612 case NON_LVALUE_EXPR:
12613 case FLOAT_EXPR:
12614 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12616 case COMPOUND_EXPR:
12617 case MODIFY_EXPR:
12618 case GIMPLE_MODIFY_STMT:
12619 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12621 case BIND_EXPR:
12622 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12624 case COND_EXPR:
12625 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12626 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12628 case NOP_EXPR:
12630 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12631 tree outer_type = TREE_TYPE (t);
12633 if (TREE_CODE (outer_type) == REAL_TYPE)
12635 if (TREE_CODE (inner_type) == REAL_TYPE)
12636 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12637 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12639 if (TYPE_UNSIGNED (inner_type))
12640 return true;
12641 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12644 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12646 if (TREE_CODE (inner_type) == REAL_TYPE)
12647 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12648 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12649 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12650 && TYPE_UNSIGNED (inner_type);
12653 break;
12655 case TARGET_EXPR:
12657 tree temp = TARGET_EXPR_SLOT (t);
12658 t = TARGET_EXPR_INITIAL (t);
12660 /* If the initializer is non-void, then it's a normal expression
12661 that will be assigned to the slot. */
12662 if (!VOID_TYPE_P (t))
12663 return tree_expr_nonnegative_p (t);
12665 /* Otherwise, the initializer sets the slot in some way. One common
12666 way is an assignment statement at the end of the initializer. */
12667 while (1)
12669 if (TREE_CODE (t) == BIND_EXPR)
12670 t = expr_last (BIND_EXPR_BODY (t));
12671 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12672 || TREE_CODE (t) == TRY_CATCH_EXPR)
12673 t = expr_last (TREE_OPERAND (t, 0));
12674 else if (TREE_CODE (t) == STATEMENT_LIST)
12675 t = expr_last (t);
12676 else
12677 break;
12679 if ((TREE_CODE (t) == MODIFY_EXPR
12680 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12681 && GENERIC_TREE_OPERAND (t, 0) == temp)
12682 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12684 return false;
12687 case CALL_EXPR:
12689 tree fndecl = get_callee_fndecl (t);
12690 tree arglist = TREE_OPERAND (t, 1);
12691 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12692 switch (DECL_FUNCTION_CODE (fndecl))
12694 CASE_FLT_FN (BUILT_IN_ACOS):
12695 CASE_FLT_FN (BUILT_IN_ACOSH):
12696 CASE_FLT_FN (BUILT_IN_CABS):
12697 CASE_FLT_FN (BUILT_IN_COSH):
12698 CASE_FLT_FN (BUILT_IN_ERFC):
12699 CASE_FLT_FN (BUILT_IN_EXP):
12700 CASE_FLT_FN (BUILT_IN_EXP10):
12701 CASE_FLT_FN (BUILT_IN_EXP2):
12702 CASE_FLT_FN (BUILT_IN_FABS):
12703 CASE_FLT_FN (BUILT_IN_FDIM):
12704 CASE_FLT_FN (BUILT_IN_HYPOT):
12705 CASE_FLT_FN (BUILT_IN_POW10):
12706 CASE_INT_FN (BUILT_IN_FFS):
12707 CASE_INT_FN (BUILT_IN_PARITY):
12708 CASE_INT_FN (BUILT_IN_POPCOUNT):
12709 case BUILT_IN_BSWAP32:
12710 case BUILT_IN_BSWAP64:
12711 /* Always true. */
12712 return true;
12714 CASE_FLT_FN (BUILT_IN_SQRT):
12715 /* sqrt(-0.0) is -0.0. */
12716 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12717 return true;
12718 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12720 CASE_FLT_FN (BUILT_IN_ASINH):
12721 CASE_FLT_FN (BUILT_IN_ATAN):
12722 CASE_FLT_FN (BUILT_IN_ATANH):
12723 CASE_FLT_FN (BUILT_IN_CBRT):
12724 CASE_FLT_FN (BUILT_IN_CEIL):
12725 CASE_FLT_FN (BUILT_IN_ERF):
12726 CASE_FLT_FN (BUILT_IN_EXPM1):
12727 CASE_FLT_FN (BUILT_IN_FLOOR):
12728 CASE_FLT_FN (BUILT_IN_FMOD):
12729 CASE_FLT_FN (BUILT_IN_FREXP):
12730 CASE_FLT_FN (BUILT_IN_LCEIL):
12731 CASE_FLT_FN (BUILT_IN_LDEXP):
12732 CASE_FLT_FN (BUILT_IN_LFLOOR):
12733 CASE_FLT_FN (BUILT_IN_LLCEIL):
12734 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12735 CASE_FLT_FN (BUILT_IN_LLRINT):
12736 CASE_FLT_FN (BUILT_IN_LLROUND):
12737 CASE_FLT_FN (BUILT_IN_LRINT):
12738 CASE_FLT_FN (BUILT_IN_LROUND):
12739 CASE_FLT_FN (BUILT_IN_MODF):
12740 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12741 CASE_FLT_FN (BUILT_IN_RINT):
12742 CASE_FLT_FN (BUILT_IN_ROUND):
12743 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12744 CASE_FLT_FN (BUILT_IN_SINH):
12745 CASE_FLT_FN (BUILT_IN_TANH):
12746 CASE_FLT_FN (BUILT_IN_TRUNC):
12747 /* True if the 1st argument is nonnegative. */
12748 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12750 CASE_FLT_FN (BUILT_IN_FMAX):
12751 /* True if the 1st OR 2nd arguments are nonnegative. */
12752 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12753 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12755 CASE_FLT_FN (BUILT_IN_FMIN):
12756 /* True if the 1st AND 2nd arguments are nonnegative. */
12757 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12758 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12760 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12761 /* True if the 2nd argument is nonnegative. */
12762 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12764 CASE_FLT_FN (BUILT_IN_POWI):
12765 /* True if the 1st argument is nonnegative or the second
12766 argument is an even integer. */
12767 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12769 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12770 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12771 return true;
12773 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12775 CASE_FLT_FN (BUILT_IN_POW):
12776 /* True if the 1st argument is nonnegative or the second
12777 argument is an even integer valued real. */
12778 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12780 REAL_VALUE_TYPE c;
12781 HOST_WIDE_INT n;
12783 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12784 n = real_to_integer (&c);
12785 if ((n & 1) == 0)
12787 REAL_VALUE_TYPE cint;
12788 real_from_integer (&cint, VOIDmode, n,
12789 n < 0 ? -1 : 0, 0);
12790 if (real_identical (&c, &cint))
12791 return true;
12794 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
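	    /* E.g. powi (x, 4) and pow (x, 2.0) are nonnegative for any x,
	       while powi (x, 3) and pow (x, 3.0) merely inherit the sign of
	       x, hence the fallback to the first argument's sign above.  */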
12796 default:
12797 break;
12801 /* ... fall through ... */
12803 default:
12804 if (truth_value_p (TREE_CODE (t)))
12805 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12806 return true;
12809 /* We don't know the sign of `t', so be conservative and return false. */
12810 return false;
12813 /* Return true when T is an address and is known to be nonzero.
12814 For floating point we further ensure that T is not denormal.
12815 Similar logic is present in nonzero_address in rtlanal.c. */
12817 bool
12818 tree_expr_nonzero_p (tree t)
12820 tree type = TREE_TYPE (t);
12822 /* Doing something useful for floating point would need more work. */
12823 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12824 return false;
12826 switch (TREE_CODE (t))
12828 case SSA_NAME:
12829 /* Query VRP to see if it has recorded any information about
12830 the range of this object. */
12831 return ssa_name_nonzero_p (t);
12833 case ABS_EXPR:
12834 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12836 case INTEGER_CST:
12837 /* We used to test for !integer_zerop here. This does not work correctly
12838 if TREE_CONSTANT_OVERFLOW (t). */
12839 return (TREE_INT_CST_LOW (t) != 0
12840 || TREE_INT_CST_HIGH (t) != 0);
12842 case PLUS_EXPR:
12843 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12845 /* In the presence of negative values it is hard
12846 to say anything definite. */
12847 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12848 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12849 return false;
12850 /* One of the operands must be positive and the other non-negative. */
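	  /* E.g. for signed x >= 1 and y >= 0, x + y >= 1 cannot be zero,
	     since signed overflow is undefined here (no -fwrapv).  */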
12851 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12852 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12854 break;
12856 case MULT_EXPR:
12857 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12859 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12860 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12862 break;
12864 case NOP_EXPR:
12866 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12867 tree outer_type = TREE_TYPE (t);
12869 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12870 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12872 break;
12874 case ADDR_EXPR:
12876 tree base = get_base_address (TREE_OPERAND (t, 0));
12878 if (!base)
12879 return false;
12881 /* Weak declarations may link to NULL. */
12882 if (VAR_OR_FUNCTION_DECL_P (base))
12883 return !DECL_WEAK (base);
12885 /* Constants are never weak. */
12886 if (CONSTANT_CLASS_P (base))
12887 return true;
12889 return false;
12892 case COND_EXPR:
12893 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12894 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12896 case MIN_EXPR:
12897 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12898 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12900 case MAX_EXPR:
12901 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12903 /* When both operands are nonzero, then MAX must be too. */
12904 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12905 return true;
12907 /* MAX where operand 0 is positive is positive. */
12908 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12910 /* MAX where operand 1 is positive is positive. */
12911 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12912 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12913 return true;
12914 break;
12916 case COMPOUND_EXPR:
12917 case MODIFY_EXPR:
12918 case GIMPLE_MODIFY_STMT:
12919 case BIND_EXPR:
12920 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12922 case SAVE_EXPR:
12923 case NON_LVALUE_EXPR:
12924 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12926 case BIT_IOR_EXPR:
12927 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12928 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12930 case CALL_EXPR:
12931 return alloca_call_p (t);
12933 default:
12934 break;
12936 return false;
12939 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12940 attempt to fold the expression to a constant without modifying TYPE,
12941 OP0 or OP1.
12943 If the expression could be simplified to a constant, then return
12944 the constant. If the expression would not be simplified to a
12945 constant, then return NULL_TREE. */
12947 tree
12948 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12950 tree tem = fold_binary (code, type, op0, op1);
12951 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
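/* A minimal usage sketch (hypothetical helper): ask whether 2 + 3 folds
   to a constant.  The result is the INTEGER_CST 5; a non-constant
   operand would instead yield NULL_TREE.  */

static tree
example_fold_two_plus_three (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}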
12954 /* Given the components of a unary expression CODE, TYPE and OP0,
12955 attempt to fold the expression to a constant without modifying
12956 TYPE or OP0.
12958 If the expression could be simplified to a constant, then return
12959 the constant. If the expression would not be simplified to a
12960 constant, then return NULL_TREE. */
12962 tree
12963 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12965 tree tem = fold_unary (code, type, op0);
12966 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12969 /* If EXP represents referencing an element in a constant string
12970 (either via pointer arithmetic or array indexing), return the
12971 tree representing the value accessed, otherwise return NULL. */
12973 tree
12974 fold_read_from_constant_string (tree exp)
12976 if ((TREE_CODE (exp) == INDIRECT_REF
12977 || TREE_CODE (exp) == ARRAY_REF)
12978 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12980 tree exp1 = TREE_OPERAND (exp, 0);
12981 tree index;
12982 tree string;
12984 if (TREE_CODE (exp) == INDIRECT_REF)
12985 string = string_constant (exp1, &index);
12986 else
12988 tree low_bound = array_ref_low_bound (exp);
12989 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12991 /* Optimize the special case of a zero lower bound.
12993 We convert the low_bound to sizetype to avoid some problems
12994 with constant folding.  (E.g. suppose the lower bound is 1,
12995 and its mode is QI.  Without the conversion, (ARRAY
12996 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12997 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
12998 if (! integer_zerop (low_bound))
12999 index = size_diffop (index, fold_convert (sizetype, low_bound));
13001 string = exp1;
13004 if (string
13005 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13006 && TREE_CODE (string) == STRING_CST
13007 && TREE_CODE (index) == INTEGER_CST
13008 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13009 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13010 == MODE_INT)
13011 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13012 return fold_convert (TREE_TYPE (exp),
13013 build_int_cst (NULL_TREE,
13014 (TREE_STRING_POINTER (string)
13015 [TREE_INT_CST_LOW (index)])));
13017 return NULL;
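/* For example, given the GENERIC for "abc"[1], the checks above locate
   the STRING_CST and the constant index and fold the access down to the
   character constant 'b'.  */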
13020 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13021 an integer constant or real constant.
13023 TYPE is the type of the result. */
13025 static tree
13026 fold_negate_const (tree arg0, tree type)
13028 tree t = NULL_TREE;
13030 switch (TREE_CODE (arg0))
13032 case INTEGER_CST:
13034 unsigned HOST_WIDE_INT low;
13035 HOST_WIDE_INT high;
13036 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13037 TREE_INT_CST_HIGH (arg0),
13038 &low, &high);
13039 t = build_int_cst_wide (type, low, high);
13040 t = force_fit_type (t, 1,
13041 (overflow | TREE_OVERFLOW (arg0))
13042 && !TYPE_UNSIGNED (type),
13043 TREE_CONSTANT_OVERFLOW (arg0));
13044 break;
13047 case REAL_CST:
13048 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13049 break;
13051 default:
13052 gcc_unreachable ();
13055 return t;
13058 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13059 an integer constant or real constant.
13061 TYPE is the type of the result. */
13063 tree
13064 fold_abs_const (tree arg0, tree type)
13066 tree t = NULL_TREE;
13068 switch (TREE_CODE (arg0))
13070 case INTEGER_CST:
13071 /* If the value is unsigned, then the absolute value is
13072 the same as the ordinary value. */
13073 if (TYPE_UNSIGNED (type))
13074 t = arg0;
13075 /* Similarly, if the value is non-negative. */
13076 else if (INT_CST_LT (integer_minus_one_node, arg0))
13077 t = arg0;
13078 /* If the value is negative, then the absolute value is
13079 its negation. */
13080 else
13082 unsigned HOST_WIDE_INT low;
13083 HOST_WIDE_INT high;
13084 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13085 TREE_INT_CST_HIGH (arg0),
13086 &low, &high);
13087 t = build_int_cst_wide (type, low, high);
13088 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13089 TREE_CONSTANT_OVERFLOW (arg0));
13091 break;
13093 case REAL_CST:
13094 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13095 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13096 else
13097 t = arg0;
13098 break;
13100 default:
13101 gcc_unreachable ();
13104 return t;
13107 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13108 constant. TYPE is the type of the result. */
13110 static tree
13111 fold_not_const (tree arg0, tree type)
13113 tree t = NULL_TREE;
13115 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13117 t = build_int_cst_wide (type,
13118 ~ TREE_INT_CST_LOW (arg0),
13119 ~ TREE_INT_CST_HIGH (arg0));
13120 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13121 TREE_CONSTANT_OVERFLOW (arg0));
13123 return t;
13126 /* Given CODE, a relational operator, the target type, TYPE and two
13127 constant operands OP0 and OP1, return the result of the
13128 relational operation. If the result is not a compile time
13129 constant, then return NULL_TREE. */
13131 static tree
13132 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13134 int result, invert;
13136 /* From here on, the only cases we handle are when the result is
13137 known to be a constant. */
13139 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13141 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13142 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13144 /* Handle the cases where either operand is a NaN. */
13145 if (real_isnan (c0) || real_isnan (c1))
13147 switch (code)
13149 case EQ_EXPR:
13150 case ORDERED_EXPR:
13151 result = 0;
13152 break;
13154 case NE_EXPR:
13155 case UNORDERED_EXPR:
13156 case UNLT_EXPR:
13157 case UNLE_EXPR:
13158 case UNGT_EXPR:
13159 case UNGE_EXPR:
13160 case UNEQ_EXPR:
13161 result = 1;
13162 break;
13164 case LT_EXPR:
13165 case LE_EXPR:
13166 case GT_EXPR:
13167 case GE_EXPR:
13168 case LTGT_EXPR:
13169 if (flag_trapping_math)
13170 return NULL_TREE;
13171 result = 0;
13172 break;
13174 default:
13175 gcc_unreachable ();
13178 return constant_boolean_node (result, type);
13181 return constant_boolean_node (real_compare (code, c0, c1), type);
13184 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13186 To compute GT, swap the arguments and do LT.
13187 To compute GE, do LT and invert the result.
13188 To compute LE, swap the arguments, do LT and invert the result.
13189 To compute NE, do EQ and invert the result.
13191 Therefore, the code below must handle only EQ and LT. */
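  /* Worked example of the reductions above: 5 >= 3 is computed as
     ! (5 < 3) = ! 0 = 1, and 5 > 3 becomes 3 < 5 = 1 after swapping
     the operands.  */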
13193 if (code == LE_EXPR || code == GT_EXPR)
13195 tree tem = op0;
13196 op0 = op1;
13197 op1 = tem;
13198 code = swap_tree_comparison (code);
13201 /* Note that it is safe to invert for real values here because we
13202 have already handled the one case where it matters. */
13204 invert = 0;
13205 if (code == NE_EXPR || code == GE_EXPR)
13207 invert = 1;
13208 code = invert_tree_comparison (code, false);
13211 /* Compute a result for LT or EQ if args permit;
13212 otherwise return NULL_TREE. */
13213 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13215 if (code == EQ_EXPR)
13216 result = tree_int_cst_equal (op0, op1);
13217 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13218 result = INT_CST_LT_UNSIGNED (op0, op1);
13219 else
13220 result = INT_CST_LT (op0, op1);
13222 else
13223 return NULL_TREE;
13225 if (invert)
13226 result ^= 1;
13227 return constant_boolean_node (result, type);
13230 /* Build a cleanup point expression containing EXPR, with type TYPE.
13231 Don't build a cleanup point expression for an EXPR which doesn't have
13232 side effects. */
13234 tree
13235 fold_build_cleanup_point_expr (tree type, tree expr)
13237 /* If the expression does not have side effects then we don't have to wrap
13238 it with a cleanup point expression. */
13239 if (!TREE_SIDE_EFFECTS (expr))
13240 return expr;
13242 /* If the expression is a return, check whether the expression inside the
13243 return, or the right-hand side of the modify expression inside the
13244 return, has side effects. If either of them has none, we don't need to
13245 wrap the expression in a cleanup point expression. Note we don't check
13246 the left-hand side of the modify because it should always be a return decl. */
13247 if (TREE_CODE (expr) == RETURN_EXPR)
13249 tree op = TREE_OPERAND (expr, 0);
13250 if (!op || !TREE_SIDE_EFFECTS (op))
13251 return expr;
13252 op = TREE_OPERAND (op, 1);
13253 if (!TREE_SIDE_EFFECTS (op))
13254 return expr;
13257 return build1 (CLEANUP_POINT_EXPR, type, expr);
13260 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13261 avoid confusing the gimplify process. */
13263 tree
13264 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13266 /* The size of the object is not relevant when talking about its address. */
13267 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13268 t = TREE_OPERAND (t, 0);
13270 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13271 if (TREE_CODE (t) == INDIRECT_REF
13272 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13274 t = TREE_OPERAND (t, 0);
13275 if (TREE_TYPE (t) != ptrtype)
13276 t = build1 (NOP_EXPR, ptrtype, t);
13278 else
13280 tree base = t;
13282 while (handled_component_p (base))
13283 base = TREE_OPERAND (base, 0);
13284 if (DECL_P (base))
13285 TREE_ADDRESSABLE (base) = 1;
13287 t = build1 (ADDR_EXPR, ptrtype, t);
13290 return t;
13293 tree
13294 build_fold_addr_expr (tree t)
13296 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13299 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13300 of an indirection through OP0, or NULL_TREE if no simplification is
13301 possible. */
13303 tree
13304 fold_indirect_ref_1 (tree type, tree op0)
13306 tree sub = op0;
13307 tree subtype;
13309 STRIP_NOPS (sub);
13310 subtype = TREE_TYPE (sub);
13311 if (!POINTER_TYPE_P (subtype))
13312 return NULL_TREE;
13314 if (TREE_CODE (sub) == ADDR_EXPR)
13316 tree op = TREE_OPERAND (sub, 0);
13317 tree optype = TREE_TYPE (op);
13318 /* *&CONST_DECL folds to the value of the const decl. */
13319 if (TREE_CODE (op) == CONST_DECL)
13320 return DECL_INITIAL (op);
13321 /* *&p => p; make sure to handle *&"str"[cst] here. */
13322 if (type == optype)
13324 tree fop = fold_read_from_constant_string (op);
13325 if (fop)
13326 return fop;
13327 else
13328 return op;
13330 /* *(foo *)&fooarray => fooarray[0] */
13331 else if (TREE_CODE (optype) == ARRAY_TYPE
13332 && type == TREE_TYPE (optype))
13334 tree type_domain = TYPE_DOMAIN (optype);
13335 tree min_val = size_zero_node;
13336 if (type_domain && TYPE_MIN_VALUE (type_domain))
13337 min_val = TYPE_MIN_VALUE (type_domain);
13338 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13340 /* *(foo *)&complexfoo => __real__ complexfoo */
13341 else if (TREE_CODE (optype) == COMPLEX_TYPE
13342 && type == TREE_TYPE (optype))
13343 return fold_build1 (REALPART_EXPR, type, op);
13344 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13345 else if (TREE_CODE (optype) == VECTOR_TYPE
13346 && type == TREE_TYPE (optype))
13348 tree part_width = TYPE_SIZE (type);
13349 tree index = bitsize_int (0);
13350 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13354 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13355 if (TREE_CODE (sub) == PLUS_EXPR
13356 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13358 tree op00 = TREE_OPERAND (sub, 0);
13359 tree op01 = TREE_OPERAND (sub, 1);
13360 tree op00type;
13362 STRIP_NOPS (op00);
13363 op00type = TREE_TYPE (op00);
13364 if (TREE_CODE (op00) == ADDR_EXPR
13365 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13366 && type == TREE_TYPE (TREE_TYPE (op00type)))
13368 tree size = TYPE_SIZE_UNIT (type);
13369 if (tree_int_cst_equal (size, op01))
13370 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13374 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13375 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13376 && type == TREE_TYPE (TREE_TYPE (subtype)))
13378 tree type_domain;
13379 tree min_val = size_zero_node;
13380 sub = build_fold_indirect_ref (sub);
13381 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13382 if (type_domain && TYPE_MIN_VALUE (type_domain))
13383 min_val = TYPE_MIN_VALUE (type_domain);
13384 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13387 return NULL_TREE;
13390 /* Builds an expression for an indirection through T, simplifying some
13391 cases. */
13393 tree
13394 build_fold_indirect_ref (tree t)
13396 tree type = TREE_TYPE (TREE_TYPE (t));
13397 tree sub = fold_indirect_ref_1 (type, t);
13399 if (sub)
13400 return sub;
13401 else
13402 return build1 (INDIRECT_REF, type, t);
13405 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13407 tree
13408 fold_indirect_ref (tree t)
13410 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13412 if (sub)
13413 return sub;
13414 else
13415 return t;
13418 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13419 whose result is ignored. The type of the returned tree need not be
13420 the same as the original expression. */
13422 tree
13423 fold_ignored_result (tree t)
13425 if (!TREE_SIDE_EFFECTS (t))
13426 return integer_zero_node;
13428 for (;;)
13429 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13431 case tcc_unary:
13432 t = TREE_OPERAND (t, 0);
13433 break;
13435 case tcc_binary:
13436 case tcc_comparison:
13437 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13438 t = TREE_OPERAND (t, 0);
13439 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13440 t = TREE_OPERAND (t, 1);
13441 else
13442 return t;
13443 break;
13445 case tcc_expression:
13446 switch (TREE_CODE (t))
13448 case COMPOUND_EXPR:
13449 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13450 return t;
13451 t = TREE_OPERAND (t, 0);
13452 break;
13454 case COND_EXPR:
13455 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13456 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13457 return t;
13458 t = TREE_OPERAND (t, 0);
13459 break;
13461 default:
13462 return t;
13464 break;
13466 default:
13467 return t;
13471 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13472 This can only be applied to objects of a sizetype. */
13474 tree
13475 round_up (tree value, int divisor)
13477 tree div = NULL_TREE;
13479 gcc_assert (divisor > 0);
13480 if (divisor == 1)
13481 return value;
13483 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13484 have to do anything. Only do this when we are not given a const,
13485 because in that case, this check is more expensive than just
13486 doing the rounding. */
13487 if (TREE_CODE (value) != INTEGER_CST)
13489 div = build_int_cst (TREE_TYPE (value), divisor);
13491 if (multiple_of_p (TREE_TYPE (value), value, div))
13492 return value;
13495 /* If divisor is a power of two, simplify this to bit manipulation. */
13496 if (divisor == (divisor & -divisor))
13498 tree t;
13500 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13501 value = size_binop (PLUS_EXPR, value, t);
13502 t = build_int_cst (TREE_TYPE (value), -divisor);
13503 value = size_binop (BIT_AND_EXPR, value, t);
13505 else
13507 if (!div)
13508 div = build_int_cst (TREE_TYPE (value), divisor);
13509 value = size_binop (CEIL_DIV_EXPR, value, div);
13510 value = size_binop (MULT_EXPR, value, div);
13513 return value;
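/* A standalone sketch of the power-of-two branch above, on plain
   unsigned integers (hypothetical helper, assuming D is a power of two):
   adding D - 1 and masking with -D rounds V up to a multiple of D,
   e.g. example_round_up (13, 8) == 16.  */

static unsigned HOST_WIDE_INT
example_round_up (unsigned HOST_WIDE_INT v, unsigned HOST_WIDE_INT d)
{
  return (v + d - 1) & -d;
}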
13516 /* Likewise, but round down. */
13518 tree
13519 round_down (tree value, int divisor)
13521 tree div = NULL_TREE;
13523 gcc_assert (divisor > 0);
13524 if (divisor == 1)
13525 return value;
13527 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13528 have to do anything. Only do this when we are not given a const,
13529 because in that case, this check is more expensive than just
13530 doing the rounding. */
13531 if (TREE_CODE (value) != INTEGER_CST)
13533 div = build_int_cst (TREE_TYPE (value), divisor);
13535 if (multiple_of_p (TREE_TYPE (value), value, div))
13536 return value;
13539 /* If divisor is a power of two, simplify this to bit manipulation. */
13540 if (divisor == (divisor & -divisor))
13542 tree t;
13544 t = build_int_cst (TREE_TYPE (value), -divisor);
13545 value = size_binop (BIT_AND_EXPR, value, t);
13547 else
13549 if (!div)
13550 div = build_int_cst (TREE_TYPE (value), divisor);
13551 value = size_binop (FLOOR_DIV_EXPR, value, div);
13552 value = size_binop (MULT_EXPR, value, div);
13555 return value;
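/* Likewise standalone: for a power-of-two D, V & -D rounds down,
   e.g. 13 & -8 == 8, with no addition needed.  */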
13558 /* Returns the pointer to the base of the object addressed by EXP and
13559 extracts the information about the offset of the access, storing it
13560 in *PBITPOS and *POFFSET. */
13562 static tree
13563 split_address_to_core_and_offset (tree exp,
13564 HOST_WIDE_INT *pbitpos, tree *poffset)
13566 tree core;
13567 enum machine_mode mode;
13568 int unsignedp, volatilep;
13569 HOST_WIDE_INT bitsize;
13571 if (TREE_CODE (exp) == ADDR_EXPR)
13573 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13574 poffset, &mode, &unsignedp, &volatilep,
13575 false);
13576 core = build_fold_addr_expr (core);
13578 else
13580 core = exp;
13581 *pbitpos = 0;
13582 *poffset = NULL_TREE;
13585 return core;
13588 /* Returns true if addresses of E1 and E2 differ by a constant, false
13589 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13591 bool
13592 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13594 tree core1, core2;
13595 HOST_WIDE_INT bitpos1, bitpos2;
13596 tree toffset1, toffset2, tdiff, type;
13598 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13599 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13601 if (bitpos1 % BITS_PER_UNIT != 0
13602 || bitpos2 % BITS_PER_UNIT != 0
13603 || !operand_equal_p (core1, core2, 0))
13604 return false;
13606 if (toffset1 && toffset2)
13608 type = TREE_TYPE (toffset1);
13609 if (type != TREE_TYPE (toffset2))
13610 toffset2 = fold_convert (type, toffset2);
13612 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13613 if (!cst_and_fits_in_hwi (tdiff))
13614 return false;
13616 *diff = int_cst_value (tdiff);
13618 else if (toffset1 || toffset2)
13620 /* If only one of the offsets is non-constant, the difference cannot
13621 be a constant. */
13622 return false;
13624 else
13625 *diff = 0;
13627 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13628 return true;
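/* For example, with int a[10], the addresses &a[3] and &a[1] share the
   core &a and differ only by constant byte offsets, so the function
   returns true and sets *DIFF to 2 * sizeof (int).  */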
13631 /* Simplify the floating point expression EXP when the sign of the
13632 result is not significant. Return NULL_TREE if no simplification
13633 is possible. */
13635 tree
13636 fold_strip_sign_ops (tree exp)
13638 tree arg0, arg1;
13640 switch (TREE_CODE (exp))
13642 case ABS_EXPR:
13643 case NEGATE_EXPR:
13644 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13645 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13647 case MULT_EXPR:
13648 case RDIV_EXPR:
13649 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13650 return NULL_TREE;
13651 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13652 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13653 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13654 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13655 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13656 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13657 break;
13659 case COMPOUND_EXPR:
13660 arg0 = TREE_OPERAND (exp, 0);
13661 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13662 if (arg1)
13663 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13664 break;
13666 case COND_EXPR:
13667 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13668 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13669 if (arg0 || arg1)
13670 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13671 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13672 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13673 break;
13675 case CALL_EXPR:
13677 const enum built_in_function fcode = builtin_mathfn_code (exp);
13678 switch (fcode)
13680 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13681 /* Strip copysign function call, return the 1st argument. */
13682 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13683 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13684 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13686 default:
13687 /* Strip sign ops from the argument of "odd" math functions. */
13688 if (negate_mathfn_p (fcode))
13690 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13691 if (arg0)
13692 return build_function_call_expr (get_callee_fndecl (exp),
13693 build_tree_list (NULL_TREE,
13694 arg0));
13696 break;
13699 break;
13701 default:
13702 break;
13704 return NULL_TREE;
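/* For example, in a context such as fabs (tan (-x)) where the caller has
   decided the sign of the result is insignificant, the CALL_EXPR case
   above lets tan (-x) be rewritten as tan (x), because tan is an odd
   function (negate_mathfn_p) and the enclosing fabs discards the sign
   anyway.  */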