/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "rtl.h"
58 #include "expr.h"
59 #include "tm_p.h"
60 #include "toplev.h"
61 #include "ggc.h"
62 #include "hashtab.h"
63 #include "langhooks.h"
64 #include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
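
/* Worked example of the test above (an illustrative sketch, using 8-bit
   signed values for readability): with a = 100, b = 50 the true sum 150
   wraps to -106.  a ^ b has a clear sign bit (same signs), so ~(a ^ b)
   has it set, and a ^ sum has it set too (a and sum differ in sign);
   their AND is negative and the overflow is flagged.  With a = 100,
   b = -50, sum = 50, the signs of a and b differ, ~(a ^ b) has a clear
   sign bit, and no overflow is reported.  */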
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
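
/* Sketch of the encode/decode round-trip (illustrative only, assuming
   a 64-bit HOST_WIDE_INT, so BASE == 2^32):

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (words, 0x1122334455667788, 0x0102030405060708);
     // words[0] == 0x55667788, words[1] == 0x11223344,
     // words[2] == 0x05060708, words[3] == 0x01020304.
     decode (words, &lo, &hi);
     // lo and hi hold the original doubleword again.  */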
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
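
/* Worked example (a sketch, assuming an 8-bit signed type and
   OVERFLOWABLE == 1): forcing the constant 200 into the type first
   clears every bit above bit 7, then sign extends because bit 7 is
   set, giving the value -56.  Since the value changed and signed
   overflow is of interest for a sign-extended type, a copy is returned
   with TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW both set.  */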
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
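
/* Usage sketch (illustrative): adding 1 to the doubleword whose low
   word is all ones carries into the high word without wrapping the
   full doubleword:

     unsigned HOST_WIDE_INT lv;
     HOST_WIDE_INT hv;
     int ovf = add_double_with_sign (~(unsigned HOST_WIDE_INT) 0, 0,
                                     1, 0, &lv, &hv, true);
     // lv == 0, hv == 1, ovf == 0.  */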
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
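
/* Rotation sketch (illustrative): the two rotates are inverses for the
   same COUNT, since each ORs together the matching pair of logical
   shifts.  A COUNT is first reduced modulo PREC and made nonnegative,
   so with prec == 2 * HOST_BITS_PER_WIDE_INT rotating left by -8 is
   the same as rotating left by prec - 8, and rotating left by 8 and
   then right by 8 reproduces the original doubleword.  */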
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }
      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */
      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
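
/* Rounding-mode sketch for -7 / 2, signed (a worked example): the
   trial quotient is -3 with remainder -1.  TRUNC_DIV_EXPR keeps -3
   (rem -1); FLOOR_DIV_EXPR rounds toward negative infinity, adjusting
   to -4 (rem 1); CEIL_DIV_EXPR keeps -3, which is already the ceiling
   of -3.5; ROUND_DIV_EXPR sees 2 * |rem| >= |den| and rounds away from
   zero to -4.  The final remainder is always recomputed so that
   num == quo * den + rem holds.  */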
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
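
/* Example (illustrative): for a 32-bit signed type every constant can
   be negated except the minimum value itself.  The masked low bits of
   -2147483648 equal (unsigned HOST_WIDE_INT) 1 << 31, so the function
   returns false for it, and true for -2147483647.  */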
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
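
/* Folding sketch (illustrative): for an expression T of the form
   a - b, negate_expr hands the tree to fold_negate_expr, which rewrites
   -(a - b) as b - a when signed zeros and sign-dependent rounding are
   not honored; if no rule applies, the result is simply a NEGATE_EXPR
   wrapped around T and converted back to T's original type.  */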
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
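
/* Decomposition sketch (a worked example): splitting IN == a + 4 with
   CODE == PLUS_EXPR stores 4 in *LITP, leaves *CONP and *MINUS_LITP
   null, and returns a as the variable part; splitting a - 4 instead
   moves the 4 to *MINUS_LITP.  associate_trees below can then stitch
   the parts back together.  */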
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
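
/* Folding sketch (illustrative, for a signed 32-bit int type):
   int_const_binop (PLUS_EXPR, INT_MAX, 1, 0) computes the doubleword
   sum and then lets force_fit_type truncate it back to the type; the
   result is the INT_MIN constant with TREE_OVERFLOW set, so callers
   can tell that the compile-time value wrapped.  */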
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;
      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;
      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
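
/* Usage sketch (illustrative): with sizetype operands, size_diffop
   (size_int (2), size_int (5)) cannot represent -3 in the unsigned
   sizetype, so it computes 5 - 2 in sizetype, converts the result to
   ssizetype, and negates it, returning the ssizetype constant -3.  */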
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
1887 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1888 to an integer type. */
1890 static tree
1891 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1893 int overflow = 0;
1894 tree t;
1896 /* The following code implements the floating point to integer
1897 conversion rules required by the Java Language Specification,
1898 that IEEE NaNs are mapped to zero and values that overflow
1899 the target precision saturate, i.e. values greater than
1900 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1901 are mapped to INT_MIN. These semantics are allowed by the
1902 C and C++ standards that simply state that the behavior of
1903 FP-to-integer conversion is unspecified upon overflow. */
1905 HOST_WIDE_INT high, low;
1906 REAL_VALUE_TYPE r;
1907 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1909 switch (code)
1911 case FIX_TRUNC_EXPR:
1912 real_trunc (&r, VOIDmode, &x);
1913 break;
1915 default:
1916 gcc_unreachable ();
1919 /* If R is NaN, return zero and show we have an overflow. */
1920 if (REAL_VALUE_ISNAN (r))
1922 overflow = 1;
1923 high = 0;
1924 low = 0;
1927 /* See if R is less than the lower bound or greater than the
1928 upper bound. */
1930 if (! overflow)
1932 tree lt = TYPE_MIN_VALUE (type);
1933 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1934 if (REAL_VALUES_LESS (r, l))
1936 overflow = 1;
1937 high = TREE_INT_CST_HIGH (lt);
1938 low = TREE_INT_CST_LOW (lt);
1942 if (! overflow)
1944 tree ut = TYPE_MAX_VALUE (type);
1945 if (ut)
1947 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1948 if (REAL_VALUES_LESS (u, r))
1950 overflow = 1;
1951 high = TREE_INT_CST_HIGH (ut);
1952 low = TREE_INT_CST_LOW (ut);
1957 if (! overflow)
1958 REAL_VALUE_TO_INT (&low, &high, r);
1960 t = build_int_cst_wide (type, low, high);
1962 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1963 TREE_CONSTANT_OVERFLOW (arg1));
1964 return t;
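
/* A minimal standalone sketch (not GCC code) of the saturating semantics
   described above, written for double -> int; the function name is
   hypothetical.  NaN maps to zero, out-of-range values clamp to the
   type's bounds, and in-range values truncate toward zero. */

#include <limits.h>
#include <math.h>

static int
sketch_fix_trunc (double x)
{
  if (isnan (x))
    return 0;                      /* NaN -> zero */
  if (x >= (double) INT_MAX)
    return INT_MAX;                /* saturate at the upper bound */
  if (x <= (double) INT_MIN)
    return INT_MIN;                /* saturate at the lower bound */
  return (int) x;                  /* in range: truncate toward zero */
}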
1967 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1968 to another floating point type. */
1970 static tree
1971 fold_convert_const_real_from_real (tree type, tree arg1)
1973 REAL_VALUE_TYPE value;
1974 tree t;
1976 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1977 t = build_real (type, value);
1979 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1980 TREE_CONSTANT_OVERFLOW (t)
1981 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1982 return t;
1985 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1986 type TYPE. If no simplification can be done return NULL_TREE. */
1988 static tree
1989 fold_convert_const (enum tree_code code, tree type, tree arg1)
1991 if (TREE_TYPE (arg1) == type)
1992 return arg1;
1994 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1996 if (TREE_CODE (arg1) == INTEGER_CST)
1997 return fold_convert_const_int_from_int (type, arg1);
1998 else if (TREE_CODE (arg1) == REAL_CST)
1999 return fold_convert_const_int_from_real (code, type, arg1);
2001 else if (TREE_CODE (type) == REAL_TYPE)
2003 if (TREE_CODE (arg1) == INTEGER_CST)
2004 return build_real_from_int_cst (type, arg1);
2005 if (TREE_CODE (arg1) == REAL_CST)
2006 return fold_convert_const_real_from_real (type, arg1);
2008 return NULL_TREE;
2011 /* Construct a vector of zero elements of vector type TYPE. */
2013 static tree
2014 build_zero_vector (tree type)
2016 tree elem, list;
2017 int i, units;
2019 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2020 units = TYPE_VECTOR_SUBPARTS (type);
2022 list = NULL_TREE;
2023 for (i = 0; i < units; i++)
2024 list = tree_cons (NULL_TREE, elem, list);
2025 return build_vector (type, list);
2028 /* Convert expression ARG to type TYPE. Used by the middle-end for
2029 simple conversions in preference to calling the front-end's convert. */
2031 tree
2032 fold_convert (tree type, tree arg)
2034 tree orig = TREE_TYPE (arg);
2035 tree tem;
2037 if (type == orig)
2038 return arg;
2040 if (TREE_CODE (arg) == ERROR_MARK
2041 || TREE_CODE (type) == ERROR_MARK
2042 || TREE_CODE (orig) == ERROR_MARK)
2043 return error_mark_node;
2045 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2046 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2047 TYPE_MAIN_VARIANT (orig)))
2048 return fold_build1 (NOP_EXPR, type, arg);
2050 switch (TREE_CODE (type))
2052 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2053 case POINTER_TYPE: case REFERENCE_TYPE:
2054 case OFFSET_TYPE:
2055 if (TREE_CODE (arg) == INTEGER_CST)
2057 tem = fold_convert_const (NOP_EXPR, type, arg);
2058 if (tem != NULL_TREE)
2059 return tem;
2061 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2062 || TREE_CODE (orig) == OFFSET_TYPE)
2063 return fold_build1 (NOP_EXPR, type, arg);
2064 if (TREE_CODE (orig) == COMPLEX_TYPE)
2066 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2067 return fold_convert (type, tem);
2069 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2070 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2071 return fold_build1 (NOP_EXPR, type, arg);
2073 case REAL_TYPE:
2074 if (TREE_CODE (arg) == INTEGER_CST)
2076 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2077 if (tem != NULL_TREE)
2078 return tem;
2080 else if (TREE_CODE (arg) == REAL_CST)
2082 tem = fold_convert_const (NOP_EXPR, type, arg);
2083 if (tem != NULL_TREE)
2084 return tem;
2087 switch (TREE_CODE (orig))
2089 case INTEGER_TYPE:
2090 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2091 case POINTER_TYPE: case REFERENCE_TYPE:
2092 return fold_build1 (FLOAT_EXPR, type, arg);
2094 case REAL_TYPE:
2095 return fold_build1 (NOP_EXPR, type, arg);
2097 case COMPLEX_TYPE:
2098 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2099 return fold_convert (type, tem);
2101 default:
2102 gcc_unreachable ();
2105 case COMPLEX_TYPE:
2106 switch (TREE_CODE (orig))
2108 case INTEGER_TYPE:
2109 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2110 case POINTER_TYPE: case REFERENCE_TYPE:
2111 case REAL_TYPE:
2112 return build2 (COMPLEX_EXPR, type,
2113 fold_convert (TREE_TYPE (type), arg),
2114 fold_convert (TREE_TYPE (type), integer_zero_node));
2115 case COMPLEX_TYPE:
2117 tree rpart, ipart;
2119 if (TREE_CODE (arg) == COMPLEX_EXPR)
2121 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2122 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2123 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2126 arg = save_expr (arg);
2127 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2128 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2129 rpart = fold_convert (TREE_TYPE (type), rpart);
2130 ipart = fold_convert (TREE_TYPE (type), ipart);
2131 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2134 default:
2135 gcc_unreachable ();
2138 case VECTOR_TYPE:
2139 if (integer_zerop (arg))
2140 return build_zero_vector (type);
2141 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2142 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2143 || TREE_CODE (orig) == VECTOR_TYPE);
2144 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2146 case VOID_TYPE:
2147 tem = fold_ignored_result (arg);
2148 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2149 return tem;
2150 return fold_build1 (NOP_EXPR, type, tem);
2152 default:
2153 gcc_unreachable ();
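
/* A standalone illustration (not GCC code): the COMPLEX_TYPE -> REAL_TYPE
   path above takes REALPART_EXPR first, which matches C's own conversion
   rule of keeping the real part and discarding the imaginary part. */

#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double d = (double) z;           /* same value as creal (z) */
  assert (d == 3.0);
  return 0;
}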
2157 /* Return false if expr can be assumed not to be an lvalue, true
2158 otherwise. */
2160 static bool
2161 maybe_lvalue_p (tree x)
2163 /* We only need to wrap lvalue tree codes. */
2164 switch (TREE_CODE (x))
2166 case VAR_DECL:
2167 case PARM_DECL:
2168 case RESULT_DECL:
2169 case LABEL_DECL:
2170 case FUNCTION_DECL:
2171 case SSA_NAME:
2173 case COMPONENT_REF:
2174 case INDIRECT_REF:
2175 case ALIGN_INDIRECT_REF:
2176 case MISALIGNED_INDIRECT_REF:
2177 case ARRAY_REF:
2178 case ARRAY_RANGE_REF:
2179 case BIT_FIELD_REF:
2180 case OBJ_TYPE_REF:
2182 case REALPART_EXPR:
2183 case IMAGPART_EXPR:
2184 case PREINCREMENT_EXPR:
2185 case PREDECREMENT_EXPR:
2186 case SAVE_EXPR:
2187 case TRY_CATCH_EXPR:
2188 case WITH_CLEANUP_EXPR:
2189 case COMPOUND_EXPR:
2190 case MODIFY_EXPR:
2191 case GIMPLE_MODIFY_STMT:
2192 case TARGET_EXPR:
2193 case COND_EXPR:
2194 case BIND_EXPR:
2195 case MIN_EXPR:
2196 case MAX_EXPR:
2197 break;
2199 default:
2200 /* Assume the worst for front-end tree codes. */
2201 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2202 break;
2203 return false;
2206 return true;
2209 /* Return an expr equal to X but certainly not valid as an lvalue. */
2211 tree
2212 non_lvalue (tree x)
2214 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2215 us. */
2216 if (in_gimple_form)
2217 return x;
2219 if (! maybe_lvalue_p (x))
2220 return x;
2221 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2224 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2225 Zero means allow extended lvalues. */
2227 int pedantic_lvalues;
2229 /* When pedantic, return an expr equal to X but certainly not valid as a
2230 pedantic lvalue. Otherwise, return X. */
2232 static tree
2233 pedantic_non_lvalue (tree x)
2235 if (pedantic_lvalues)
2236 return non_lvalue (x);
2237 else
2238 return x;
2241 /* Given a tree comparison code, return the code that is the logical inverse
2242 of the given code. It is not safe to do this for floating-point
2243 comparisons, except for NE_EXPR and EQ_EXPR, so we are told whether NaNs
2244 must be honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2246 enum tree_code
2247 invert_tree_comparison (enum tree_code code, bool honor_nans)
2249 if (honor_nans && flag_trapping_math)
2250 return ERROR_MARK;
2252 switch (code)
2254 case EQ_EXPR:
2255 return NE_EXPR;
2256 case NE_EXPR:
2257 return EQ_EXPR;
2258 case GT_EXPR:
2259 return honor_nans ? UNLE_EXPR : LE_EXPR;
2260 case GE_EXPR:
2261 return honor_nans ? UNLT_EXPR : LT_EXPR;
2262 case LT_EXPR:
2263 return honor_nans ? UNGE_EXPR : GE_EXPR;
2264 case LE_EXPR:
2265 return honor_nans ? UNGT_EXPR : GT_EXPR;
2266 case LTGT_EXPR:
2267 return UNEQ_EXPR;
2268 case UNEQ_EXPR:
2269 return LTGT_EXPR;
2270 case UNGT_EXPR:
2271 return LE_EXPR;
2272 case UNGE_EXPR:
2273 return LT_EXPR;
2274 case UNLT_EXPR:
2275 return GE_EXPR;
2276 case UNLE_EXPR:
2277 return GT_EXPR;
2278 case ORDERED_EXPR:
2279 return UNORDERED_EXPR;
2280 case UNORDERED_EXPR:
2281 return ORDERED_EXPR;
2282 default:
2283 gcc_unreachable ();
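
/* A standalone illustration (not GCC code) of why the NaN-honoring
   inversions above are needed: against a NaN operand, an ordered
   comparison and its naive inverse are both false, so the inverse of
   LT_EXPR must be UNGE_EXPR ("unordered or >="), not GE_EXPR. */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = 1.0, y = nan ("");
  assert (!(x < y));               /* ordered < is false against NaN */
  assert (!(x >= y));              /* ...and so is the naive inverse */
  assert (!isless (x, y));         /* the UNGE-style test is true here */
  return 0;
}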
2287 /* Similar, but return the comparison that results if the operands are
2288 swapped. This is safe for floating-point. */
2290 enum tree_code
2291 swap_tree_comparison (enum tree_code code)
2293 switch (code)
2295 case EQ_EXPR:
2296 case NE_EXPR:
2297 case ORDERED_EXPR:
2298 case UNORDERED_EXPR:
2299 case LTGT_EXPR:
2300 case UNEQ_EXPR:
2301 return code;
2302 case GT_EXPR:
2303 return LT_EXPR;
2304 case GE_EXPR:
2305 return LE_EXPR;
2306 case LT_EXPR:
2307 return GT_EXPR;
2308 case LE_EXPR:
2309 return GE_EXPR;
2310 case UNGT_EXPR:
2311 return UNLT_EXPR;
2312 case UNGE_EXPR:
2313 return UNLE_EXPR;
2314 case UNLT_EXPR:
2315 return UNGT_EXPR;
2316 case UNLE_EXPR:
2317 return UNGE_EXPR;
2318 default:
2319 gcc_unreachable ();
2324 /* Convert a comparison tree code from an enum tree_code representation
2325 into a compcode bit-based encoding. This function is the inverse of
2326 compcode_to_comparison. */
2328 static enum comparison_code
2329 comparison_to_compcode (enum tree_code code)
2331 switch (code)
2333 case LT_EXPR:
2334 return COMPCODE_LT;
2335 case EQ_EXPR:
2336 return COMPCODE_EQ;
2337 case LE_EXPR:
2338 return COMPCODE_LE;
2339 case GT_EXPR:
2340 return COMPCODE_GT;
2341 case NE_EXPR:
2342 return COMPCODE_NE;
2343 case GE_EXPR:
2344 return COMPCODE_GE;
2345 case ORDERED_EXPR:
2346 return COMPCODE_ORD;
2347 case UNORDERED_EXPR:
2348 return COMPCODE_UNORD;
2349 case UNLT_EXPR:
2350 return COMPCODE_UNLT;
2351 case UNEQ_EXPR:
2352 return COMPCODE_UNEQ;
2353 case UNLE_EXPR:
2354 return COMPCODE_UNLE;
2355 case UNGT_EXPR:
2356 return COMPCODE_UNGT;
2357 case LTGT_EXPR:
2358 return COMPCODE_LTGT;
2359 case UNGE_EXPR:
2360 return COMPCODE_UNGE;
2361 default:
2362 gcc_unreachable ();
2366 /* Convert a compcode bit-based encoding of a comparison operator back
2367 to GCC's enum tree_code representation. This function is the
2368 inverse of comparison_to_compcode. */
2370 static enum tree_code
2371 compcode_to_comparison (enum comparison_code code)
2373 switch (code)
2375 case COMPCODE_LT:
2376 return LT_EXPR;
2377 case COMPCODE_EQ:
2378 return EQ_EXPR;
2379 case COMPCODE_LE:
2380 return LE_EXPR;
2381 case COMPCODE_GT:
2382 return GT_EXPR;
2383 case COMPCODE_NE:
2384 return NE_EXPR;
2385 case COMPCODE_GE:
2386 return GE_EXPR;
2387 case COMPCODE_ORD:
2388 return ORDERED_EXPR;
2389 case COMPCODE_UNORD:
2390 return UNORDERED_EXPR;
2391 case COMPCODE_UNLT:
2392 return UNLT_EXPR;
2393 case COMPCODE_UNEQ:
2394 return UNEQ_EXPR;
2395 case COMPCODE_UNLE:
2396 return UNLE_EXPR;
2397 case COMPCODE_UNGT:
2398 return UNGT_EXPR;
2399 case COMPCODE_LTGT:
2400 return LTGT_EXPR;
2401 case COMPCODE_UNGE:
2402 return UNGE_EXPR;
2403 default:
2404 gcc_unreachable ();
2408 /* Return a tree for the comparison which is the combination of
2409 doing the AND or OR (depending on CODE) of the two operations LCODE
2410 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2411 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2412 if this makes the transformation invalid. */
2414 tree
2415 combine_comparisons (enum tree_code code, enum tree_code lcode,
2416 enum tree_code rcode, tree truth_type,
2417 tree ll_arg, tree lr_arg)
2419 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2420 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2421 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2422 enum comparison_code compcode;
2424 switch (code)
2426 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2427 compcode = lcompcode & rcompcode;
2428 break;
2430 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2431 compcode = lcompcode | rcompcode;
2432 break;
2434 default:
2435 return NULL_TREE;
2438 if (!honor_nans)
2440 /* Eliminate unordered comparisons, as well as LTGT and ORD
2441 which are not used unless the mode has NaNs. */
2442 compcode &= ~COMPCODE_UNORD;
2443 if (compcode == COMPCODE_LTGT)
2444 compcode = COMPCODE_NE;
2445 else if (compcode == COMPCODE_ORD)
2446 compcode = COMPCODE_TRUE;
2448 else if (flag_trapping_math)
2450 /* Check that the original operation and the optimized ones will trap
2451 under the same condition. */
2452 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2453 && (lcompcode != COMPCODE_EQ)
2454 && (lcompcode != COMPCODE_ORD);
2455 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2456 && (rcompcode != COMPCODE_EQ)
2457 && (rcompcode != COMPCODE_ORD);
2458 bool trap = (compcode & COMPCODE_UNORD) == 0
2459 && (compcode != COMPCODE_EQ)
2460 && (compcode != COMPCODE_ORD);
2462 /* In a short-circuited boolean expression the LHS might be
2463 such that the RHS, if evaluated, will never trap. For
2464 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2465 if neither x nor y is NaN. (This is a mixed blessing: for
2466 example, the expression above will never trap, hence
2467 optimizing it to x < y would be invalid). */
2468 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2469 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2470 rtrap = false;
2472 /* If the comparison was short-circuited, and only the RHS
2473 trapped, we may now generate a spurious trap. */
2474 if (rtrap && !ltrap
2475 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2476 return NULL_TREE;
2478 /* If we changed the conditions that cause a trap, we lose. */
2479 if ((ltrap || rtrap) != trap)
2480 return NULL_TREE;
2483 if (compcode == COMPCODE_TRUE)
2484 return constant_boolean_node (true, truth_type);
2485 else if (compcode == COMPCODE_FALSE)
2486 return constant_boolean_node (false, truth_type);
2487 else
2488 return fold_build2 (compcode_to_comparison (compcode),
2489 truth_type, ll_arg, lr_arg);
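
/* A standalone illustration (not GCC code): OR-ing the LT and EQ cases
   behaves exactly like LE for every ordering of the operands, including
   the unordered (NaN) case where all three tests are false; that is what
   lets the bit encodings above simply be ANDed or ORed.  The trapping
   checks above concern *when* the operands get compared, not the
   resulting truth values. */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double vals[] = { 0.0, 1.0, NAN };
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      {
	double a = vals[i], b = vals[j];
	assert (((a < b) || (a == b)) == (a <= b));   /* LT | EQ == LE */
      }
  return 0;
}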
2492 /* Return nonzero if CODE is a tree code that represents a truth value. */
2494 static int
2495 truth_value_p (enum tree_code code)
2497 return (TREE_CODE_CLASS (code) == tcc_comparison
2498 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2499 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2500 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2503 /* Return nonzero if two operands (typically of the same tree node)
2504 are necessarily equal. If either argument has side-effects this
2505 function returns zero. FLAGS modifies behavior as follows:
2507 If OEP_ONLY_CONST is set, only return nonzero for constants.
2508 This function tests whether the operands are indistinguishable;
2509 it does not test whether they are equal using C's == operation.
2510 The distinction is important for IEEE floating point, because
2511 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2512 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2514 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2515 even though it may hold multiple values during a function.
2516 This is because a GCC tree node guarantees that nothing else is
2517 executed between the evaluation of its "operands" (which may often
2518 be evaluated in arbitrary order). Hence if the operands themselves
2519 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2520 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2521 unset means assuming isochronic (or instantaneous) tree equivalence.
2522 Unless comparing arbitrary expression trees, such as from different
2523 statements, this flag can usually be left unset.
2525 If OEP_PURE_SAME is set, then pure functions with identical arguments
2526 are considered the same. It is used when the caller has other ways
2527 to ensure that global memory is unchanged in between. */
2529 int
2530 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2532 /* If either is ERROR_MARK, they aren't equal. */
2533 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2534 return 0;
2536 /* If both types don't have the same signedness, then we can't consider
2537 them equal. We must check this before the STRIP_NOPS calls
2538 because they may change the signedness of the arguments. */
2539 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2540 return 0;
2542 /* If both types don't have the same precision, then it is not safe
2543 to strip NOPs. */
2544 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2545 return 0;
2547 STRIP_NOPS (arg0);
2548 STRIP_NOPS (arg1);
2550 /* In case both args are comparisons but with different comparison
2551 code, try to swap the comparison operands of one arg to produce
2552 a match and compare that variant. */
2553 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2554 && COMPARISON_CLASS_P (arg0)
2555 && COMPARISON_CLASS_P (arg1))
2557 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2559 if (TREE_CODE (arg0) == swap_code)
2560 return operand_equal_p (TREE_OPERAND (arg0, 0),
2561 TREE_OPERAND (arg1, 1), flags)
2562 && operand_equal_p (TREE_OPERAND (arg0, 1),
2563 TREE_OPERAND (arg1, 0), flags);
2566 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2567 /* This is needed for conversions and for COMPONENT_REF.
2568 Might as well play it safe and always test this. */
2569 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2570 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2571 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2572 return 0;
2574 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2575 We don't care about side effects in that case because the SAVE_EXPR
2576 takes care of that for us. In all other cases, two expressions are
2577 equal if they have no side effects. If we have two identical
2578 expressions with side effects that should be treated the same due
2579 to the only side effects being identical SAVE_EXPR's, that will
2580 be detected in the recursive calls below. */
2581 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2582 && (TREE_CODE (arg0) == SAVE_EXPR
2583 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2584 return 1;
2586 /* Next handle constant cases, those for which we can return 1 even
2587 if ONLY_CONST is set. */
2588 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2589 switch (TREE_CODE (arg0))
2591 case INTEGER_CST:
2592 return tree_int_cst_equal (arg0, arg1);
2594 case REAL_CST:
2595 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2596 TREE_REAL_CST (arg1)))
2597 return 1;
2600 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2602 /* If we do not distinguish between signed and unsigned zero,
2603 consider them equal. */
2604 if (real_zerop (arg0) && real_zerop (arg1))
2605 return 1;
2607 return 0;
2609 case VECTOR_CST:
2611 tree v1, v2;
2613 v1 = TREE_VECTOR_CST_ELTS (arg0);
2614 v2 = TREE_VECTOR_CST_ELTS (arg1);
2615 while (v1 && v2)
2617 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2618 flags))
2619 return 0;
2620 v1 = TREE_CHAIN (v1);
2621 v2 = TREE_CHAIN (v2);
2624 return v1 == v2;
2627 case COMPLEX_CST:
2628 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2629 flags)
2630 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2631 flags));
2633 case STRING_CST:
2634 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2635 && ! memcmp (TREE_STRING_POINTER (arg0),
2636 TREE_STRING_POINTER (arg1),
2637 TREE_STRING_LENGTH (arg0)));
2639 case ADDR_EXPR:
2640 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2641 0);
2642 default:
2643 break;
2646 if (flags & OEP_ONLY_CONST)
2647 return 0;
2649 /* Define macros to test an operand from arg0 and arg1 for equality and a
2650 variant that allows null and views null as being different from any
2651 non-null value. In the latter case, if either is null, they both
2652 must be; otherwise, do the normal comparison. */
2653 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2654 TREE_OPERAND (arg1, N), flags)
2656 #define OP_SAME_WITH_NULL(N) \
2657 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2658 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2660 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2662 case tcc_unary:
2663 /* Two conversions are equal only if signedness and modes match. */
2664 switch (TREE_CODE (arg0))
2666 case NOP_EXPR:
2667 case CONVERT_EXPR:
2668 case FIX_TRUNC_EXPR:
2669 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2670 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2671 return 0;
2672 break;
2673 default:
2674 break;
2677 return OP_SAME (0);
2680 case tcc_comparison:
2681 case tcc_binary:
2682 if (OP_SAME (0) && OP_SAME (1))
2683 return 1;
2685 /* For commutative ops, allow the other order. */
2686 return (commutative_tree_code (TREE_CODE (arg0))
2687 && operand_equal_p (TREE_OPERAND (arg0, 0),
2688 TREE_OPERAND (arg1, 1), flags)
2689 && operand_equal_p (TREE_OPERAND (arg0, 1),
2690 TREE_OPERAND (arg1, 0), flags));
2692 case tcc_reference:
2693 /* If either of the pointer (or reference) expressions we are
2694 dereferencing contains a side effect, these cannot be equal. */
2695 if (TREE_SIDE_EFFECTS (arg0)
2696 || TREE_SIDE_EFFECTS (arg1))
2697 return 0;
2699 switch (TREE_CODE (arg0))
2701 case INDIRECT_REF:
2702 case ALIGN_INDIRECT_REF:
2703 case MISALIGNED_INDIRECT_REF:
2704 case REALPART_EXPR:
2705 case IMAGPART_EXPR:
2706 return OP_SAME (0);
2708 case ARRAY_REF:
2709 case ARRAY_RANGE_REF:
2710 /* Operands 2 and 3 may be null. */
2711 return (OP_SAME (0)
2712 && OP_SAME (1)
2713 && OP_SAME_WITH_NULL (2)
2714 && OP_SAME_WITH_NULL (3));
2716 case COMPONENT_REF:
2717 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2718 may be NULL when we're called to compare MEM_EXPRs. */
2719 return OP_SAME_WITH_NULL (0)
2720 && OP_SAME (1)
2721 && OP_SAME_WITH_NULL (2);
2723 case BIT_FIELD_REF:
2724 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2726 default:
2727 return 0;
2730 case tcc_expression:
2731 switch (TREE_CODE (arg0))
2733 case ADDR_EXPR:
2734 case TRUTH_NOT_EXPR:
2735 return OP_SAME (0);
2737 case TRUTH_ANDIF_EXPR:
2738 case TRUTH_ORIF_EXPR:
2739 return OP_SAME (0) && OP_SAME (1);
2741 case TRUTH_AND_EXPR:
2742 case TRUTH_OR_EXPR:
2743 case TRUTH_XOR_EXPR:
2744 if (OP_SAME (0) && OP_SAME (1))
2745 return 1;
2747 /* Otherwise take into account this is a commutative operation. */
2748 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2749 TREE_OPERAND (arg1, 1), flags)
2750 && operand_equal_p (TREE_OPERAND (arg0, 1),
2751 TREE_OPERAND (arg1, 0), flags));
2753 case CALL_EXPR:
2754 /* If the CALL_EXPRs call different functions, then they
2755 clearly cannot be equal. */
2756 if (!OP_SAME (0))
2757 return 0;
2760 unsigned int cef = call_expr_flags (arg0);
2761 if (flags & OEP_PURE_SAME)
2762 cef &= ECF_CONST | ECF_PURE;
2763 else
2764 cef &= ECF_CONST;
2765 if (!cef)
2766 return 0;
2769 /* Now see if all the arguments are the same. operand_equal_p
2770 does not handle TREE_LIST, so we walk the operands here
2771 feeding them to operand_equal_p. */
2772 arg0 = TREE_OPERAND (arg0, 1);
2773 arg1 = TREE_OPERAND (arg1, 1);
2774 while (arg0 && arg1)
2776 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2777 flags))
2778 return 0;
2780 arg0 = TREE_CHAIN (arg0);
2781 arg1 = TREE_CHAIN (arg1);
2784 /* If we get here and both argument lists are exhausted
2785 then the CALL_EXPRs are equal. */
2786 return ! (arg0 || arg1);
2788 default:
2789 return 0;
2792 case tcc_declaration:
2793 /* Consider __builtin_sqrt equal to sqrt. */
2794 return (TREE_CODE (arg0) == FUNCTION_DECL
2795 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2796 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2797 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2799 default:
2800 return 0;
2803 #undef OP_SAME
2804 #undef OP_SAME_WITH_NULL
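
/* A toy standalone sketch (not GCC code) of the commutative-match logic
   above, on a hypothetical expression type: two trees compare equal if
   they match operand-for-operand, or in the swapped order when the
   operator commutes. */

#include <assert.h>
#include <string.h>

struct toy { const char *op; struct toy *l, *r; };   /* leaf if l is null */

static int
toy_equal (struct toy *a, struct toy *b)
{
  if (!a->l)
    return !b->l && strcmp (a->op, b->op) == 0;      /* compare leaves */
  if (!b->l || strcmp (a->op, b->op) != 0)
    return 0;
  if (toy_equal (a->l, b->l) && toy_equal (a->r, b->r))
    return 1;
  return strcmp (a->op, "+") == 0                    /* commutative op: */
	 && toy_equal (a->l, b->r)                   /* allow the other */
	 && toy_equal (a->r, b->l);                  /* order, as above */
}

int
main (void)
{
  struct toy x = { "x", 0, 0 }, y = { "y", 0, 0 };
  struct toy xy = { "+", &x, &y }, yx = { "+", &y, &x };
  assert (toy_equal (&xy, &yx));                     /* x + y matches y + x */
  return 0;
}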
2807 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2808 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2810 When in doubt, return 0. */
2812 static int
2813 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2815 int unsignedp1, unsignedpo;
2816 tree primarg0, primarg1, primother;
2817 unsigned int correct_width;
2819 if (operand_equal_p (arg0, arg1, 0))
2820 return 1;
2822 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2823 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2824 return 0;
2826 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2827 and see if the inner values are the same. This removes any
2828 signedness comparison, which doesn't matter here. */
2829 primarg0 = arg0, primarg1 = arg1;
2830 STRIP_NOPS (primarg0);
2831 STRIP_NOPS (primarg1);
2832 if (operand_equal_p (primarg0, primarg1, 0))
2833 return 1;
2835 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2836 actual comparison operand, ARG0.
2838 First throw away any conversions to wider types
2839 already present in the operands. */
2841 primarg1 = get_narrower (arg1, &unsignedp1);
2842 primother = get_narrower (other, &unsignedpo);
2844 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2845 if (unsignedp1 == unsignedpo
2846 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2847 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2849 tree type = TREE_TYPE (arg0);
2851 /* Make sure shorter operand is extended the right way
2852 to match the longer operand. */
2853 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2854 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2856 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2857 return 1;
2860 return 0;
2863 /* See if ARG is an expression that is either a comparison or is performing
2864 arithmetic on comparisons. The comparisons must only be comparing
2865 two different values, which will be stored in *CVAL1 and *CVAL2; if
2866 they are nonzero it means that some operands have already been found.
2867 No variables may be used anywhere else in the expression except in the
2868 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2869 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2871 If this is true, return 1. Otherwise, return zero. */
2873 static int
2874 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2876 enum tree_code code = TREE_CODE (arg);
2877 enum tree_code_class class = TREE_CODE_CLASS (code);
2879 /* We can handle some of the tcc_expression cases here. */
2880 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2881 class = tcc_unary;
2882 else if (class == tcc_expression
2883 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2884 || code == COMPOUND_EXPR))
2885 class = tcc_binary;
2887 else if (class == tcc_expression && code == SAVE_EXPR
2888 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2890 /* If we've already found a CVAL1 or CVAL2, this expression is
2891 too complex to handle. */
2892 if (*cval1 || *cval2)
2893 return 0;
2895 class = tcc_unary;
2896 *save_p = 1;
2899 switch (class)
2901 case tcc_unary:
2902 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2904 case tcc_binary:
2905 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2906 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2907 cval1, cval2, save_p));
2909 case tcc_constant:
2910 return 1;
2912 case tcc_expression:
2913 if (code == COND_EXPR)
2914 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2915 cval1, cval2, save_p)
2916 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2917 cval1, cval2, save_p)
2918 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2919 cval1, cval2, save_p));
2920 return 0;
2922 case tcc_comparison:
2923 /* First see if we can handle the first operand, then the second. For
2924 the second operand, we know *CVAL1 can't be zero. It must be that
2925 one side of the comparison is each of the values; test for the
2926 case where this isn't true by failing if the two operands
2927 are the same. */
2929 if (operand_equal_p (TREE_OPERAND (arg, 0),
2930 TREE_OPERAND (arg, 1), 0))
2931 return 0;
2933 if (*cval1 == 0)
2934 *cval1 = TREE_OPERAND (arg, 0);
2935 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2937 else if (*cval2 == 0)
2938 *cval2 = TREE_OPERAND (arg, 0);
2939 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2941 else
2942 return 0;
2944 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2946 else if (*cval2 == 0)
2947 *cval2 = TREE_OPERAND (arg, 1);
2948 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2950 else
2951 return 0;
2953 return 1;
2955 default:
2956 return 0;
2960 /* ARG is a tree that is known to contain just arithmetic operations and
2961 comparisons. Evaluate the operations in the tree substituting NEW0 for
2962 any occurrence of OLD0 as an operand of a comparison and likewise for
2963 NEW1 and OLD1. */
2965 static tree
2966 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2968 tree type = TREE_TYPE (arg);
2969 enum tree_code code = TREE_CODE (arg);
2970 enum tree_code_class class = TREE_CODE_CLASS (code);
2972 /* We can handle some of the tcc_expression cases here. */
2973 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2974 class = tcc_unary;
2975 else if (class == tcc_expression
2976 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2977 class = tcc_binary;
2979 switch (class)
2981 case tcc_unary:
2982 return fold_build1 (code, type,
2983 eval_subst (TREE_OPERAND (arg, 0),
2984 old0, new0, old1, new1));
2986 case tcc_binary:
2987 return fold_build2 (code, type,
2988 eval_subst (TREE_OPERAND (arg, 0),
2989 old0, new0, old1, new1),
2990 eval_subst (TREE_OPERAND (arg, 1),
2991 old0, new0, old1, new1));
2993 case tcc_expression:
2994 switch (code)
2996 case SAVE_EXPR:
2997 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2999 case COMPOUND_EXPR:
3000 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3002 case COND_EXPR:
3003 return fold_build3 (code, type,
3004 eval_subst (TREE_OPERAND (arg, 0),
3005 old0, new0, old1, new1),
3006 eval_subst (TREE_OPERAND (arg, 1),
3007 old0, new0, old1, new1),
3008 eval_subst (TREE_OPERAND (arg, 2),
3009 old0, new0, old1, new1));
3010 default:
3011 break;
3013 /* Fall through - ??? */
3015 case tcc_comparison:
3017 tree arg0 = TREE_OPERAND (arg, 0);
3018 tree arg1 = TREE_OPERAND (arg, 1);
3020 /* We need to check both for exact equality and tree equality. The
3021 former will be true if the operand has a side-effect. In that
3022 case, we know the operand occurred exactly once. */
3024 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3025 arg0 = new0;
3026 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3027 arg0 = new1;
3029 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3030 arg1 = new0;
3031 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3032 arg1 = new1;
3034 return fold_build2 (code, type, arg0, arg1);
3037 default:
3038 return arg;
3042 /* Return a tree for the case when the result of an expression is RESULT
3043 converted to TYPE and OMITTED was previously an operand of the expression
3044 but is now not needed (e.g., we folded OMITTED * 0).
3046 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3047 the conversion of RESULT to TYPE. */
3049 tree
3050 omit_one_operand (tree type, tree result, tree omitted)
3052 tree t = fold_convert (type, result);
3054 if (TREE_SIDE_EFFECTS (omitted))
3055 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3057 return non_lvalue (t);
3060 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3062 static tree
3063 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3065 tree t = fold_convert (type, result);
3067 if (TREE_SIDE_EFFECTS (omitted))
3068 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3070 return pedantic_non_lvalue (t);
3073 /* Return a tree for the case when the result of an expression is RESULT
3074 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3075 of the expression but are now not needed.
3077 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3078 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3079 evaluated before OMITTED2. Otherwise, if neither has side effects,
3080 just do the conversion of RESULT to TYPE. */
3082 tree
3083 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3085 tree t = fold_convert (type, result);
3087 if (TREE_SIDE_EFFECTS (omitted2))
3088 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3089 if (TREE_SIDE_EFFECTS (omitted1))
3090 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3092 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
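
/* A standalone illustration (not GCC code) of what "evaluate the omitted
   operand" means at the source level: folding f () * 0 keeps the call for
   its side effect, giving the equivalent of the C expression (f (), 0). */

#include <assert.h>

static int ncalls;
static int f (void) { ncalls++; return 42; }

int
main (void)
{
  int r = (f (), 0);               /* COMPOUND_EXPR: the call survives */
  assert (r == 0 && ncalls == 1);
  return 0;
}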
3096 /* Return a simplified tree node for the truth-negation of ARG. This
3097 never alters ARG itself. We assume that ARG is an operation that
3098 returns a truth value (0 or 1).
3100 FIXME: one would think we would fold the result, but it causes
3101 problems with the dominator optimizer. */
3103 tree
3104 fold_truth_not_expr (tree arg)
3106 tree type = TREE_TYPE (arg);
3107 enum tree_code code = TREE_CODE (arg);
3109 /* If this is a comparison, we can simply invert it, except for
3110 floating-point non-equality comparisons, in which case we just
3111 enclose a TRUTH_NOT_EXPR around what we have. */
3113 if (TREE_CODE_CLASS (code) == tcc_comparison)
3115 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3116 if (FLOAT_TYPE_P (op_type)
3117 && flag_trapping_math
3118 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3119 && code != NE_EXPR && code != EQ_EXPR)
3120 return NULL_TREE;
3121 else
3123 code = invert_tree_comparison (code,
3124 HONOR_NANS (TYPE_MODE (op_type)));
3125 if (code == ERROR_MARK)
3126 return NULL_TREE;
3127 else
3128 return build2 (code, type,
3129 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3133 switch (code)
3135 case INTEGER_CST:
3136 return constant_boolean_node (integer_zerop (arg), type);
3138 case TRUTH_AND_EXPR:
3139 return build2 (TRUTH_OR_EXPR, type,
3140 invert_truthvalue (TREE_OPERAND (arg, 0)),
3141 invert_truthvalue (TREE_OPERAND (arg, 1)));
3143 case TRUTH_OR_EXPR:
3144 return build2 (TRUTH_AND_EXPR, type,
3145 invert_truthvalue (TREE_OPERAND (arg, 0)),
3146 invert_truthvalue (TREE_OPERAND (arg, 1)));
3148 case TRUTH_XOR_EXPR:
3149 /* Here we can invert either operand. We invert the first operand
3150 unless the second operand is a TRUTH_NOT_EXPR in which case our
3151 result is the XOR of the first operand with the inside of the
3152 negation of the second operand. */
3154 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3155 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3156 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3157 else
3158 return build2 (TRUTH_XOR_EXPR, type,
3159 invert_truthvalue (TREE_OPERAND (arg, 0)),
3160 TREE_OPERAND (arg, 1));
3162 case TRUTH_ANDIF_EXPR:
3163 return build2 (TRUTH_ORIF_EXPR, type,
3164 invert_truthvalue (TREE_OPERAND (arg, 0)),
3165 invert_truthvalue (TREE_OPERAND (arg, 1)));
3167 case TRUTH_ORIF_EXPR:
3168 return build2 (TRUTH_ANDIF_EXPR, type,
3169 invert_truthvalue (TREE_OPERAND (arg, 0)),
3170 invert_truthvalue (TREE_OPERAND (arg, 1)));
3172 case TRUTH_NOT_EXPR:
3173 return TREE_OPERAND (arg, 0);
3175 case COND_EXPR:
3177 tree arg1 = TREE_OPERAND (arg, 1);
3178 tree arg2 = TREE_OPERAND (arg, 2);
3179 /* A COND_EXPR may have a throw as one operand, which
3180 then has void type. Just leave void operands
3181 as they are. */
3182 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3183 VOID_TYPE_P (TREE_TYPE (arg1))
3184 ? arg1 : invert_truthvalue (arg1),
3185 VOID_TYPE_P (TREE_TYPE (arg2))
3186 ? arg2 : invert_truthvalue (arg2));
3189 case COMPOUND_EXPR:
3190 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3191 invert_truthvalue (TREE_OPERAND (arg, 1)));
3193 case NON_LVALUE_EXPR:
3194 return invert_truthvalue (TREE_OPERAND (arg, 0));
3196 case NOP_EXPR:
3197 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3198 return build1 (TRUTH_NOT_EXPR, type, arg);
3200 case CONVERT_EXPR:
3201 case FLOAT_EXPR:
3202 return build1 (TREE_CODE (arg), type,
3203 invert_truthvalue (TREE_OPERAND (arg, 0)));
3205 case BIT_AND_EXPR:
3206 if (!integer_onep (TREE_OPERAND (arg, 1)))
3207 break;
3208 return build2 (EQ_EXPR, type, arg,
3209 build_int_cst (type, 0));
3211 case SAVE_EXPR:
3212 return build1 (TRUTH_NOT_EXPR, type, arg);
3214 case CLEANUP_POINT_EXPR:
3215 return build1 (CLEANUP_POINT_EXPR, type,
3216 invert_truthvalue (TREE_OPERAND (arg, 0)));
3218 default:
3219 break;
3222 return NULL_TREE;
3225 /* Return a simplified tree node for the truth-negation of ARG. This
3226 never alters ARG itself. We assume that ARG is an operation that
3227 returns a truth value (0 or 1).
3229 FIXME: one would think we would fold the result, but it causes
3230 problems with the dominator optimizer. */
3232 tree
3233 invert_truthvalue (tree arg)
3235 tree tem;
3237 if (TREE_CODE (arg) == ERROR_MARK)
3238 return arg;
3240 tem = fold_truth_not_expr (arg);
3241 if (!tem)
3242 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3244 return tem;
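
/* A standalone illustration (not GCC code): the TRUTH_AND_EXPR and
   TRUTH_OR_EXPR cases above are De Morgan's laws, checked here by brute
   force over all truth values. */

#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (!(a && b) == (!a || !b));
	assert (!(a || b) == (!a && !b));
      }
  return 0;
}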
3247 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3248 operands are another bit-wise operation with a common input. If so,
3249 distribute the bit operations to save an operation and possibly two if
3250 constants are involved. For example, convert
3251 (A | B) & (A | C) into A | (B & C)
3252 Further simplification will occur if B and C are constants.
3254 If this optimization cannot be done, 0 will be returned. */
3256 static tree
3257 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3259 tree common;
3260 tree left, right;
3262 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3263 || TREE_CODE (arg0) == code
3264 || (TREE_CODE (arg0) != BIT_AND_EXPR
3265 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3266 return 0;
3268 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3270 common = TREE_OPERAND (arg0, 0);
3271 left = TREE_OPERAND (arg0, 1);
3272 right = TREE_OPERAND (arg1, 1);
3274 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3276 common = TREE_OPERAND (arg0, 0);
3277 left = TREE_OPERAND (arg0, 1);
3278 right = TREE_OPERAND (arg1, 0);
3280 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3282 common = TREE_OPERAND (arg0, 1);
3283 left = TREE_OPERAND (arg0, 0);
3284 right = TREE_OPERAND (arg1, 1);
3286 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3288 common = TREE_OPERAND (arg0, 1);
3289 left = TREE_OPERAND (arg0, 0);
3290 right = TREE_OPERAND (arg1, 0);
3292 else
3293 return 0;
3295 return fold_build2 (TREE_CODE (arg0), type, common,
3296 fold_build2 (code, type, left, right));
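
/* A standalone illustration (not GCC code): both directions of the
   distribution performed above, spot-checked on concrete bit patterns
   (the identities hold for all values). */

#include <assert.h>

int
main (void)
{
  unsigned t[] = { 0x00u, 0x0Fu, 0xF0u, 0xFFu, 0x5Au };
  for (int i = 0; i < 5; i++)
    for (int j = 0; j < 5; j++)
      for (int k = 0; k < 5; k++)
	{
	  unsigned a = t[i], b = t[j], c = t[k];
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
  return 0;
}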
3299 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3300 with code CODE. This optimization is unsafe. */
3301 static tree
3302 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3304 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3305 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3307 /* (A / C) +- (B / C) -> (A +- B) / C. */
3308 if (mul0 == mul1
3309 && operand_equal_p (TREE_OPERAND (arg0, 1),
3310 TREE_OPERAND (arg1, 1), 0))
3311 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3312 fold_build2 (code, type,
3313 TREE_OPERAND (arg0, 0),
3314 TREE_OPERAND (arg1, 0)),
3315 TREE_OPERAND (arg0, 1));
3317 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3318 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3319 TREE_OPERAND (arg1, 0), 0)
3320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3321 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3323 REAL_VALUE_TYPE r0, r1;
3324 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3325 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3326 if (!mul0)
3327 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3328 if (!mul1)
3329 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3330 real_arithmetic (&r0, code, &r0, &r1);
3331 return fold_build2 (MULT_EXPR, type,
3332 TREE_OPERAND (arg0, 0),
3333 build_real (type, r0));
3336 return NULL_TREE;
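
/* A standalone illustration (not GCC code) of why this is flagged as
   unsafe for IEEE arithmetic: (A + B) / C can overflow to infinity even
   though A / C + B / C stays finite, so the two forms are only
   interchangeable under unsafe-math-style flags. */

#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double a = DBL_MAX, b = DBL_MAX, c = 2.0;
  assert (isfinite (a / c + b / c));   /* equals DBL_MAX */
  assert (isinf ((a + b) / c));        /* a + b already overflowed */
  return 0;
}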
3339 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3340 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3342 static tree
3343 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3344 int unsignedp)
3346 tree result;
3348 if (bitpos == 0)
3350 tree size = TYPE_SIZE (TREE_TYPE (inner));
3351 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3352 || POINTER_TYPE_P (TREE_TYPE (inner)))
3353 && host_integerp (size, 0)
3354 && tree_low_cst (size, 0) == bitsize)
3355 return fold_convert (type, inner);
3358 result = build3 (BIT_FIELD_REF, type, inner,
3359 size_int (bitsize), bitsize_int (bitpos));
3361 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3363 return result;
3366 /* Optimize a bit-field compare.
3368 There are two cases: First is a compare against a constant and the
3369 second is a comparison of two items where the fields are at the same
3370 bit position relative to the start of a chunk (byte, halfword, word)
3371 large enough to contain it. In these cases we can avoid the shift
3372 implicit in bitfield extractions.
3374 For constants, we emit a compare of the shifted constant with the
3375 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3376 compared. For two fields at the same position, we do the ANDs with the
3377 similar mask and compare the result of the ANDs.
3379 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3380 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3381 are the left and right operands of the comparison, respectively.
3383 If the optimization described above can be done, we return the resulting
3384 tree. Otherwise we return zero. */
3386 static tree
3387 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3388 tree lhs, tree rhs)
3390 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3391 tree type = TREE_TYPE (lhs);
3392 tree signed_type, unsigned_type;
3393 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3394 enum machine_mode lmode, rmode, nmode;
3395 int lunsignedp, runsignedp;
3396 int lvolatilep = 0, rvolatilep = 0;
3397 tree linner, rinner = NULL_TREE;
3398 tree mask;
3399 tree offset;
3401 /* Get all the information about the extractions being done. If the bit size
3402 is the same as the size of the underlying object, we aren't doing an
3403 extraction at all and so can do nothing. We also don't want to
3404 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3405 then will no longer be able to replace it. */
3406 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3407 &lunsignedp, &lvolatilep, false);
3408 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3409 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3410 return 0;
3412 if (!const_p)
3414 /* If this is not a constant, we can only do something if bit positions,
3415 sizes, and signedness are the same. */
3416 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3417 &runsignedp, &rvolatilep, false);
3419 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3420 || lunsignedp != runsignedp || offset != 0
3421 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3422 return 0;
3425 /* See if we can find a mode to refer to this field. We should be able to,
3426 but fail if we can't. */
3427 nmode = get_best_mode (lbitsize, lbitpos,
3428 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3429 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3430 TYPE_ALIGN (TREE_TYPE (rinner))),
3431 word_mode, lvolatilep || rvolatilep);
3432 if (nmode == VOIDmode)
3433 return 0;
3435 /* Set signed and unsigned types of the precision of this mode for the
3436 shifts below. */
3437 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3438 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3440 /* Compute the bit position and size for the new reference and our offset
3441 within it. If the new reference is the same size as the original, we
3442 won't optimize anything, so return zero. */
3443 nbitsize = GET_MODE_BITSIZE (nmode);
3444 nbitpos = lbitpos & ~ (nbitsize - 1);
3445 lbitpos -= nbitpos;
3446 if (nbitsize == lbitsize)
3447 return 0;
3449 if (BYTES_BIG_ENDIAN)
3450 lbitpos = nbitsize - lbitsize - lbitpos;
3452 /* Make the mask to be used against the extracted field. */
3453 mask = build_int_cst (unsigned_type, -1);
3454 mask = force_fit_type (mask, 0, false, false);
3455 mask = fold_convert (unsigned_type, mask);
3456 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3457 mask = const_binop (RSHIFT_EXPR, mask,
3458 size_int (nbitsize - lbitsize - lbitpos), 0);
3460 if (! const_p)
3461 /* If not comparing with constant, just rework the comparison
3462 and return. */
3463 return fold_build2 (code, compare_type,
3464 fold_build2 (BIT_AND_EXPR, unsigned_type,
3465 make_bit_field_ref (linner,
3466 unsigned_type,
3467 nbitsize, nbitpos,
3468 1),
3469 mask),
3470 fold_build2 (BIT_AND_EXPR, unsigned_type,
3471 make_bit_field_ref (rinner,
3472 unsigned_type,
3473 nbitsize, nbitpos,
3474 1),
3475 mask));
3477 /* Otherwise, we are handling the constant case. See if the constant is too
3478 big for the field. Warn and return a tree for 0 (false) if so. We do
3479 this not only for its own sake, but to avoid having to test for this
3480 error case below. If we didn't, we might generate wrong code.
3482 For unsigned fields, the constant shifted right by the field length should
3483 be all zero. For signed fields, the high-order bits should agree with
3484 the sign bit. */
3486 if (lunsignedp)
3488 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3489 fold_convert (unsigned_type, rhs),
3490 size_int (lbitsize), 0)))
3492 warning (0, "comparison is always %d due to width of bit-field",
3493 code == NE_EXPR);
3494 return constant_boolean_node (code == NE_EXPR, compare_type);
3497 else
3499 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3500 size_int (lbitsize - 1), 0);
3501 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3503 warning (0, "comparison is always %d due to width of bit-field",
3504 code == NE_EXPR);
3505 return constant_boolean_node (code == NE_EXPR, compare_type);
3509 /* Single-bit compares should always be against zero. */
3510 if (lbitsize == 1 && ! integer_zerop (rhs))
3512 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3513 rhs = build_int_cst (type, 0);
3516 /* Make a new bitfield reference, shift the constant over the
3517 appropriate number of bits and mask it with the computed mask
3518 (in case this was a signed field). If we changed it, make a new one. */
3519 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3520 if (lvolatilep)
3522 TREE_SIDE_EFFECTS (lhs) = 1;
3523 TREE_THIS_VOLATILE (lhs) = 1;
3526 rhs = const_binop (BIT_AND_EXPR,
3527 const_binop (LSHIFT_EXPR,
3528 fold_convert (unsigned_type, rhs),
3529 size_int (lbitpos), 0),
3530 mask, 0);
3532 return build2 (code, compare_type,
3533 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3534 rhs);
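
/* A standalone sketch (not GCC code) of the shift-free form this
   optimization produces, on a hand-extracted 4-bit field at bit 3 of a
   16-bit word: instead of extracting the field (which needs a shift),
   mask the containing word and compare against the pre-shifted constant. */

#include <assert.h>
#include <stdint.h>

#define SHIFT 3
#define WIDTH 4

int
main (void)
{
  for (uint32_t w = 0; w <= 0xFFFF; w++)
    {
      unsigned field = (w >> SHIFT) & ((1u << WIDTH) - 1);     /* extract */
      unsigned mask = ((1u << WIDTH) - 1) << SHIFT;
      assert ((field == 5) == ((w & mask) == (5u << SHIFT)));  /* folded */
    }
  return 0;
}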
3537 /* Subroutine for fold_truthop: decode a field reference.
3539 If EXP is a comparison reference, we return the innermost reference.
3541 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3542 set to the starting bit number.
3544 If the innermost field can be completely contained in a mode-sized
3545 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3547 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3548 otherwise it is not changed.
3550 *PUNSIGNEDP is set to the signedness of the field.
3552 *PMASK is set to the mask used. This is either contained in a
3553 BIT_AND_EXPR or derived from the width of the field.
3555 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3557 Return 0 if this is not a component reference or is one that we can't
3558 do anything with. */
3560 static tree
3561 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3562 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3563 int *punsignedp, int *pvolatilep,
3564 tree *pmask, tree *pand_mask)
3566 tree outer_type = 0;
3567 tree and_mask = 0;
3568 tree mask, inner, offset;
3569 tree unsigned_type;
3570 unsigned int precision;
3572 /* All the optimizations using this function assume integer fields.
3573 There are problems with FP fields since the type_for_size call
3574 below can fail for, e.g., XFmode. */
3575 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3576 return 0;
3578 /* We are interested in the bare arrangement of bits, so strip everything
3579 that doesn't affect the machine mode. However, record the type of the
3580 outermost expression if it may matter below. */
3581 if (TREE_CODE (exp) == NOP_EXPR
3582 || TREE_CODE (exp) == CONVERT_EXPR
3583 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3584 outer_type = TREE_TYPE (exp);
3585 STRIP_NOPS (exp);
3587 if (TREE_CODE (exp) == BIT_AND_EXPR)
3589 and_mask = TREE_OPERAND (exp, 1);
3590 exp = TREE_OPERAND (exp, 0);
3591 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3592 if (TREE_CODE (and_mask) != INTEGER_CST)
3593 return 0;
3596 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3597 punsignedp, pvolatilep, false);
3598 if ((inner == exp && and_mask == 0)
3599 || *pbitsize < 0 || offset != 0
3600 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3601 return 0;
3603 /* If the number of bits in the reference is the same as the bitsize of
3604 the outer type, then the outer type gives the signedness. Otherwise
3605 (in case of a small bitfield) the signedness is unchanged. */
3606 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3607 *punsignedp = TYPE_UNSIGNED (outer_type);
3609 /* Compute the mask to access the bitfield. */
3610 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3611 precision = TYPE_PRECISION (unsigned_type);
3613 mask = build_int_cst (unsigned_type, -1);
3614 mask = force_fit_type (mask, 0, false, false);
3616 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3617 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3619 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3620 if (and_mask != 0)
3621 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3622 fold_convert (unsigned_type, and_mask), mask);
3624 *pmask = mask;
3625 *pand_mask = and_mask;
3626 return inner;
3629 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3630 bit positions. */
3632 static int
3633 all_ones_mask_p (tree mask, int size)
3635 tree type = TREE_TYPE (mask);
3636 unsigned int precision = TYPE_PRECISION (type);
3637 tree tmask;
3639 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3640 tmask = force_fit_type (tmask, 0, false, false);
3642 return
3643 tree_int_cst_equal (mask,
3644 const_binop (RSHIFT_EXPR,
3645 const_binop (LSHIFT_EXPR, tmask,
3646 size_int (precision - size),
3647 0),
3648 size_int (precision - size), 0));
3651 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3652 represents the sign bit of EXP's type. If EXP represents a sign
3653 or zero extension, also test VAL against the unextended type.
3654 The return value is the (sub)expression whose sign bit is VAL,
3655 or NULL_TREE otherwise. */
3657 static tree
3658 sign_bit_p (tree exp, tree val)
3660 unsigned HOST_WIDE_INT mask_lo, lo;
3661 HOST_WIDE_INT mask_hi, hi;
3662 int width;
3663 tree t;
3665 /* Tree EXP must have an integral type. */
3666 t = TREE_TYPE (exp);
3667 if (! INTEGRAL_TYPE_P (t))
3668 return NULL_TREE;
3670 /* Tree VAL must be an integer constant. */
3671 if (TREE_CODE (val) != INTEGER_CST
3672 || TREE_CONSTANT_OVERFLOW (val))
3673 return NULL_TREE;
3675 width = TYPE_PRECISION (t);
3676 if (width > HOST_BITS_PER_WIDE_INT)
3678 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3679 lo = 0;
3681 mask_hi = ((unsigned HOST_WIDE_INT) -1
3682 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3683 mask_lo = -1;
3685 else
3687 hi = 0;
3688 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3690 mask_hi = 0;
3691 mask_lo = ((unsigned HOST_WIDE_INT) -1
3692 >> (HOST_BITS_PER_WIDE_INT - width));
3695 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3696 treat VAL as if it were unsigned. */
3697 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3698 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3699 return exp;
3701 /* Handle extension from a narrower type. */
3702 if (TREE_CODE (exp) == NOP_EXPR
3703 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3704 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3706 return NULL_TREE;
3709 /* Subroutine for fold_truthop: determine if an operand is simple enough
3710 to be evaluated unconditionally. */
3712 static int
3713 simple_operand_p (tree exp)
3715 /* Strip any conversions that don't change the machine mode. */
3716 STRIP_NOPS (exp);
3718 return (CONSTANT_CLASS_P (exp)
3719 || TREE_CODE (exp) == SSA_NAME
3720 || (DECL_P (exp)
3721 && ! TREE_ADDRESSABLE (exp)
3722 && ! TREE_THIS_VOLATILE (exp)
3723 && ! DECL_NONLOCAL (exp)
3724 /* Don't regard global variables as simple. They may be
3725 allocated in ways unknown to the compiler (shared memory,
3726 #pragma weak, etc). */
3727 && ! TREE_PUBLIC (exp)
3728 && ! DECL_EXTERNAL (exp)
3729 /* Loading a static variable is unduly expensive, but global
3730 registers aren't expensive. */
3731 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3734 /* The following functions are subroutines to fold_range_test and allow it to
3735 try to change a logical combination of comparisons into a range test.
3737 For example, both
3738 X == 2 || X == 3 || X == 4 || X == 5
3739 and
3740 X >= 2 && X <= 5
3741 are converted to
3742 (unsigned) (X - 2) <= 3
3744 We describe each set of comparisons as being either inside or outside
3745 a range, using a variable named like IN_P, and then describe the
3746 range with a lower and upper bound. If one of the bounds is omitted,
3747 it represents either the highest or lowest value of the type.
3749 In the comments below, we represent a range by two numbers in brackets
3750 preceded by a "+" to designate being inside that range, or a "-" to
3751 designate being outside that range, so the condition can be inverted by
3752 flipping the prefix. An omitted bound is represented by a "-". For
3753 example, "- [-, 10]" means being outside the range starting at the lowest
3754 possible value and ending at 10, in other words, being greater than 10.
3755 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3756 always false.
3758 We set up things so that the missing bounds are handled in a consistent
3759 manner so neither a missing bound nor "true" and "false" need to be
3760 handled using a special case. */
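
/* A standalone illustration (not GCC code) of the transformation named
   above, verified exhaustively over a span of values: the unsigned
   subtraction wraps values below the lower bound to large numbers, so
   one comparison tests both bounds at once. */

#include <assert.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    {
      int slow = (x == 2 || x == 3 || x == 4 || x == 5);
      int also = (x >= 2 && x <= 5);
      int fast = ((unsigned) (x - 2) <= 3);
      assert (slow == fast && also == fast);
    }
  return 0;
}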
3762 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3763 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3764 and UPPER1_P are nonzero if the respective argument is an upper bound
3765 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3766 must be specified for a comparison. ARG1 will be converted to ARG0's
3767 type if both are specified. */
3769 static tree
3770 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3771 tree arg1, int upper1_p)
3773 tree tem;
3774 int result;
3775 int sgn0, sgn1;
3777 /* If neither arg represents infinity, do the normal operation.
3778 Else, if not a comparison, return infinity. Else handle the special
3779 comparison rules. Note that most of the cases below won't occur, but
3780 are handled for consistency. */
3782 if (arg0 != 0 && arg1 != 0)
3784 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3785 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3786 STRIP_NOPS (tem);
3787 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3790 if (TREE_CODE_CLASS (code) != tcc_comparison)
3791 return 0;
3793 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3794 for neither. In real mathematics, we could not assume that open-ended
3795 ranges compare equal. But this is computer arithmetic, where numbers are
3796 finite, so we can stand in the value Z for any missing upper bound and
3797 -Z for any missing lower bound, Z being greater than any representable
3798 number. This permits us to treat unbounded ranges as equal. */
3799 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3800 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3801 switch (code)
3803 case EQ_EXPR:
3804 result = sgn0 == sgn1;
3805 break;
3806 case NE_EXPR:
3807 result = sgn0 != sgn1;
3808 break;
3809 case LT_EXPR:
3810 result = sgn0 < sgn1;
3811 break;
3812 case LE_EXPR:
3813 result = sgn0 <= sgn1;
3814 break;
3815 case GT_EXPR:
3816 result = sgn0 > sgn1;
3817 break;
3818 case GE_EXPR:
3819 result = sgn0 >= sgn1;
3820 break;
3821 default:
3822 gcc_unreachable ();
3825 return constant_boolean_node (result, type);
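/* An instance of the infinity handling above: with ARG0 omitted and
   UPPER0_P nonzero (a missing upper bound, SGN0 == 1) and a finite
   ARG1 (SGN1 == 0), LE_EXPR yields false, matching the convention
   that an unbounded upper end lies above every representable
   value.  */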
3828 /* Given EXP, a logical expression, set the range it is testing into
3829 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3830 actually being tested. *PLOW and *PHIGH will be made of the same type
3831 as the returned expression. If EXP is not a comparison, we will most
3832 likely not be returning a useful value and range. */
3834 static tree
3835 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3837 enum tree_code code;
3838 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3839 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3840 int in_p, n_in_p;
3841 tree low, high, n_low, n_high;
3843 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3844 and see if we can refine the range. Some of the cases below may not
3845 happen, but it doesn't seem worth worrying about this. We "continue"
3846 the outer loop when we've changed something; otherwise we "break"
3847 the switch, which will "break" the while. */
3849 in_p = 0;
3850 low = high = build_int_cst (TREE_TYPE (exp), 0);
3852 while (1)
3854 code = TREE_CODE (exp);
3855 exp_type = TREE_TYPE (exp);
3857 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3859 if (TREE_CODE_LENGTH (code) > 0)
3860 arg0 = TREE_OPERAND (exp, 0);
3861 if (TREE_CODE_CLASS (code) == tcc_comparison
3862 || TREE_CODE_CLASS (code) == tcc_unary
3863 || TREE_CODE_CLASS (code) == tcc_binary)
3864 arg0_type = TREE_TYPE (arg0);
3865 if (TREE_CODE_CLASS (code) == tcc_binary
3866 || TREE_CODE_CLASS (code) == tcc_comparison
3867 || (TREE_CODE_CLASS (code) == tcc_expression
3868 && TREE_CODE_LENGTH (code) > 1))
3869 arg1 = TREE_OPERAND (exp, 1);
3872 switch (code)
3874 case TRUTH_NOT_EXPR:
3875 in_p = ! in_p, exp = arg0;
3876 continue;
3878 case EQ_EXPR: case NE_EXPR:
3879 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3880 /* We can only do something if the range is testing for zero
3881 and if the second operand is an integer constant. Note that
3882 saying something is "in" the range we make is done by
3883 complementing IN_P, since the initial IN_P expresses the case of
3884 being not equal to zero; "out" leaves it alone. */
3885 if (low == 0 || high == 0
3886 || ! integer_zerop (low) || ! integer_zerop (high)
3887 || TREE_CODE (arg1) != INTEGER_CST)
3888 break;
3890 switch (code)
3892 case NE_EXPR: /* - [c, c] */
3893 low = high = arg1;
3894 break;
3895 case EQ_EXPR: /* + [c, c] */
3896 in_p = ! in_p, low = high = arg1;
3897 break;
3898 case GT_EXPR: /* - [-, c] */
3899 low = 0, high = arg1;
3900 break;
3901 case GE_EXPR: /* + [c, -] */
3902 in_p = ! in_p, low = arg1, high = 0;
3903 break;
3904 case LT_EXPR: /* - [c, -] */
3905 low = arg1, high = 0;
3906 break;
3907 case LE_EXPR: /* + [-, c] */
3908 in_p = ! in_p, low = 0, high = arg1;
3909 break;
3910 default:
3911 gcc_unreachable ();
3914 /* If this is an unsigned comparison, we also know that EXP is
3915 greater than or equal to zero. We base the range tests we make
3916 on that fact, so we record it here so we can parse existing
3917 range tests. We test arg0_type since often the return type
3918 of, e.g. EQ_EXPR, is boolean. */
3919 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3921 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3922 in_p, low, high, 1,
3923 build_int_cst (arg0_type, 0),
3924 NULL_TREE))
3925 break;
3927 in_p = n_in_p, low = n_low, high = n_high;
3929 /* If the high bound is missing, but we have a nonzero low
3930 bound, reverse the range so it goes from zero to the low bound
3931 minus 1. */
3932 if (high == 0 && low && ! integer_zerop (low))
3934 in_p = ! in_p;
3935 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3936 integer_one_node, 0);
3937 low = build_int_cst (arg0_type, 0);
3941 exp = arg0;
3942 continue;
3944 case NEGATE_EXPR:
3945 /* (-x) IN [a,b] -> x in [-b, -a] */
3946 n_low = range_binop (MINUS_EXPR, exp_type,
3947 build_int_cst (exp_type, 0),
3948 0, high, 1);
3949 n_high = range_binop (MINUS_EXPR, exp_type,
3950 build_int_cst (exp_type, 0),
3951 0, low, 0);
3952 low = n_low, high = n_high;
3953 exp = arg0;
3954 continue;
3956 case BIT_NOT_EXPR:
3957 /* ~ X -> -X - 1 */
3958 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3959 build_int_cst (exp_type, 1));
3960 continue;
3962 case PLUS_EXPR: case MINUS_EXPR:
3963 if (TREE_CODE (arg1) != INTEGER_CST)
3964 break;
3966 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3967 move a constant to the other side. */
3968 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3969 break;
3971 /* If EXP is signed, any overflow in the computation is undefined,
3972 so we don't worry about it so long as our computations on
3973 the bounds don't overflow. For unsigned, overflow is defined
3974 and this is exactly the right thing. */
3975 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3976 arg0_type, low, 0, arg1, 0);
3977 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3978 arg0_type, high, 1, arg1, 0);
3979 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3980 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3981 break;
3983 /* Check for an unsigned range which has wrapped around the maximum
3984 value thus making n_high < n_low, and normalize it. */
3985 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3987 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3988 integer_one_node, 0);
3989 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3990 integer_one_node, 0);
3992 /* If the range is of the form +/- [ x+1, x ], we won't
3993 be able to normalize it. But then, it represents the
3994 whole range or the empty set, so make it
3995 +/- [ -, - ]. */
3996 if (tree_int_cst_equal (n_low, low)
3997 && tree_int_cst_equal (n_high, high))
3998 low = high = 0;
3999 else
4000 in_p = ! in_p;
4002 else
4003 low = n_low, high = n_high;
4005 exp = arg0;
4006 continue;
4008 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4009 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4010 break;
4012 if (! INTEGRAL_TYPE_P (arg0_type)
4013 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4014 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4015 break;
4017 n_low = low, n_high = high;
4019 if (n_low != 0)
4020 n_low = fold_convert (arg0_type, n_low);
4022 if (n_high != 0)
4023 n_high = fold_convert (arg0_type, n_high);
4026 /* If we're converting arg0 from an unsigned type to exp,
4027 a signed type, we will be doing the comparison as unsigned.
4028 The tests above have already verified that LOW and HIGH
4029 are both positive.
4031 So we have to ensure that we will handle large unsigned
4032 values the same way that the current signed bounds treat
4033 negative values. */
4035 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4037 tree high_positive;
4038 tree equiv_type = lang_hooks.types.type_for_mode
4039 (TYPE_MODE (arg0_type), 1);
4041 /* A range without an upper bound is, naturally, unbounded.
4042 Since convert would have cropped a very large value, use
4043 the max value for the destination type. */
4044 high_positive
4045 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4046 : TYPE_MAX_VALUE (arg0_type);
4048 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4049 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4050 fold_convert (arg0_type,
4051 high_positive),
4052 build_int_cst (arg0_type, 1));
4054 /* If the low bound is specified, "and" the range with the
4055 range for which the original unsigned value will be
4056 positive. */
4057 if (low != 0)
4059 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4060 1, n_low, n_high, 1,
4061 fold_convert (arg0_type,
4062 integer_zero_node),
4063 high_positive))
4064 break;
4066 in_p = (n_in_p == in_p);
4068 else
4070 /* Otherwise, "or" the range with the range of the input
4071 that will be interpreted as negative. */
4072 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4073 0, n_low, n_high, 1,
4074 fold_convert (arg0_type,
4075 integer_zero_node),
4076 high_positive))
4077 break;
4079 in_p = (in_p != n_in_p);
4083 exp = arg0;
4084 low = n_low, high = n_high;
4085 continue;
4087 default:
4088 break;
4091 break;
4094 /* If EXP is a constant, we can evaluate whether this is true or false. */
4095 if (TREE_CODE (exp) == INTEGER_CST)
4097 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4098 exp, 0, low, 0))
4099 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4100 exp, 1, high, 1)));
4101 low = high = 0;
4102 exp = 0;
4105 *pin_p = in_p, *plow = low, *phigh = high;
4106 return exp;
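/* A sketch of the loop above, assuming a signed int X and
   EXP = (X + 2 > 5): the GT_EXPR case records the range - [-, 5]
   for X + 2, then the PLUS_EXPR case moves the constant across,
   giving - [-, 3].  We return X with *PIN_P == 0, *PLOW == 0 (no
   lower bound) and *PHIGH == 3, i.e. "X is outside [-, 3]", which
   is X > 3.  */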
4109 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4110 type, TYPE, return an expression to test if EXP is in (or out of, depending
4111 on IN_P) the range. Return 0 if the test couldn't be created. */
4113 static tree
4114 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4116 tree etype = TREE_TYPE (exp);
4117 tree value;
4119 #ifdef HAVE_canonicalize_funcptr_for_compare
4120 /* Disable this optimization for function pointer expressions
4121 on targets that require function pointer canonicalization. */
4122 if (HAVE_canonicalize_funcptr_for_compare
4123 && TREE_CODE (etype) == POINTER_TYPE
4124 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4125 return NULL_TREE;
4126 #endif
4128 if (! in_p)
4130 value = build_range_check (type, exp, 1, low, high);
4131 if (value != 0)
4132 return invert_truthvalue (value);
4134 return 0;
4137 if (low == 0 && high == 0)
4138 return build_int_cst (type, 1);
4140 if (low == 0)
4141 return fold_build2 (LE_EXPR, type, exp,
4142 fold_convert (etype, high));
4144 if (high == 0)
4145 return fold_build2 (GE_EXPR, type, exp,
4146 fold_convert (etype, low));
4148 if (operand_equal_p (low, high, 0))
4149 return fold_build2 (EQ_EXPR, type, exp,
4150 fold_convert (etype, low));
4152 if (integer_zerop (low))
4154 if (! TYPE_UNSIGNED (etype))
4156 etype = lang_hooks.types.unsigned_type (etype);
4157 high = fold_convert (etype, high);
4158 exp = fold_convert (etype, exp);
4160 return build_range_check (type, exp, 1, 0, high);
4163 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4164 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4166 unsigned HOST_WIDE_INT lo;
4167 HOST_WIDE_INT hi;
4168 int prec;
4170 prec = TYPE_PRECISION (etype);
4171 if (prec <= HOST_BITS_PER_WIDE_INT)
4173 hi = 0;
4174 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4176 else
4178 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4179 lo = (unsigned HOST_WIDE_INT) -1;
4182 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4184 if (TYPE_UNSIGNED (etype))
4186 etype = lang_hooks.types.signed_type (etype);
4187 exp = fold_convert (etype, exp);
4189 return fold_build2 (GT_EXPR, type, exp,
4190 build_int_cst (etype, 0));
4194 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4195 This requires wrap-around arithmetic for the type of the expression. */
4196 switch (TREE_CODE (etype))
4198 case INTEGER_TYPE:
4199 /* There is no requirement that LOW be within the range of ETYPE
4200 if the latter is a subtype. It must, however, be within the base
4201 type of ETYPE. So be sure we do the subtraction in that type. */
4202 if (TREE_TYPE (etype))
4203 etype = TREE_TYPE (etype);
4204 break;
4206 case ENUMERAL_TYPE:
4207 case BOOLEAN_TYPE:
4208 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4209 TYPE_UNSIGNED (etype));
4210 break;
4212 default:
4213 break;
4216 /* If we don't have wrap-around arithmetic up front, try to force it. */
4217 if (TREE_CODE (etype) == INTEGER_TYPE
4218 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4220 tree utype, minv, maxv;
4222 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4223 for the type in question, as we rely on this here. */
4224 utype = lang_hooks.types.unsigned_type (etype);
4225 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4226 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4227 integer_one_node, 1);
4228 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4230 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4231 minv, 1, maxv, 1)))
4232 etype = utype;
4233 else
4234 return 0;
4237 high = fold_convert (etype, high);
4238 low = fold_convert (etype, low);
4239 exp = fold_convert (etype, exp);
4241 value = const_binop (MINUS_EXPR, high, low, 0);
4243 if (value != 0 && !TREE_OVERFLOW (value))
4244 return build_range_check (type,
4245 fold_build2 (MINUS_EXPR, etype, exp, low),
4246 1, build_int_cst (etype, 0), value);
4248 return 0;
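/* A sketch of the general case above, assuming a 32-bit signed int
   X: build_range_check (type, X, 1, 10, 20) forces wrap-around
   arithmetic by switching to unsigned int, computes
   HIGH - LOW == 10, and recurses on X - 10 against [0, 10], which
   the zero-low-bound case turns into
   (unsigned int) (X - 10) <= 10.  */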
4251 /* Return the predecessor of VAL in its type, handling the infinite case. */
4253 static tree
4254 range_predecessor (tree val)
4256 tree type = TREE_TYPE (val);
4258 if (INTEGRAL_TYPE_P (type)
4259 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4260 return 0;
4261 else
4262 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4265 /* Return the successor of VAL in its type, handling the infinite case. */
4267 static tree
4268 range_successor (tree val)
4270 tree type = TREE_TYPE (val);
4272 if (INTEGRAL_TYPE_P (type)
4273 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4274 return 0;
4275 else
4276 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4279 /* Given two ranges, see if we can merge them into one. Return 1 if we
4280 can, 0 if we can't. Set the output range into the specified parameters. */
4282 static int
4283 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4284 tree high0, int in1_p, tree low1, tree high1)
4286 int no_overlap;
4287 int subset;
4288 int temp;
4289 tree tem;
4290 int in_p;
4291 tree low, high;
4292 int lowequal = ((low0 == 0 && low1 == 0)
4293 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4294 low0, 0, low1, 0)));
4295 int highequal = ((high0 == 0 && high1 == 0)
4296 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4297 high0, 1, high1, 1)));
4299 /* Make range 0 be the range that starts first, or that ends last if they
4300 start at the same value. Swap them if that is not already the case. */
4301 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4302 low0, 0, low1, 0))
4303 || (lowequal
4304 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4305 high1, 1, high0, 1))))
4307 temp = in0_p, in0_p = in1_p, in1_p = temp;
4308 tem = low0, low0 = low1, low1 = tem;
4309 tem = high0, high0 = high1, high1 = tem;
4312 /* Now flag two cases, whether the ranges are disjoint or whether the
4313 second range is totally subsumed in the first. Note that the tests
4314 below are simplified by the ones above. */
4315 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4316 high0, 1, low1, 0));
4317 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4318 high1, 1, high0, 1));
4320 /* We now have four cases, depending on whether we are including or
4321 excluding the two ranges. */
4322 if (in0_p && in1_p)
4324 /* If they don't overlap, the result is false. If the second range
4325 is a subset it is the result. Otherwise, the range is from the start
4326 of the second to the end of the first. */
4327 if (no_overlap)
4328 in_p = 0, low = high = 0;
4329 else if (subset)
4330 in_p = 1, low = low1, high = high1;
4331 else
4332 in_p = 1, low = low1, high = high0;
4335 else if (in0_p && ! in1_p)
4337 /* If they don't overlap, the result is the first range. If they are
4338 equal, the result is false. If the second range is a subset of the
4339 first, and the ranges begin at the same place, we go from just after
4340 the end of the second range to the end of the first. If the second
4341 range is not a subset of the first, or if it is a subset and both
4342 ranges end at the same place, the range starts at the start of the
4343 first range and ends just before the second range.
4344 Otherwise, we can't describe this as a single range. */
4345 if (no_overlap)
4346 in_p = 1, low = low0, high = high0;
4347 else if (lowequal && highequal)
4348 in_p = 0, low = high = 0;
4349 else if (subset && lowequal)
4351 low = range_successor (high1);
4352 high = high0;
4353 in_p = (low != 0);
4355 else if (! subset || highequal)
4357 low = low0;
4358 high = range_predecessor (low1);
4359 in_p = (high != 0);
4361 else
4362 return 0;
4365 else if (! in0_p && in1_p)
4367 /* If they don't overlap, the result is the second range. If the second
4368 is a subset of the first, the result is false. Otherwise,
4369 the range starts just after the first range and ends at the
4370 end of the second. */
4371 if (no_overlap)
4372 in_p = 1, low = low1, high = high1;
4373 else if (subset || highequal)
4374 in_p = 0, low = high = 0;
4375 else
4377 low = range_successor (high0);
4378 high = high1;
4379 in_p = (low != 0);
4383 else
4385 /* The case where we are excluding both ranges. Here the complex case
4386 is if they don't overlap. In that case, the only time we have a
4387 range is if they are adjacent. If the second is a subset of the
4388 first, the result is the first. Otherwise, the range to exclude
4389 starts at the beginning of the first range and ends at the end of the
4390 second. */
4391 if (no_overlap)
4393 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 range_successor (high0),
4395 1, low1, 0)))
4396 in_p = 0, low = low0, high = high1;
4397 else
4399 /* Canonicalize - [min, x] into - [-, x]. */
4400 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4401 switch (TREE_CODE (TREE_TYPE (low0)))
4403 case ENUMERAL_TYPE:
4404 if (TYPE_PRECISION (TREE_TYPE (low0))
4405 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4406 break;
4407 /* FALLTHROUGH */
4408 case INTEGER_TYPE:
4409 if (tree_int_cst_equal (low0,
4410 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4411 low0 = 0;
4412 break;
4413 case POINTER_TYPE:
4414 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4415 && integer_zerop (low0))
4416 low0 = 0;
4417 break;
4418 default:
4419 break;
4422 /* Canonicalize - [x, max] into - [x, -]. */
4423 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4424 switch (TREE_CODE (TREE_TYPE (high1)))
4426 case ENUMERAL_TYPE:
4427 if (TYPE_PRECISION (TREE_TYPE (high1))
4428 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4429 break;
4430 /* FALLTHROUGH */
4431 case INTEGER_TYPE:
4432 if (tree_int_cst_equal (high1,
4433 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4434 high1 = 0;
4435 break;
4436 case POINTER_TYPE:
4437 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4438 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4439 high1, 1,
4440 integer_one_node, 1)))
4441 high1 = 0;
4442 break;
4443 default:
4444 break;
4447 /* The ranges might be also adjacent between the maximum and
4448 minimum values of the given type. For
4449 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4450 return + [x + 1, y - 1]. */
4451 if (low0 == 0 && high1 == 0)
4453 low = range_successor (high0);
4454 high = range_predecessor (low1);
4455 if (low == 0 || high == 0)
4456 return 0;
4458 in_p = 1;
4460 else
4461 return 0;
4464 else if (subset)
4465 in_p = 0, low = low0, high = high0;
4466 else
4467 in_p = 0, low = low0, high = high1;
4470 *pin_p = in_p, *plow = low, *phigh = high;
4471 return 1;
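/* Two examples of the case analysis above.  Merging + [2, 10] with
   + [5, 20] (both included) finds no_overlap == 0 and subset == 0,
   so the result is the intersection + [5, 10].  Merging - [2, 5]
   with - [8, 10] (both excluded) finds the ranges disjoint but not
   adjacent; no single range describes the union of the two
   exclusions, so merge_ranges returns 0.  */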
4475 /* Subroutine of fold, looking inside expressions of the form
4476 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4477 of the COND_EXPR. This function is being used also to optimize
4478 A op B ? C : A, by reversing the comparison first.
4480 Return a folded expression whose code is not a COND_EXPR
4481 anymore, or NULL_TREE if no folding opportunity is found. */
4483 static tree
4484 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4486 enum tree_code comp_code = TREE_CODE (arg0);
4487 tree arg00 = TREE_OPERAND (arg0, 0);
4488 tree arg01 = TREE_OPERAND (arg0, 1);
4489 tree arg1_type = TREE_TYPE (arg1);
4490 tree tem;
4492 STRIP_NOPS (arg1);
4493 STRIP_NOPS (arg2);
4495 /* If we have A op 0 ? A : -A, consider applying the following
4496 transformations:
4498 A == 0? A : -A same as -A
4499 A != 0? A : -A same as A
4500 A >= 0? A : -A same as abs (A)
4501 A > 0? A : -A same as abs (A)
4502 A <= 0? A : -A same as -abs (A)
4503 A < 0? A : -A same as -abs (A)
4505 None of these transformations work for modes with signed
4506 zeros. If A is +/-0, the first two transformations will
4507 change the sign of the result (from +0 to -0, or vice
4508 versa). The last four will fix the sign of the result,
4509 even though the original expressions could be positive or
4510 negative, depending on the sign of A.
4512 Note that all these transformations are correct if A is
4513 NaN, since the two alternatives (A and -A) are also NaNs. */
4514 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4515 ? real_zerop (arg01)
4516 : integer_zerop (arg01))
4517 && ((TREE_CODE (arg2) == NEGATE_EXPR
4518 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4519 /* In the case that A is of the form X-Y, '-A' (arg2) may
4520 have already been folded to Y-X, check for that. */
4521 || (TREE_CODE (arg1) == MINUS_EXPR
4522 && TREE_CODE (arg2) == MINUS_EXPR
4523 && operand_equal_p (TREE_OPERAND (arg1, 0),
4524 TREE_OPERAND (arg2, 1), 0)
4525 && operand_equal_p (TREE_OPERAND (arg1, 1),
4526 TREE_OPERAND (arg2, 0), 0))))
4527 switch (comp_code)
4529 case EQ_EXPR:
4530 case UNEQ_EXPR:
4531 tem = fold_convert (arg1_type, arg1);
4532 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4533 case NE_EXPR:
4534 case LTGT_EXPR:
4535 return pedantic_non_lvalue (fold_convert (type, arg1));
4536 case UNGE_EXPR:
4537 case UNGT_EXPR:
4538 if (flag_trapping_math)
4539 break;
4540 /* Fall through. */
4541 case GE_EXPR:
4542 case GT_EXPR:
4543 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4544 arg1 = fold_convert (lang_hooks.types.signed_type
4545 (TREE_TYPE (arg1)), arg1);
4546 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4547 return pedantic_non_lvalue (fold_convert (type, tem));
4548 case UNLE_EXPR:
4549 case UNLT_EXPR:
4550 if (flag_trapping_math)
4551 break;
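/* Fall through. */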
4552 case LE_EXPR:
4553 case LT_EXPR:
4554 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4555 arg1 = fold_convert (lang_hooks.types.signed_type
4556 (TREE_TYPE (arg1)), arg1);
4557 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4558 return negate_expr (fold_convert (type, tem));
4559 default:
4560 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4561 break;
4564 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4565 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4566 both transformations are correct when A is NaN: A != 0
4567 is then true, and A == 0 is false. */
4569 if (integer_zerop (arg01) && integer_zerop (arg2))
4571 if (comp_code == NE_EXPR)
4572 return pedantic_non_lvalue (fold_convert (type, arg1));
4573 else if (comp_code == EQ_EXPR)
4574 return build_int_cst (type, 0);
4577 /* Try some transformations of A op B ? A : B.
4579 A == B? A : B same as B
4580 A != B? A : B same as A
4581 A >= B? A : B same as max (A, B)
4582 A > B? A : B same as max (B, A)
4583 A <= B? A : B same as min (A, B)
4584 A < B? A : B same as min (B, A)
4586 As above, these transformations don't work in the presence
4587 of signed zeros. For example, if A and B are zeros of
4588 opposite sign, the first two transformations will change
4589 the sign of the result. In the last four, the original
4590 expressions give different results for (A=+0, B=-0) and
4591 (A=-0, B=+0), but the transformed expressions do not.
4593 The first two transformations are correct if either A or B
4594 is a NaN. In the first transformation, the condition will
4595 be false, and B will indeed be chosen. In the case of the
4596 second transformation, the condition A != B will be true,
4597 and A will be chosen.
4599 The conversions to max() and min() are not correct if B is
4600 a number and A is not. The conditions in the original
4601 expressions will be false, so all four give B. The min()
4602 and max() versions would give a NaN instead. */
4603 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4604 /* Avoid these transformations if the COND_EXPR may be used
4605 as an lvalue in the C++ front-end. PR c++/19199. */
4606 && (in_gimple_form
4607 || (strcmp (lang_hooks.name, "GNU C++") != 0
4608 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4609 || ! maybe_lvalue_p (arg1)
4610 || ! maybe_lvalue_p (arg2)))
4612 tree comp_op0 = arg00;
4613 tree comp_op1 = arg01;
4614 tree comp_type = TREE_TYPE (comp_op0);
4616 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4617 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4619 comp_type = type;
4620 comp_op0 = arg1;
4621 comp_op1 = arg2;
4624 switch (comp_code)
4626 case EQ_EXPR:
4627 return pedantic_non_lvalue (fold_convert (type, arg2));
4628 case NE_EXPR:
4629 return pedantic_non_lvalue (fold_convert (type, arg1));
4630 case LE_EXPR:
4631 case LT_EXPR:
4632 case UNLE_EXPR:
4633 case UNLT_EXPR:
4634 /* In C++ a ?: expression can be an lvalue, so put the
4635 operand which will be used if they are equal first
4636 so that we can convert this back to the
4637 corresponding COND_EXPR. */
4638 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4640 comp_op0 = fold_convert (comp_type, comp_op0);
4641 comp_op1 = fold_convert (comp_type, comp_op1);
4642 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4643 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4644 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4645 return pedantic_non_lvalue (fold_convert (type, tem));
4647 break;
4648 case GE_EXPR:
4649 case GT_EXPR:
4650 case UNGE_EXPR:
4651 case UNGT_EXPR:
4652 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4654 comp_op0 = fold_convert (comp_type, comp_op0);
4655 comp_op1 = fold_convert (comp_type, comp_op1);
4656 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4657 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4658 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4659 return pedantic_non_lvalue (fold_convert (type, tem));
4661 break;
4662 case UNEQ_EXPR:
4663 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4664 return pedantic_non_lvalue (fold_convert (type, arg2));
4665 break;
4666 case LTGT_EXPR:
4667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4668 return pedantic_non_lvalue (fold_convert (type, arg1));
4669 break;
4670 default:
4671 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4672 break;
4676 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4677 we might still be able to simplify this. For example,
4678 if C1 is one less or one more than C2, this might have started
4679 out as a MIN or MAX and been transformed by this function.
4680 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4682 if (INTEGRAL_TYPE_P (type)
4683 && TREE_CODE (arg01) == INTEGER_CST
4684 && TREE_CODE (arg2) == INTEGER_CST)
4685 switch (comp_code)
4687 case EQ_EXPR:
4688 /* We can replace A with C1 in this case. */
4689 arg1 = fold_convert (type, arg01);
4690 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4692 case LT_EXPR:
4693 /* If C1 is C2 + 1, this is min(A, C2). */
4694 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4695 OEP_ONLY_CONST)
4696 && operand_equal_p (arg01,
4697 const_binop (PLUS_EXPR, arg2,
4698 build_int_cst (type, 1), 0),
4699 OEP_ONLY_CONST))
4700 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4701 type, arg1, arg2));
4702 break;
4704 case LE_EXPR:
4705 /* If C1 is C2 - 1, this is min(A, C2). */
4706 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4707 OEP_ONLY_CONST)
4708 && operand_equal_p (arg01,
4709 const_binop (MINUS_EXPR, arg2,
4710 build_int_cst (type, 1), 0),
4711 OEP_ONLY_CONST))
4712 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4713 type, arg1, arg2));
4714 break;
4716 case GT_EXPR:
4717 /* If C1 is C2 - 1, this is max(A, C2). */
4718 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4719 OEP_ONLY_CONST)
4720 && operand_equal_p (arg01,
4721 const_binop (MINUS_EXPR, arg2,
4722 build_int_cst (type, 1), 0),
4723 OEP_ONLY_CONST))
4724 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4725 type, arg1, arg2));
4726 break;
4728 case GE_EXPR:
4729 /* If C1 is C2 + 1, this is max(A, C2). */
4730 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4731 OEP_ONLY_CONST)
4732 && operand_equal_p (arg01,
4733 const_binop (PLUS_EXPR, arg2,
4734 build_int_cst (type, 1), 0),
4735 OEP_ONLY_CONST))
4736 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4737 type, arg1, arg2));
4738 break;
4739 case NE_EXPR:
4740 break;
4741 default:
4742 gcc_unreachable ();
4745 return NULL_TREE;
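/* Two sketches of the folds above, assuming operands where NaNs
   need not be honored: x < y ? x : y becomes MIN_EXPR (y, x), with
   Y placed first because Y is the operand chosen when the two
   compare equal; and x < 5 ? x : 4, where C1 == C2 + 1, is
   recognized by the LT_EXPR case at the end as MIN_EXPR (x, 4).  */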
4750 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4751 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4752 #endif
4754 /* EXP is some logical combination of boolean tests. See if we can
4755 merge it into some range test. Return the new tree if so. */
4757 static tree
4758 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4760 int or_op = (code == TRUTH_ORIF_EXPR
4761 || code == TRUTH_OR_EXPR);
4762 int in0_p, in1_p, in_p;
4763 tree low0, low1, low, high0, high1, high;
4764 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4765 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4766 tree tem;
4768 /* If this is an OR operation, invert both sides; we will invert
4769 again at the end. */
4770 if (or_op)
4771 in0_p = ! in0_p, in1_p = ! in1_p;
4773 /* If both expressions are the same, if we can merge the ranges, and we
4774 can build the range test, return it or it inverted. If one of the
4775 ranges is always true or always false, consider it to be the same
4776 expression as the other. */
4777 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4778 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4779 in1_p, low1, high1)
4780 && 0 != (tem = (build_range_check (type,
4781 lhs != 0 ? lhs
4782 : rhs != 0 ? rhs : integer_zero_node,
4783 in_p, low, high))))
4784 return or_op ? invert_truthvalue (tem) : tem;
4786 /* On machines where the branch cost is expensive, if this is a
4787 short-circuited branch and the underlying object on both sides
4788 is the same, make a non-short-circuit operation. */
4789 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4790 && lhs != 0 && rhs != 0
4791 && (code == TRUTH_ANDIF_EXPR
4792 || code == TRUTH_ORIF_EXPR)
4793 && operand_equal_p (lhs, rhs, 0))
4795 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4796 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4797 which cases we can't do this. */
4798 if (simple_operand_p (lhs))
4799 return build2 (code == TRUTH_ANDIF_EXPR
4800 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4801 type, op0, op1);
4803 else if (lang_hooks.decls.global_bindings_p () == 0
4804 && ! CONTAINS_PLACEHOLDER_P (lhs))
4806 tree common = save_expr (lhs);
4808 if (0 != (lhs = build_range_check (type, common,
4809 or_op ? ! in0_p : in0_p,
4810 low0, high0))
4811 && (0 != (rhs = build_range_check (type, common,
4812 or_op ? ! in1_p : in1_p,
4813 low1, high1))))
4814 return build2 (code == TRUTH_ANDIF_EXPR
4815 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4816 type, lhs, rhs);
4820 return 0;
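/* The canonical instance of this folding, assuming char operands:
   ch >= '0' && ch <= '9' yields the ranges + ['0', -] and
   + [-, '9'], which merge_ranges intersects into + ['0', '9'];
   build_range_check then emits roughly
   (unsigned char) (ch - '0') <= 9.  */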
4823 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4824 bit value. Arrange things so the extra bits will be set to zero if and
4825 only if C is sign-extended to its full width. If MASK is nonzero,
4826 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4828 static tree
4829 unextend (tree c, int p, int unsignedp, tree mask)
4831 tree type = TREE_TYPE (c);
4832 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4833 tree temp;
4835 if (p == modesize || unsignedp)
4836 return c;
4838 /* We work by getting just the sign bit into the low-order bit, then
4839 into the high-order bit, then sign-extend. We then XOR that value
4840 with C. */
4841 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4842 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4844 /* We must use a signed type in order to get an arithmetic right shift.
4845 However, we must also avoid introducing accidental overflows, so that
4846 a subsequent call to integer_zerop will work. Hence we must
4847 do the type conversion here. At this point, the constant is either
4848 zero or one, and the conversion to a signed type can never overflow.
4849 We could get an overflow if this conversion is done anywhere else. */
4850 if (TYPE_UNSIGNED (type))
4851 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4853 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4854 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4855 if (mask != 0)
4856 temp = const_binop (BIT_AND_EXPR, temp,
4857 fold_convert (TREE_TYPE (c), mask), 0);
4858 /* If necessary, convert the type back to match the type of C. */
4859 if (TYPE_UNSIGNED (type))
4860 temp = fold_convert (type, temp);
4862 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
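/* A worked example, with P == 4 and a 32-bit mode: the sign-extended
   field value C == 0xfffffffc (-4 in 4 bits) has sign bit 1, so TEMP
   becomes 0xfffffff0 and C ^ TEMP == 0x0000000c, leaving the extra
   bits clear; the zero-extended value 0x0000000c instead maps to
   0xfffffffc, extra bits set, exactly as required above.  */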
4865 /* Find ways of folding logical expressions of LHS and RHS:
4866 Try to merge two comparisons to the same innermost item.
4867 Look for range tests like "ch >= '0' && ch <= '9'".
4868 Look for combinations of simple terms on machines with expensive branches
4869 and evaluate the RHS unconditionally.
4871 For example, if we have p->a == 2 && p->b == 4 and we can make an
4872 object large enough to span both A and B, we can do this with a comparison
4873 against the object ANDed with a mask.
4875 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4876 operations to do this with one comparison.
4878 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4879 function and the one above.
4881 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4882 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4884 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4885 two operands.
4887 We return the simplified tree or 0 if no optimization is possible. */
4889 static tree
4890 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4892 /* If this is the "or" of two comparisons, we can do something if
4893 the comparisons are NE_EXPR. If this is the "and", we can do something
4894 if the comparisons are EQ_EXPR. I.e.,
4895 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4897 WANTED_CODE is this operation code. For single bit fields, we can
4898 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4899 comparison for one-bit fields. */
4901 enum tree_code wanted_code;
4902 enum tree_code lcode, rcode;
4903 tree ll_arg, lr_arg, rl_arg, rr_arg;
4904 tree ll_inner, lr_inner, rl_inner, rr_inner;
4905 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4906 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4907 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4908 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4909 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4910 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4911 enum machine_mode lnmode, rnmode;
4912 tree ll_mask, lr_mask, rl_mask, rr_mask;
4913 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4914 tree l_const, r_const;
4915 tree lntype, rntype, result;
4916 int first_bit, end_bit;
4917 int volatilep;
4918 tree orig_lhs = lhs, orig_rhs = rhs;
4919 enum tree_code orig_code = code;
4921 /* Start by getting the comparison codes. Fail if anything is volatile.
4922 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4923 it were surrounded with a NE_EXPR. */
4925 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4926 return 0;
4928 lcode = TREE_CODE (lhs);
4929 rcode = TREE_CODE (rhs);
4931 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4933 lhs = build2 (NE_EXPR, truth_type, lhs,
4934 build_int_cst (TREE_TYPE (lhs), 0));
4935 lcode = NE_EXPR;
4938 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4940 rhs = build2 (NE_EXPR, truth_type, rhs,
4941 build_int_cst (TREE_TYPE (rhs), 0));
4942 rcode = NE_EXPR;
4945 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4946 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4947 return 0;
4949 ll_arg = TREE_OPERAND (lhs, 0);
4950 lr_arg = TREE_OPERAND (lhs, 1);
4951 rl_arg = TREE_OPERAND (rhs, 0);
4952 rr_arg = TREE_OPERAND (rhs, 1);
4954 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4955 if (simple_operand_p (ll_arg)
4956 && simple_operand_p (lr_arg))
4958 tree result;
4959 if (operand_equal_p (ll_arg, rl_arg, 0)
4960 && operand_equal_p (lr_arg, rr_arg, 0))
4962 result = combine_comparisons (code, lcode, rcode,
4963 truth_type, ll_arg, lr_arg);
4964 if (result)
4965 return result;
4967 else if (operand_equal_p (ll_arg, rr_arg, 0)
4968 && operand_equal_p (lr_arg, rl_arg, 0))
4970 result = combine_comparisons (code, lcode,
4971 swap_tree_comparison (rcode),
4972 truth_type, ll_arg, lr_arg);
4973 if (result)
4974 return result;
4978 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4979 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4981 /* If the RHS can be evaluated unconditionally and its operands are
4982 simple, it wins to evaluate the RHS unconditionally on machines
4983 with expensive branches. In this case, this isn't a comparison
4984 that can be merged. Avoid doing this if the RHS is a floating-point
4985 comparison since those can trap. */
4987 if (BRANCH_COST >= 2
4988 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4989 && simple_operand_p (rl_arg)
4990 && simple_operand_p (rr_arg))
4992 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4993 if (code == TRUTH_OR_EXPR
4994 && lcode == NE_EXPR && integer_zerop (lr_arg)
4995 && rcode == NE_EXPR && integer_zerop (rr_arg)
4996 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4997 return build2 (NE_EXPR, truth_type,
4998 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4999 ll_arg, rl_arg),
5000 build_int_cst (TREE_TYPE (ll_arg), 0));
5002 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5003 if (code == TRUTH_AND_EXPR
5004 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5005 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5006 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5007 return build2 (EQ_EXPR, truth_type,
5008 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5009 ll_arg, rl_arg),
5010 build_int_cst (TREE_TYPE (ll_arg), 0));
5012 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5014 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5015 return build2 (code, truth_type, lhs, rhs);
5016 return NULL_TREE;
5020 /* See if the comparisons can be merged. Then get all the parameters for
5021 each side. */
5023 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5024 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5025 return 0;
5027 volatilep = 0;
5028 ll_inner = decode_field_reference (ll_arg,
5029 &ll_bitsize, &ll_bitpos, &ll_mode,
5030 &ll_unsignedp, &volatilep, &ll_mask,
5031 &ll_and_mask);
5032 lr_inner = decode_field_reference (lr_arg,
5033 &lr_bitsize, &lr_bitpos, &lr_mode,
5034 &lr_unsignedp, &volatilep, &lr_mask,
5035 &lr_and_mask);
5036 rl_inner = decode_field_reference (rl_arg,
5037 &rl_bitsize, &rl_bitpos, &rl_mode,
5038 &rl_unsignedp, &volatilep, &rl_mask,
5039 &rl_and_mask);
5040 rr_inner = decode_field_reference (rr_arg,
5041 &rr_bitsize, &rr_bitpos, &rr_mode,
5042 &rr_unsignedp, &volatilep, &rr_mask,
5043 &rr_and_mask);
5045 /* The inner operation on the lhs of each comparison must be the
5046 same if we are to be able to do anything.
5047 Then see if we have constants. If not, the same must be true for
5048 the rhs's. */
5049 if (volatilep || ll_inner == 0 || rl_inner == 0
5050 || ! operand_equal_p (ll_inner, rl_inner, 0))
5051 return 0;
5053 if (TREE_CODE (lr_arg) == INTEGER_CST
5054 && TREE_CODE (rr_arg) == INTEGER_CST)
5055 l_const = lr_arg, r_const = rr_arg;
5056 else if (lr_inner == 0 || rr_inner == 0
5057 || ! operand_equal_p (lr_inner, rr_inner, 0))
5058 return 0;
5059 else
5060 l_const = r_const = 0;
5062 /* If either comparison code is not correct for our logical operation,
5063 fail. However, we can convert a one-bit comparison against zero into
5064 the opposite comparison against that bit being set in the field. */
5066 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5067 if (lcode != wanted_code)
5069 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5071 /* Make the left operand unsigned, since we are only interested
5072 in the value of one bit. Otherwise we are doing the wrong
5073 thing below. */
5074 ll_unsignedp = 1;
5075 l_const = ll_mask;
5077 else
5078 return 0;
5081 /* This is analogous to the code for l_const above. */
5082 if (rcode != wanted_code)
5084 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5086 rl_unsignedp = 1;
5087 r_const = rl_mask;
5089 else
5090 return 0;
5093 /* After this point all optimizations will generate bit-field
5094 references, which we might not want. */
5095 if (! lang_hooks.can_use_bit_fields_p ())
5096 return 0;
5098 /* See if we can find a mode that contains both fields being compared on
5099 the left. If we can't, fail. Otherwise, update all constants and masks
5100 to be relative to a field of that size. */
5101 first_bit = MIN (ll_bitpos, rl_bitpos);
5102 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5103 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5104 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5105 volatilep);
5106 if (lnmode == VOIDmode)
5107 return 0;
5109 lnbitsize = GET_MODE_BITSIZE (lnmode);
5110 lnbitpos = first_bit & ~ (lnbitsize - 1);
5111 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5112 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5114 if (BYTES_BIG_ENDIAN)
5116 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5117 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5120 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5121 size_int (xll_bitpos), 0);
5122 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5123 size_int (xrl_bitpos), 0);
5125 if (l_const)
5127 l_const = fold_convert (lntype, l_const);
5128 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5129 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5130 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5131 fold_build1 (BIT_NOT_EXPR,
5132 lntype, ll_mask),
5133 0)))
5135 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5137 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5140 if (r_const)
5142 r_const = fold_convert (lntype, r_const);
5143 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5144 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5145 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5146 fold_build1 (BIT_NOT_EXPR,
5147 lntype, rl_mask),
5148 0)))
5150 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5152 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5156 /* If the right sides are not constant, do the same for them. Also,
5157 disallow this optimization if a size or signedness mismatch occurs
5158 between the left and right sides. */
5159 if (l_const == 0)
5161 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5162 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5163 /* Make sure the two fields on the right
5164 correspond to the left without being swapped. */
5165 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5166 return 0;
5168 first_bit = MIN (lr_bitpos, rr_bitpos);
5169 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5170 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5171 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5172 volatilep);
5173 if (rnmode == VOIDmode)
5174 return 0;
5176 rnbitsize = GET_MODE_BITSIZE (rnmode);
5177 rnbitpos = first_bit & ~ (rnbitsize - 1);
5178 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5179 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5181 if (BYTES_BIG_ENDIAN)
5183 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5184 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5187 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5188 size_int (xlr_bitpos), 0);
5189 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5190 size_int (xrr_bitpos), 0);
5192 /* Make a mask that corresponds to both fields being compared.
5193 Do this for both items being compared. If the operands are the
5194 same size and the bits being compared are in the same position
5195 then we can do this by masking both and comparing the masked
5196 results. */
5197 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5198 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5199 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5201 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5202 ll_unsignedp || rl_unsignedp);
5203 if (! all_ones_mask_p (ll_mask, lnbitsize))
5204 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5206 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5207 lr_unsignedp || rr_unsignedp);
5208 if (! all_ones_mask_p (lr_mask, rnbitsize))
5209 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5211 return build2 (wanted_code, truth_type, lhs, rhs);
5214 /* There is still another way we can do something: If both pairs of
5215 fields being compared are adjacent, we may be able to make a wider
5216 field containing them both.
5218 Note that we still must mask the lhs/rhs expressions. Furthermore,
5219 the mask must be shifted to account for the shift done by
5220 make_bit_field_ref. */
5221 if ((ll_bitsize + ll_bitpos == rl_bitpos
5222 && lr_bitsize + lr_bitpos == rr_bitpos)
5223 || (ll_bitpos == rl_bitpos + rl_bitsize
5224 && lr_bitpos == rr_bitpos + rr_bitsize))
5226 tree type;
5228 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5229 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5230 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5231 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5233 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5234 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5235 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5236 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5238 /* Convert to the smaller type before masking out unwanted bits. */
5239 type = lntype;
5240 if (lntype != rntype)
5242 if (lnbitsize > rnbitsize)
5244 lhs = fold_convert (rntype, lhs);
5245 ll_mask = fold_convert (rntype, ll_mask);
5246 type = rntype;
5248 else if (lnbitsize < rnbitsize)
5250 rhs = fold_convert (lntype, rhs);
5251 lr_mask = fold_convert (lntype, lr_mask);
5252 type = lntype;
5256 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5257 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5259 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5260 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5262 return build2 (wanted_code, truth_type, lhs, rhs);
5265 return 0;
5268 /* Handle the case of comparisons with constants. If there is something in
5269 common between the masks, those bits of the constants must be the same.
5270 If not, the condition is always false. Test for this to avoid generating
5271 incorrect code below. */
5272 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5273 if (! integer_zerop (result)
5274 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5275 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5277 if (wanted_code == NE_EXPR)
5279 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5280 return constant_boolean_node (true, truth_type);
5282 else
5284 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5285 return constant_boolean_node (false, truth_type);
5289 /* Construct the expression we will return. First get the component
5290 reference we will make. Unless the mask is all ones the width of
5291 that field, perform the mask operation. Then compare with the
5292 merged constant. */
5293 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5294 ll_unsignedp || rl_unsignedp);
5296 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5297 if (! all_ones_mask_p (ll_mask, lnbitsize))
5298 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5300 return build2 (wanted_code, truth_type, result,
5301 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
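/* A sketch of the overall effect, assuming the hypothetical layout
   struct s { unsigned a : 4; unsigned b : 4; } *p with both fields
   in one byte: p->a == 2 && p->b == 3 is rewritten to load that
   byte once and compare against the merged constant, roughly
   *(unsigned char *) p == 0x32 on a little-endian target (the
   shifted masks and constants depend on BYTES_BIG_ENDIAN).  */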
5304 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5305 constant. */
5307 static tree
5308 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5310 tree arg0 = op0;
5311 enum tree_code op_code;
5312 tree comp_const = op1;
5313 tree minmax_const;
5314 int consts_equal, consts_lt;
5315 tree inner;
5317 STRIP_SIGN_NOPS (arg0);
5319 op_code = TREE_CODE (arg0);
5320 minmax_const = TREE_OPERAND (arg0, 1);
5321 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5322 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5323 inner = TREE_OPERAND (arg0, 0);
5325 /* If something does not permit us to optimize, return the original tree. */
5326 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5327 || TREE_CODE (comp_const) != INTEGER_CST
5328 || TREE_CONSTANT_OVERFLOW (comp_const)
5329 || TREE_CODE (minmax_const) != INTEGER_CST
5330 || TREE_CONSTANT_OVERFLOW (minmax_const))
5331 return NULL_TREE;
5333 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5334 and GT_EXPR, doing the rest with recursive calls using logical
5335 simplifications. */
5336 switch (code)
5338 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5340 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5341 type, op0, op1);
5342 if (tem)
5343 return invert_truthvalue (tem);
5344 return NULL_TREE;
5347 case GE_EXPR:
5348 return
5349 fold_build2 (TRUTH_ORIF_EXPR, type,
5350 optimize_minmax_comparison
5351 (EQ_EXPR, type, arg0, comp_const),
5352 optimize_minmax_comparison
5353 (GT_EXPR, type, arg0, comp_const));
5355 case EQ_EXPR:
5356 if (op_code == MAX_EXPR && consts_equal)
5357 /* MAX (X, 0) == 0 -> X <= 0 */
5358 return fold_build2 (LE_EXPR, type, inner, comp_const);
5360 else if (op_code == MAX_EXPR && consts_lt)
5361 /* MAX (X, 0) == 5 -> X == 5 */
5362 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5364 else if (op_code == MAX_EXPR)
5365 /* MAX (X, 0) == -1 -> false */
5366 return omit_one_operand (type, integer_zero_node, inner);
5368 else if (consts_equal)
5369 /* MIN (X, 0) == 0 -> X >= 0 */
5370 return fold_build2 (GE_EXPR, type, inner, comp_const);
5372 else if (consts_lt)
5373 /* MIN (X, 0) == 5 -> false */
5374 return omit_one_operand (type, integer_zero_node, inner);
5376 else
5377 /* MIN (X, 0) == -1 -> X == -1 */
5378 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5380 case GT_EXPR:
5381 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5382 /* MAX (X, 0) > 0 -> X > 0
5383 MAX (X, 0) > 5 -> X > 5 */
5384 return fold_build2 (GT_EXPR, type, inner, comp_const);
5386 else if (op_code == MAX_EXPR)
5387 /* MAX (X, 0) > -1 -> true */
5388 return omit_one_operand (type, integer_one_node, inner);
5390 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5391 /* MIN (X, 0) > 0 -> false
5392 MIN (X, 0) > 5 -> false */
5393 return omit_one_operand (type, integer_zero_node, inner);
5395 else
5396 /* MIN (X, 0) > -1 -> X > -1 */
5397 return fold_build2 (GT_EXPR, type, inner, comp_const);
5399 default:
5400 return NULL_TREE;
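/* Two instances of the GT_EXPR case above: MAX (X, 4) > 4 folds to
   X > 4 (consts_equal holds), while MIN (X, 4) > 4 can never be
   true and folds to constant 0 via omit_one_operand, which
   preserves any side effects of X.  */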
5404 /* T is an integer expression that is being multiplied by, divided by, or
5405 taken modulo a constant C (CODE says which operation, and what kind of
5406 divide or modulus). See if we can eliminate that operation by folding it with
5407 other operations already in T. WIDE_TYPE, if non-null, is a type that
5408 should be used for the computation if wider than our type.
5410 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5411 (X * 2) + (Y * 4). We must, however, be assured that either the original
5412 expression would not overflow or that overflow is undefined for the type
5413 in the language in question.
5415 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5416 the machine has a multiply-accumulate insn or that this is part of an
5417 addressing calculation.
5419 If we return a non-null expression, it is an equivalent form of the
5420 original computation, but need not be in the original type. */
5422 static tree
5423 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5425 /* To avoid exponential search depth, refuse to allow recursion past
5426 three levels. Beyond that (1) it's highly unlikely that we'll find
5427 something interesting and (2) we've probably processed it before
5428 when we built the inner expression. */
5430 static int depth;
5431 tree ret;
5433 if (depth > 3)
5434 return NULL;
5436 depth++;
5437 ret = extract_muldiv_1 (t, c, code, wide_type);
5438 depth--;
5440 return ret;
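/* The running example from the comment above: for
   t = X * 8 + Y * 16, c = 4 and a division CODE, the PLUS_EXPR case
   of extract_muldiv_1 finds both operands to be multiples of C,
   simplifies them to X * 2 and Y * 4 respectively, and returns
   X * 2 + Y * 4 with no division left to perform at run time.  */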
5443 static tree
5444 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5446 tree type = TREE_TYPE (t);
5447 enum tree_code tcode = TREE_CODE (t);
5448 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5449 > GET_MODE_SIZE (TYPE_MODE (type)))
5450 ? wide_type : type);
5451 tree t1, t2;
5452 int same_p = tcode == code;
5453 tree op0 = NULL_TREE, op1 = NULL_TREE;
5455 /* Don't deal with constants of zero here; they confuse the code below. */
5456 if (integer_zerop (c))
5457 return NULL_TREE;
5459 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5460 op0 = TREE_OPERAND (t, 0);
5462 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5463 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5465 /* Note that we need not handle conditional operations here since fold
5466 already handles those cases. So just do arithmetic here. */
5467 switch (tcode)
5469 case INTEGER_CST:
5470 /* For a constant, we can always simplify if we are a multiply
5471 or (for divide and modulus) if it is a multiple of our constant. */
5472 if (code == MULT_EXPR
5473 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5474 return const_binop (code, fold_convert (ctype, t),
5475 fold_convert (ctype, c), 0);
5476 break;
5478 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5479 /* If op0 is an expression ... */
5480 if ((COMPARISON_CLASS_P (op0)
5481 || UNARY_CLASS_P (op0)
5482 || BINARY_CLASS_P (op0)
5483 || EXPRESSION_CLASS_P (op0))
5484 /* ... and is unsigned, and its type is smaller than ctype,
5485 then we cannot pass through as widening. */
5486 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5487 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5488 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5489 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5490 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5491 /* ... or this is a truncation (t is narrower than op0),
5492 then we cannot pass through this narrowing. */
5493 || (GET_MODE_SIZE (TYPE_MODE (type))
5494 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5495 /* ... or signedness changes for division or modulus,
5496 then we cannot pass through this conversion. */
5497 || (code != MULT_EXPR
5498 && (TYPE_UNSIGNED (ctype)
5499 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5500 break;
5502 /* Pass the constant down and see if we can make a simplification. If
5503 we can, replace this expression with the inner simplification for
5504 possible later conversion to our or some other type. */
5505 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5506 && TREE_CODE (t2) == INTEGER_CST
5507 && ! TREE_CONSTANT_OVERFLOW (t2)
5508 && (0 != (t1 = extract_muldiv (op0, t2, code,
5509 code == MULT_EXPR
5510 ? ctype : NULL_TREE))))
5511 return t1;
5512 break;
5514 case ABS_EXPR:
5515 /* If widening the type changes it from signed to unsigned, then we
5516 must avoid building ABS_EXPR itself as unsigned. */
5517 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5519 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5520 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5522 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5523 return fold_convert (ctype, t1);
5525 break;
5527 /* FALLTHROUGH */
5528 case NEGATE_EXPR:
5529 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5530 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5531 break;
5533 case MIN_EXPR: case MAX_EXPR:
5534 /* If widening the type changes the signedness, then we can't perform
5535 this optimization as that changes the result. */
5536 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5537 break;
5539 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5540 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5541 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5543 if (tree_int_cst_sgn (c) < 0)
5544 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5546 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5547 fold_convert (ctype, t2));
5549 break;
5551 case LSHIFT_EXPR: case RSHIFT_EXPR:
5552 /* If the second operand is constant, this is a multiplication
5553 or floor division, by a power of two, so we can treat it that
5554 way unless the multiplier or divisor overflows. Signed
5555 left-shift overflow is implementation-defined rather than
5556 undefined in C90, so do not convert signed left shift into
5557 multiplication. */
5558 if (TREE_CODE (op1) == INTEGER_CST
5559 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5560 /* const_binop may not detect overflow correctly,
5561 so check for it explicitly here. */
5562 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5563 && TREE_INT_CST_HIGH (op1) == 0
5564 && 0 != (t1 = fold_convert (ctype,
5565 const_binop (LSHIFT_EXPR,
5566 size_one_node,
5567 op1, 0)))
5568 && ! TREE_OVERFLOW (t1))
5569 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5570 ? MULT_EXPR : FLOOR_DIV_EXPR,
5571 ctype, fold_convert (ctype, op0), t1),
5572 c, code, wide_type);
5573 break;
5575 case PLUS_EXPR: case MINUS_EXPR:
5576 /* See if we can eliminate the operation on both sides. If we can, we
5577 can return a new PLUS or MINUS. If we can't, the only remaining
5578 cases where we can do anything are if the second operand is a
5579 constant. */
5580 t1 = extract_muldiv (op0, c, code, wide_type);
5581 t2 = extract_muldiv (op1, c, code, wide_type);
5582 if (t1 != 0 && t2 != 0
5583 && (code == MULT_EXPR
5584 /* If not multiplication, we can only do this if both operands
5585 are divisible by c. */
5586 || (multiple_of_p (ctype, op0, c)
5587 && multiple_of_p (ctype, op1, c))))
5588 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5589 fold_convert (ctype, t2));
5591 /* If this was a subtraction, negate OP1 and set it to be an addition.
5592 This simplifies the logic below. */
5593 if (tcode == MINUS_EXPR)
5594 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5596 if (TREE_CODE (op1) != INTEGER_CST)
5597 break;
5599 /* If either OP1 or C is negative, this optimization is not safe for
5600 some of the division and remainder types, while for others we need
5601 to change the code. */
5602 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5604 if (code == CEIL_DIV_EXPR)
5605 code = FLOOR_DIV_EXPR;
5606 else if (code == FLOOR_DIV_EXPR)
5607 code = CEIL_DIV_EXPR;
5608 else if (code != MULT_EXPR
5609 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5610 break;
5613 /* If it's a multiply or a division/modulus operation of a multiple
5614 of our constant, do the operation and verify it doesn't overflow. */
5615 if (code == MULT_EXPR
5616 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5618 op1 = const_binop (code, fold_convert (ctype, op1),
5619 fold_convert (ctype, c), 0);
5620 /* We allow the constant to overflow with wrapping semantics. */
5621 if (op1 == 0
5622 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5623 break;
5625 else
5626 break;
5628 /* If we have an unsigned type that is not a sizetype, we cannot widen
5629 the operation since it will change the result if the original
5630 computation overflowed. */
5631 if (TYPE_UNSIGNED (ctype)
5632 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5633 && ctype != type)
5634 break;
5636 /* If we were able to eliminate our operation from the first side,
5637 apply our operation to the second side and reform the PLUS. */
5638 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5639 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5641 /* The last case is if we are a multiply. In that case, we can
5642 apply the distributive law to commute the multiply and addition
5643 if the multiplication of the constants doesn't overflow. */
5644 if (code == MULT_EXPR)
5645 return fold_build2 (tcode, ctype,
5646 fold_build2 (code, ctype,
5647 fold_convert (ctype, op0),
5648 fold_convert (ctype, c)),
5649 op1);
5651 break;
5653 case MULT_EXPR:
5654 /* We have a special case here if we are doing something like
5655 (C * 8) % 4 since we know that's zero. */
5656 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5657 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5658 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5659 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5660 return omit_one_operand (type, integer_zero_node, op0);
5662 /* ... fall through ... */
5664 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5665 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5666 /* If we can extract our operation from the LHS, do so and return a
5667 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5668 do something only if the second operand is a constant. */
5669 if (same_p
5670 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5671 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5672 fold_convert (ctype, op1));
5673 else if (tcode == MULT_EXPR && code == MULT_EXPR
5674 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5675 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5676 fold_convert (ctype, t1));
5677 else if (TREE_CODE (op1) != INTEGER_CST)
5678 return 0;
5680 /* If these are the same operation types, we can associate them
5681 assuming no overflow. */
5682 if (tcode == code
5683 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5684 fold_convert (ctype, c), 0))
5685 && ! TREE_OVERFLOW (t1))
5686 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5688 /* If these operations "cancel" each other, we have the main
5689 optimizations of this pass, which occur when either constant is a
5690 multiple of the other, in which case we replace this with either an
5691 operation of CODE or TCODE.
5693 If we have an unsigned type that is not a sizetype, we cannot do
5694 this since it will change the result if the original computation
5695 overflowed. */
5696 if ((! TYPE_UNSIGNED (ctype)
5697 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5698 && ! flag_wrapv
5699 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5700 || (tcode == MULT_EXPR
5701 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5702 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5704 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5705 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5706 fold_convert (ctype,
5707 const_binop (TRUNC_DIV_EXPR,
5708 op1, c, 0)));
5709 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5710 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5711 fold_convert (ctype,
5712 const_binop (TRUNC_DIV_EXPR,
5713 c, op1, 0)));
5715 break;
5717 default:
5718 break;
5721 return 0;
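/* Worked example for the routine above (illustrative): folding
   (X * 6) / 2 calls extract_muldiv (X * 6, 2, TRUNC_DIV_EXPR,
   NULL_TREE).  The MULT_EXPR case notices that 6 is a multiple of 2
   and rewrites the tree as X * 3, so the division disappears.  Per
   the checks above, this cancellation is only done for signed (or
   sizetype) arithmetic without -fwrapv.  */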
5724 /* Return a node which has the indicated constant VALUE (either 0 or
5725 1), and is of the indicated TYPE. */
5727 tree
5728 constant_boolean_node (int value, tree type)
5730 if (type == integer_type_node)
5731 return value ? integer_one_node : integer_zero_node;
5732 else if (type == boolean_type_node)
5733 return value ? boolean_true_node : boolean_false_node;
5734 else
5735 return build_int_cst (type, value);
5739 /* Return true if expr looks like an ARRAY_REF and set base and
5740 offset to the appropriate trees. If there is no offset,
5741 offset is set to NULL_TREE. Base will be canonicalized to
5742 something you can get the element type from using
5743 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5744 in bytes to the base. */
5746 static bool
5747 extract_array_ref (tree expr, tree *base, tree *offset)
5749 /* One canonical form is a PLUS_EXPR with the first
5750 argument being an ADDR_EXPR with a possible NOP_EXPR
5751 attached. */
5752 if (TREE_CODE (expr) == PLUS_EXPR)
5754 tree op0 = TREE_OPERAND (expr, 0);
5755 tree inner_base, dummy1;
5756 /* Strip NOP_EXPRs here because the C frontends and/or
5757 folders may present us with (int *)&x.a + 4B. */
5758 STRIP_NOPS (op0);
5759 if (extract_array_ref (op0, &inner_base, &dummy1))
5761 *base = inner_base;
5762 if (dummy1 == NULL_TREE)
5763 *offset = TREE_OPERAND (expr, 1);
5764 else
5765 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5766 dummy1, TREE_OPERAND (expr, 1));
5767 return true;
5770 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5771 which we transform into an ADDR_EXPR with appropriate
5772 offset. For other arguments to the ADDR_EXPR we assume
5773 zero offset and as such do not care about the ADDR_EXPR
5774 type and strip possible nops from it. */
5775 else if (TREE_CODE (expr) == ADDR_EXPR)
5777 tree op0 = TREE_OPERAND (expr, 0);
5778 if (TREE_CODE (op0) == ARRAY_REF)
5780 tree idx = TREE_OPERAND (op0, 1);
5781 *base = TREE_OPERAND (op0, 0);
5782 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5783 array_ref_element_size (op0));
5785 else
5787 /* Handle array-to-pointer decay as &a. */
5788 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5789 *base = TREE_OPERAND (expr, 0);
5790 else
5791 *base = expr;
5792 *offset = NULL_TREE;
5794 return true;
5796 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5797 else if (SSA_VAR_P (expr)
5798 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5800 *base = expr;
5801 *offset = NULL_TREE;
5802 return true;
5805 return false;
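/* For example, extract_array_ref decomposes &a[i] into base a and
   offset i * sizeof (*a), and decomposes a pointer variable p into
   base p with a NULL_TREE offset.  For &a[i] + 4, the recursion on
   the PLUS_EXPR sums the two offsets, yielding i * sizeof (*a) + 4.  */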
5809 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5810 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5811 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5812 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5813 COND is the first argument to CODE; otherwise (as in the example
5814 given here), it is the second argument. TYPE is the type of the
5815 original expression. Return NULL_TREE if no simplification is
5816 possible. */
5818 static tree
5819 fold_binary_op_with_conditional_arg (enum tree_code code,
5820 tree type, tree op0, tree op1,
5821 tree cond, tree arg, int cond_first_p)
5823 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5824 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5825 tree test, true_value, false_value;
5826 tree lhs = NULL_TREE;
5827 tree rhs = NULL_TREE;
5829 /* This transformation is only worthwhile if we don't have to wrap
5830 arg in a SAVE_EXPR, and the operation can be simplified on at least
5831 one of the branches once it is pushed inside the COND_EXPR. */
5832 if (!TREE_CONSTANT (arg))
5833 return NULL_TREE;
5835 if (TREE_CODE (cond) == COND_EXPR)
5837 test = TREE_OPERAND (cond, 0);
5838 true_value = TREE_OPERAND (cond, 1);
5839 false_value = TREE_OPERAND (cond, 2);
5840 /* If this operand throws an exception (i.e. has void type), then
5841 it does not make sense to try to perform a logical or arithmetic
5842 operation involving it. */
5843 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5844 lhs = true_value;
5845 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5846 rhs = false_value;
5848 else
5850 tree testtype = TREE_TYPE (cond);
5851 test = cond;
5852 true_value = constant_boolean_node (true, testtype);
5853 false_value = constant_boolean_node (false, testtype);
5856 arg = fold_convert (arg_type, arg);
5857 if (lhs == 0)
5859 true_value = fold_convert (cond_type, true_value);
5860 if (cond_first_p)
5861 lhs = fold_build2 (code, type, true_value, arg);
5862 else
5863 lhs = fold_build2 (code, type, arg, true_value);
5865 if (rhs == 0)
5867 false_value = fold_convert (cond_type, false_value);
5868 if (cond_first_p)
5869 rhs = fold_build2 (code, type, false_value, arg);
5870 else
5871 rhs = fold_build2 (code, type, arg, false_value);
5874 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5875 return fold_convert (type, test);
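/* For instance, given `2 + (b ? x : y)', the routine above
   distributes the addition into both arms and produces
   `b ? (2 + x) : (2 + y)'; the TREE_CONSTANT check guarantees that
   `2' can be duplicated without wrapping it in a SAVE_EXPR.  */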
5879 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5881 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5882 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5883 ADDEND is the same as X.
5885 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5886 and finite. The problematic cases are when X is zero, and its mode
5887 has signed zeros. In the case of rounding towards -infinity,
5888 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5889 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5891 static bool
5892 fold_real_zero_addition_p (tree type, tree addend, int negate)
5894 if (!real_zerop (addend))
5895 return false;
5897 /* Don't allow the fold with -fsignaling-nans. */
5898 if (HONOR_SNANS (TYPE_MODE (type)))
5899 return false;
5901 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5902 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5903 return true;
5905 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5906 if (TREE_CODE (addend) == REAL_CST
5907 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5908 negate = !negate;
5910 /* The mode has signed zeros, and we have to honor their sign.
5911 In this situation, there is only one case we can return true for.
5912 X - 0 is the same as X unless rounding towards -infinity is
5913 supported. */
5914 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
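/* Concretely: `x + 0.0' folds to `x' only when signed zeros can be
   ignored (otherwise x == -0.0 would yield +0.0), while `x - 0.0'
   folds to `x' even with signed zeros, provided rounding towards
   -infinity need not be honored.  */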
5917 /* Subroutine of fold() that checks comparisons of built-in math
5918 functions against real constants.
5920 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5921 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5922 is the type of the result and ARG0 and ARG1 are the operands of the
5923 comparison. ARG1 must be a TREE_REAL_CST.
5925 The function returns the constant folded tree if a simplification
5926 can be made, and NULL_TREE otherwise. */
5928 static tree
5929 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5930 tree type, tree arg0, tree arg1)
5932 REAL_VALUE_TYPE c;
5934 if (BUILTIN_SQRT_P (fcode))
5936 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5937 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5939 c = TREE_REAL_CST (arg1);
5940 if (REAL_VALUE_NEGATIVE (c))
5942 /* sqrt(x) < y is always false, if y is negative. */
5943 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5944 return omit_one_operand (type, integer_zero_node, arg);
5946 /* sqrt(x) > y is always true, if y is negative and we
5947 don't care about NaNs, i.e. negative values of x. */
5948 if (code == NE_EXPR || !HONOR_NANS (mode))
5949 return omit_one_operand (type, integer_one_node, arg);
5951 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5952 return fold_build2 (GE_EXPR, type, arg,
5953 build_real (TREE_TYPE (arg), dconst0));
5955 else if (code == GT_EXPR || code == GE_EXPR)
5957 REAL_VALUE_TYPE c2;
5959 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5960 real_convert (&c2, mode, &c2);
5962 if (REAL_VALUE_ISINF (c2))
5964 /* sqrt(x) > y is x == +Inf, when y is very large. */
5965 if (HONOR_INFINITIES (mode))
5966 return fold_build2 (EQ_EXPR, type, arg,
5967 build_real (TREE_TYPE (arg), c2));
5969 /* sqrt(x) > y is always false, when y is very large
5970 and we don't care about infinities. */
5971 return omit_one_operand (type, integer_zero_node, arg);
5974 /* sqrt(x) > c is the same as x > c*c. */
5975 return fold_build2 (code, type, arg,
5976 build_real (TREE_TYPE (arg), c2));
5978 else if (code == LT_EXPR || code == LE_EXPR)
5980 REAL_VALUE_TYPE c2;
5982 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5983 real_convert (&c2, mode, &c2);
5985 if (REAL_VALUE_ISINF (c2))
5987 /* sqrt(x) < y is always true, when y is a very large
5988 value and we don't care about NaNs or Infinities. */
5989 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5990 return omit_one_operand (type, integer_one_node, arg);
5992 /* sqrt(x) < y is x != +Inf when y is very large and we
5993 don't care about NaNs. */
5994 if (! HONOR_NANS (mode))
5995 return fold_build2 (NE_EXPR, type, arg,
5996 build_real (TREE_TYPE (arg), c2));
5998 /* sqrt(x) < y is x >= 0 when y is very large and we
5999 don't care about Infinities. */
6000 if (! HONOR_INFINITIES (mode))
6001 return fold_build2 (GE_EXPR, type, arg,
6002 build_real (TREE_TYPE (arg), dconst0));
6004 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6005 if (lang_hooks.decls.global_bindings_p () != 0
6006 || CONTAINS_PLACEHOLDER_P (arg))
6007 return NULL_TREE;
6009 arg = save_expr (arg);
6010 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6011 fold_build2 (GE_EXPR, type, arg,
6012 build_real (TREE_TYPE (arg),
6013 dconst0)),
6014 fold_build2 (NE_EXPR, type, arg,
6015 build_real (TREE_TYPE (arg),
6016 c2)));
6019 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6020 if (! HONOR_NANS (mode))
6021 return fold_build2 (code, type, arg,
6022 build_real (TREE_TYPE (arg), c2));
6024 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6025 if (lang_hooks.decls.global_bindings_p () == 0
6026 && ! CONTAINS_PLACEHOLDER_P (arg))
6028 arg = save_expr (arg);
6029 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6030 fold_build2 (GE_EXPR, type, arg,
6031 build_real (TREE_TYPE (arg),
6032 dconst0)),
6033 fold_build2 (code, type, arg,
6034 build_real (TREE_TYPE (arg),
6035 c2)));
6040 return NULL_TREE;
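/* Example folds performed above: `sqrt(x) > 2.0' becomes `x > 4.0',
   `sqrt(x) < -1.0' becomes constant false, and `sqrt(x) > -1.0'
   becomes `x >= 0.0' when NaNs must be honored.  */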
6043 /* Subroutine of fold() that optimizes comparisons against Infinities,
6044 either +Inf or -Inf.
6046 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6047 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6048 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6050 The function returns the constant folded tree if a simplification
6051 can be made, and NULL_TREE otherwise. */
6053 static tree
6054 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6056 enum machine_mode mode;
6057 REAL_VALUE_TYPE max;
6058 tree temp;
6059 bool neg;
6061 mode = TYPE_MODE (TREE_TYPE (arg0));
6063 /* For negative infinity swap the sense of the comparison. */
6064 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6065 if (neg)
6066 code = swap_tree_comparison (code);
6068 switch (code)
6070 case GT_EXPR:
6071 /* x > +Inf is always false, if we ignore sNaNs. */
6072 if (HONOR_SNANS (mode))
6073 return NULL_TREE;
6074 return omit_one_operand (type, integer_zero_node, arg0);
6076 case LE_EXPR:
6077 /* x <= +Inf is always true, if we don't care about NaNs. */
6078 if (! HONOR_NANS (mode))
6079 return omit_one_operand (type, integer_one_node, arg0);
6081 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6082 if (lang_hooks.decls.global_bindings_p () == 0
6083 && ! CONTAINS_PLACEHOLDER_P (arg0))
6085 arg0 = save_expr (arg0);
6086 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6088 break;
6090 case EQ_EXPR:
6091 case GE_EXPR:
6092 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6093 real_maxval (&max, neg, mode);
6094 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6095 arg0, build_real (TREE_TYPE (arg0), max));
6097 case LT_EXPR:
6098 /* x < +Inf is always equal to x <= DBL_MAX. */
6099 real_maxval (&max, neg, mode);
6100 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6101 arg0, build_real (TREE_TYPE (arg0), max));
6103 case NE_EXPR:
6104 /* x != +Inf is always equal to !(x > DBL_MAX). */
6105 real_maxval (&max, neg, mode);
6106 if (! HONOR_NANS (mode))
6107 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6108 arg0, build_real (TREE_TYPE (arg0), max));
6110 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6111 arg0, build_real (TREE_TYPE (arg0), max));
6112 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6114 default:
6115 break;
6118 return NULL_TREE;
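/* For double operands this means, e.g., that `x < +Inf' folds to
   `x <= DBL_MAX' and `x >= +Inf' folds to `x > DBL_MAX', removing
   the infinity from the comparison altogether.  */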
6121 /* Subroutine of fold() that optimizes comparisons of a division by
6122 a nonzero integer constant against an integer constant, i.e.
6123 X/C1 op C2.
6125 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6126 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6127 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6129 The function returns the constant folded tree if a simplification
6130 can be made, and NULL_TREE otherwise. */
6132 static tree
6133 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6135 tree prod, tmp, hi, lo;
6136 tree arg00 = TREE_OPERAND (arg0, 0);
6137 tree arg01 = TREE_OPERAND (arg0, 1);
6138 unsigned HOST_WIDE_INT lpart;
6139 HOST_WIDE_INT hpart;
6140 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6141 bool neg_overflow;
6142 int overflow;
6144 /* We have to do this the hard way to detect unsigned overflow.
6145 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6146 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6147 TREE_INT_CST_HIGH (arg01),
6148 TREE_INT_CST_LOW (arg1),
6149 TREE_INT_CST_HIGH (arg1),
6150 &lpart, &hpart, unsigned_p);
6151 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6152 prod = force_fit_type (prod, -1, overflow, false);
6153 neg_overflow = false;
6155 if (unsigned_p)
6157 tmp = int_const_binop (MINUS_EXPR, arg01,
6158 build_int_cst (TREE_TYPE (arg01), 1), 0);
6159 lo = prod;
6161 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6162 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6163 TREE_INT_CST_HIGH (prod),
6164 TREE_INT_CST_LOW (tmp),
6165 TREE_INT_CST_HIGH (tmp),
6166 &lpart, &hpart, unsigned_p);
6167 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6168 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6169 TREE_CONSTANT_OVERFLOW (prod));
6171 else if (tree_int_cst_sgn (arg01) >= 0)
6173 tmp = int_const_binop (MINUS_EXPR, arg01,
6174 build_int_cst (TREE_TYPE (arg01), 1), 0);
6175 switch (tree_int_cst_sgn (arg1))
6177 case -1:
6178 neg_overflow = true;
6179 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6180 hi = prod;
6181 break;
6183 case 0:
6184 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6185 hi = tmp;
6186 break;
6188 case 1:
6189 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6190 lo = prod;
6191 break;
6193 default:
6194 gcc_unreachable ();
6197 else
6199 /* A negative divisor reverses the relational operators. */
6200 code = swap_tree_comparison (code);
6202 tmp = int_const_binop (PLUS_EXPR, arg01,
6203 build_int_cst (TREE_TYPE (arg01), 1), 0);
6204 switch (tree_int_cst_sgn (arg1))
6206 case -1:
6207 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6208 lo = prod;
6209 break;
6211 case 0:
6212 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6213 lo = tmp;
6214 break;
6216 case 1:
6217 neg_overflow = true;
6218 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6219 hi = prod;
6220 break;
6222 default:
6223 gcc_unreachable ();
6227 switch (code)
6229 case EQ_EXPR:
6230 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6231 return omit_one_operand (type, integer_zero_node, arg00);
6232 if (TREE_OVERFLOW (hi))
6233 return fold_build2 (GE_EXPR, type, arg00, lo);
6234 if (TREE_OVERFLOW (lo))
6235 return fold_build2 (LE_EXPR, type, arg00, hi);
6236 return build_range_check (type, arg00, 1, lo, hi);
6238 case NE_EXPR:
6239 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6240 return omit_one_operand (type, integer_one_node, arg00);
6241 if (TREE_OVERFLOW (hi))
6242 return fold_build2 (LT_EXPR, type, arg00, lo);
6243 if (TREE_OVERFLOW (lo))
6244 return fold_build2 (GT_EXPR, type, arg00, hi);
6245 return build_range_check (type, arg00, 0, lo, hi);
6247 case LT_EXPR:
6248 if (TREE_OVERFLOW (lo))
6250 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6251 return omit_one_operand (type, tmp, arg00);
6253 return fold_build2 (LT_EXPR, type, arg00, lo);
6255 case LE_EXPR:
6256 if (TREE_OVERFLOW (hi))
6258 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6259 return omit_one_operand (type, tmp, arg00);
6261 return fold_build2 (LE_EXPR, type, arg00, hi);
6263 case GT_EXPR:
6264 if (TREE_OVERFLOW (hi))
6266 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6267 return omit_one_operand (type, tmp, arg00);
6269 return fold_build2 (GT_EXPR, type, arg00, hi);
6271 case GE_EXPR:
6272 if (TREE_OVERFLOW (lo))
6274 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6275 return omit_one_operand (type, tmp, arg00);
6277 return fold_build2 (GE_EXPR, type, arg00, lo);
6279 default:
6280 break;
6283 return NULL_TREE;
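/* As an example of the range computation above: for signed
   truncating division, `x / 4 == 2' holds exactly for 8 <= x <= 11,
   so the comparison is folded into that range check; `x / 4 > 2'
   likewise becomes `x > 11'.  */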
6287 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6288 equality/inequality test, then return a simplified form of the test
6289 using a sign test. Otherwise return NULL_TREE. TYPE is the desired
6290 result type. */
6292 static tree
6293 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6294 tree result_type)
6296 /* If this is testing a single bit, we can optimize the test. */
6297 if ((code == NE_EXPR || code == EQ_EXPR)
6298 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6299 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6301 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6302 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6303 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6305 if (arg00 != NULL_TREE
6306 /* This is only a win if casting to a signed type is cheap,
6307 i.e. when arg00's type is not a partial mode. */
6308 && TYPE_PRECISION (TREE_TYPE (arg00))
6309 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6311 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6312 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6313 result_type, fold_convert (stype, arg00),
6314 build_int_cst (stype, 0));
6318 return NULL_TREE;
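/* For instance, if X has a 32-bit type, `(X & 0x80000000) != 0'
   tests exactly the sign bit and is rewritten above as the signed
   comparison `(int) X < 0', and `(X & 0x80000000) == 0' as
   `(int) X >= 0'.  */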
6321 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6322 equality/inequality test, then return a simplified form of
6323 the test using shifts and logical operations. Otherwise return
6324 NULL. TYPE is the desired result type. */
6326 tree
6327 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6328 tree result_type)
6330 /* If this is testing a single bit, we can optimize the test. */
6331 if ((code == NE_EXPR || code == EQ_EXPR)
6332 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6333 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6335 tree inner = TREE_OPERAND (arg0, 0);
6336 tree type = TREE_TYPE (arg0);
6337 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6338 enum machine_mode operand_mode = TYPE_MODE (type);
6339 int ops_unsigned;
6340 tree signed_type, unsigned_type, intermediate_type;
6341 tree tem, one;
6343 /* First, see if we can fold the single bit test into a sign-bit
6344 test. */
6345 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6346 result_type);
6347 if (tem)
6348 return tem;
6350 /* Otherwise we have (A & C) != 0 where C is a single bit,
6351 convert that into ((A >> C2) & 1), where C2 = log2(C).
6352 Similarly for (A & C) == 0. */
6354 /* If INNER is a right shift of a constant and it plus BITNUM does
6355 not overflow, adjust BITNUM and INNER. */
6356 if (TREE_CODE (inner) == RSHIFT_EXPR
6357 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6358 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6359 && bitnum < TYPE_PRECISION (type)
6360 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6361 bitnum - TYPE_PRECISION (type)))
6363 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6364 inner = TREE_OPERAND (inner, 0);
6367 /* If we are going to be able to omit the AND below, we must do our
6368 operations as unsigned. If we must use the AND, we have a choice.
6369 Normally unsigned is faster, but for some machines signed is. */
6370 #ifdef LOAD_EXTEND_OP
6371 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6372 && !flag_syntax_only) ? 0 : 1;
6373 #else
6374 ops_unsigned = 1;
6375 #endif
6377 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6378 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6379 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6380 inner = fold_convert (intermediate_type, inner);
6382 if (bitnum != 0)
6383 inner = build2 (RSHIFT_EXPR, intermediate_type,
6384 inner, size_int (bitnum));
6386 one = build_int_cst (intermediate_type, 1);
6388 if (code == EQ_EXPR)
6389 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6391 /* Put the AND last so it can combine with more things. */
6392 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6394 /* Make sure to return the proper type. */
6395 inner = fold_convert (result_type, inner);
6397 return inner;
6399 return NULL_TREE;
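/* Example: `(X & 8) != 0' tests bit 3 and becomes `(X >> 3) & 1',
   while `(X & 8) == 0' becomes `((X >> 3) ^ 1) & 1'; the AND is
   emitted last so that it can combine with surrounding code.  */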
6402 /* Check whether we are allowed to reorder operands arg0 and arg1,
6403 such that the evaluation of arg1 occurs before arg0. */
6405 static bool
6406 reorder_operands_p (tree arg0, tree arg1)
6408 if (! flag_evaluation_order)
6409 return true;
6410 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6411 return true;
6412 return ! TREE_SIDE_EFFECTS (arg0)
6413 && ! TREE_SIDE_EFFECTS (arg1);
6416 /* Test whether it is preferable to swap two operands, ARG0 and
6417 ARG1, for example because ARG0 is an integer constant and ARG1
6418 isn't. If REORDER is true, only recommend swapping if we can
6419 evaluate the operands in reverse order. */
6421 bool
6422 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6424 STRIP_SIGN_NOPS (arg0);
6425 STRIP_SIGN_NOPS (arg1);
6427 if (TREE_CODE (arg1) == INTEGER_CST)
6428 return 0;
6429 if (TREE_CODE (arg0) == INTEGER_CST)
6430 return 1;
6432 if (TREE_CODE (arg1) == REAL_CST)
6433 return 0;
6434 if (TREE_CODE (arg0) == REAL_CST)
6435 return 1;
6437 if (TREE_CODE (arg1) == COMPLEX_CST)
6438 return 0;
6439 if (TREE_CODE (arg0) == COMPLEX_CST)
6440 return 1;
6442 if (TREE_CONSTANT (arg1))
6443 return 0;
6444 if (TREE_CONSTANT (arg0))
6445 return 1;
6447 if (optimize_size)
6448 return 0;
6450 if (reorder && flag_evaluation_order
6451 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6452 return 0;
6454 if (DECL_P (arg1))
6455 return 0;
6456 if (DECL_P (arg0))
6457 return 1;
6459 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6460 for commutative and comparison operators. Ensuring a canonical
6461 form allows the optimizers to find additional redundancies without
6462 having to explicitly check for both orderings. */
6463 if (TREE_CODE (arg0) == SSA_NAME
6464 && TREE_CODE (arg1) == SSA_NAME
6465 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6466 return 1;
6468 return 0;
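/* E.g. tree_swap_operands_p recommends rewriting `5 + X' as `X + 5',
   and orders two SSA_NAME operands by version number, so that later
   passes only need to recognize one canonical form of each
   commutative or comparison expression.  */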
6471 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6472 ARG0 is extended to a wider type. */
6474 static tree
6475 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6477 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6478 tree arg1_unw;
6479 tree shorter_type, outer_type;
6480 tree min, max;
6481 bool above, below;
6483 if (arg0_unw == arg0)
6484 return NULL_TREE;
6485 shorter_type = TREE_TYPE (arg0_unw);
6487 #ifdef HAVE_canonicalize_funcptr_for_compare
6488 /* Disable this optimization if we're casting a function pointer
6489 type on targets that require function pointer canonicalization. */
6490 if (HAVE_canonicalize_funcptr_for_compare
6491 && TREE_CODE (shorter_type) == POINTER_TYPE
6492 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6493 return NULL_TREE;
6494 #endif
6496 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6497 return NULL_TREE;
6499 arg1_unw = get_unwidened (arg1, shorter_type);
6501 /* If possible, express the comparison in the shorter mode. */
6502 if ((code == EQ_EXPR || code == NE_EXPR
6503 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6504 && (TREE_TYPE (arg1_unw) == shorter_type
6505 || (TREE_CODE (arg1_unw) == INTEGER_CST
6506 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6507 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6508 && int_fits_type_p (arg1_unw, shorter_type))))
6509 return fold_build2 (code, type, arg0_unw,
6510 fold_convert (shorter_type, arg1_unw));
6512 if (TREE_CODE (arg1_unw) != INTEGER_CST
6513 || TREE_CODE (shorter_type) != INTEGER_TYPE
6514 || !int_fits_type_p (arg1_unw, shorter_type))
6515 return NULL_TREE;
6517 /* If we are comparing with an integer that does not fit into the range
6518 of the shorter type, the result is known. */
6519 outer_type = TREE_TYPE (arg1_unw);
6520 min = lower_bound_in_type (outer_type, shorter_type);
6521 max = upper_bound_in_type (outer_type, shorter_type);
6523 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6524 max, arg1_unw));
6525 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6526 arg1_unw, min));
6528 switch (code)
6530 case EQ_EXPR:
6531 if (above || below)
6532 return omit_one_operand (type, integer_zero_node, arg0);
6533 break;
6535 case NE_EXPR:
6536 if (above || below)
6537 return omit_one_operand (type, integer_one_node, arg0);
6538 break;
6540 case LT_EXPR:
6541 case LE_EXPR:
6542 if (above)
6543 return omit_one_operand (type, integer_one_node, arg0);
6544 else if (below)
6545 return omit_one_operand (type, integer_zero_node, arg0);
6547 case GT_EXPR:
6548 case GE_EXPR:
6549 if (above)
6550 return omit_one_operand (type, integer_zero_node, arg0);
6551 else if (below)
6552 return omit_one_operand (type, integer_one_node, arg0);
6554 default:
6555 break;
6558 return NULL_TREE;
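/* For example, with a signed char C, `(int) C == 1000' is folded to
   constant false because 1000 lies outside [-128, 127], while
   `(int) C < 10' is folded back to the narrower comparison
   `C < (signed char) 10'.  */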
6561 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6562 ARG0 just the signedness is changed. */
6564 static tree
6565 fold_sign_changed_comparison (enum tree_code code, tree type,
6566 tree arg0, tree arg1)
6568 tree arg0_inner, tmp;
6569 tree inner_type, outer_type;
6571 if (TREE_CODE (arg0) != NOP_EXPR
6572 && TREE_CODE (arg0) != CONVERT_EXPR)
6573 return NULL_TREE;
6575 outer_type = TREE_TYPE (arg0);
6576 arg0_inner = TREE_OPERAND (arg0, 0);
6577 inner_type = TREE_TYPE (arg0_inner);
6579 #ifdef HAVE_canonicalize_funcptr_for_compare
6580 /* Disable this optimization if we're casting a function pointer
6581 type on targets that require function pointer canonicalization. */
6582 if (HAVE_canonicalize_funcptr_for_compare
6583 && TREE_CODE (inner_type) == POINTER_TYPE
6584 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6585 return NULL_TREE;
6586 #endif
6588 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6589 return NULL_TREE;
6591 if (TREE_CODE (arg1) != INTEGER_CST
6592 && !((TREE_CODE (arg1) == NOP_EXPR
6593 || TREE_CODE (arg1) == CONVERT_EXPR)
6594 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6595 return NULL_TREE;
6597 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6598 && code != NE_EXPR
6599 && code != EQ_EXPR)
6600 return NULL_TREE;
6602 if (TREE_CODE (arg1) == INTEGER_CST)
6604 tmp = build_int_cst_wide (inner_type,
6605 TREE_INT_CST_LOW (arg1),
6606 TREE_INT_CST_HIGH (arg1));
6607 arg1 = force_fit_type (tmp, 0,
6608 TREE_OVERFLOW (arg1),
6609 TREE_CONSTANT_OVERFLOW (arg1));
6611 else
6612 arg1 = fold_convert (inner_type, arg1);
6614 return fold_build2 (code, type, arg0_inner, arg1);
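/* E.g. if U has type unsigned int, `(int) U == 5' is folded above to
   `U == 5U': the conversion only changes the sign, which cannot
   affect an equality test, so it is dropped from the comparison.  */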
6617 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6618 the step of the array. Reconstructs s and delta in the case of s * delta
6619 being an integer constant (and thus already folded).
6620 ADDR is the address. OP1 is the multiplicative expression.
6621 If the function succeeds, the new address expression is returned. Otherwise
6622 NULL_TREE is returned. */
6624 static tree
6625 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6627 tree s, delta, step;
6628 tree ref = TREE_OPERAND (addr, 0), pref;
6629 tree ret, pos;
6630 tree itype;
6632 /* Canonicalize op1 into a possibly non-constant delta
6633 and an INTEGER_CST s. */
6634 if (TREE_CODE (op1) == MULT_EXPR)
6636 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6638 STRIP_NOPS (arg0);
6639 STRIP_NOPS (arg1);
6641 if (TREE_CODE (arg0) == INTEGER_CST)
6643 s = arg0;
6644 delta = arg1;
6646 else if (TREE_CODE (arg1) == INTEGER_CST)
6648 s = arg1;
6649 delta = arg0;
6651 else
6652 return NULL_TREE;
6654 else if (TREE_CODE (op1) == INTEGER_CST)
6656 delta = op1;
6657 s = NULL_TREE;
6659 else
6661 /* Pretend we have delta * 1. */
6662 delta = op1;
6663 s = integer_one_node;
6666 for (;; ref = TREE_OPERAND (ref, 0))
6668 if (TREE_CODE (ref) == ARRAY_REF)
6670 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6671 if (! itype)
6672 continue;
6674 step = array_ref_element_size (ref);
6675 if (TREE_CODE (step) != INTEGER_CST)
6676 continue;
6678 if (s)
6680 if (! tree_int_cst_equal (step, s))
6681 continue;
6683 else
6685 /* Check whether delta is a multiple of step. */
6686 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6687 if (! tmp)
6688 continue;
6689 delta = tmp;
6692 break;
6695 if (!handled_component_p (ref))
6696 return NULL_TREE;
6699 /* We found a suitable array reference. Copy everything up to it,
6700 and replace the index. */
6702 pref = TREE_OPERAND (addr, 0);
6703 ret = copy_node (pref);
6704 pos = ret;
6706 while (pref != ref)
6708 pref = TREE_OPERAND (pref, 0);
6709 TREE_OPERAND (pos, 0) = copy_node (pref);
6710 pos = TREE_OPERAND (pos, 0);
6713 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6714 fold_convert (itype,
6715 TREE_OPERAND (pos, 1)),
6716 fold_convert (itype, delta));
6718 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
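/* For instance, if a is an array of 4-byte elements, the address
   computation `&a[i] + j * 4' is rewritten above as `&a[i + j]',
   moving the multiplication back into the array index where
   subsequent passes can reason about it more easily.  */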
6722 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6723 means A >= Y && A != MAX, but in this case we know that
6724 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6726 static tree
6727 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6729 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6731 if (TREE_CODE (bound) == LT_EXPR)
6732 a = TREE_OPERAND (bound, 0);
6733 else if (TREE_CODE (bound) == GT_EXPR)
6734 a = TREE_OPERAND (bound, 1);
6735 else
6736 return NULL_TREE;
6738 typea = TREE_TYPE (a);
6739 if (!INTEGRAL_TYPE_P (typea)
6740 && !POINTER_TYPE_P (typea))
6741 return NULL_TREE;
6743 if (TREE_CODE (ineq) == LT_EXPR)
6745 a1 = TREE_OPERAND (ineq, 1);
6746 y = TREE_OPERAND (ineq, 0);
6748 else if (TREE_CODE (ineq) == GT_EXPR)
6750 a1 = TREE_OPERAND (ineq, 0);
6751 y = TREE_OPERAND (ineq, 1);
6753 else
6754 return NULL_TREE;
6756 if (TREE_TYPE (a1) != typea)
6757 return NULL_TREE;
6759 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6760 if (!integer_onep (diff))
6761 return NULL_TREE;
6763 return fold_build2 (GE_EXPR, type, a, y);
6766 /* Fold a sum or difference of at least one multiplication.
6767 Returns the folded tree or NULL if no simplification could be made. */
6769 static tree
6770 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6772 tree arg00, arg01, arg10, arg11;
6773 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6775 /* (A * C) +- (B * C) -> (A+-B) * C.
6776 (A * C) +- A -> A * (C+-1).
6777 We are most concerned about the case where C is a constant,
6778 but other combinations show up during loop reduction. Since
6779 it is not difficult, try all four possibilities. */
6781 if (TREE_CODE (arg0) == MULT_EXPR)
6783 arg00 = TREE_OPERAND (arg0, 0);
6784 arg01 = TREE_OPERAND (arg0, 1);
6786 else
6788 arg00 = arg0;
6789 arg01 = build_one_cst (type);
6791 if (TREE_CODE (arg1) == MULT_EXPR)
6793 arg10 = TREE_OPERAND (arg1, 0);
6794 arg11 = TREE_OPERAND (arg1, 1);
6796 else
6798 arg10 = arg1;
6799 arg11 = build_one_cst (type);
6801 same = NULL_TREE;
6803 if (operand_equal_p (arg01, arg11, 0))
6804 same = arg01, alt0 = arg00, alt1 = arg10;
6805 else if (operand_equal_p (arg00, arg10, 0))
6806 same = arg00, alt0 = arg01, alt1 = arg11;
6807 else if (operand_equal_p (arg00, arg11, 0))
6808 same = arg00, alt0 = arg01, alt1 = arg10;
6809 else if (operand_equal_p (arg01, arg10, 0))
6810 same = arg01, alt0 = arg00, alt1 = arg11;
6812 /* No identical multiplicands; see if we can find a common
6813 power-of-two factor in non-power-of-two multiplies. This
6814 can help in multi-dimensional array access. */
6815 else if (host_integerp (arg01, 0)
6816 && host_integerp (arg11, 0))
6818 HOST_WIDE_INT int01, int11, tmp;
6819 bool swap = false;
6820 tree maybe_same;
6821 int01 = TREE_INT_CST_LOW (arg01);
6822 int11 = TREE_INT_CST_LOW (arg11);
6824 /* Move min of absolute values to int11. */
6825 if ((int01 >= 0 ? int01 : -int01)
6826 < (int11 >= 0 ? int11 : -int11))
6828 tmp = int01, int01 = int11, int11 = tmp;
6829 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6830 maybe_same = arg01;
6831 swap = true;
6833 else
6834 maybe_same = arg11;
6836 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6838 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6839 build_int_cst (TREE_TYPE (arg00),
6840 int01 / int11));
6841 alt1 = arg10;
6842 same = maybe_same;
6843 if (swap)
6844 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6848 if (same)
6849 return fold_build2 (MULT_EXPR, type,
6850 fold_build2 (code, type,
6851 fold_convert (type, alt0),
6852 fold_convert (type, alt1)),
6853 fold_convert (type, same));
6855 return NULL_TREE;
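/* Sample folds: `A * C + B * C' becomes `(A + B) * C' via the
   identical-multiplicand checks, and `X * 12 + Y * 4' becomes
   `(X * 3 + Y) * 4' via the common power-of-two factor path.  */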
6858 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6859 specified by EXPR into the buffer PTR of length LEN bytes.
6860 Return the number of bytes placed in the buffer, or zero
6861 upon failure. */
6863 static int
6864 native_encode_int (tree expr, unsigned char *ptr, int len)
6866 tree type = TREE_TYPE (expr);
6867 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6868 int byte, offset, word, words;
6869 unsigned char value;
6871 if (total_bytes > len)
6872 return 0;
6873 words = total_bytes / UNITS_PER_WORD;
6875 for (byte = 0; byte < total_bytes; byte++)
6877 int bitpos = byte * BITS_PER_UNIT;
6878 if (bitpos < HOST_BITS_PER_WIDE_INT)
6879 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6880 else
6881 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6882 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6884 if (total_bytes > UNITS_PER_WORD)
6886 word = byte / UNITS_PER_WORD;
6887 if (WORDS_BIG_ENDIAN)
6888 word = (words - 1) - word;
6889 offset = word * UNITS_PER_WORD;
6890 if (BYTES_BIG_ENDIAN)
6891 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6892 else
6893 offset += byte % UNITS_PER_WORD;
6895 else
6896 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6897 ptr[offset] = value;
6899 return total_bytes;
6903 /* Subroutine of native_encode_expr. Encode the REAL_CST
6904 specified by EXPR into the buffer PTR of length LEN bytes.
6905 Return the number of bytes placed in the buffer, or zero
6906 upon failure. */
6908 static int
6909 native_encode_real (tree expr, unsigned char *ptr, int len)
6911 tree type = TREE_TYPE (expr);
6912 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6913 int byte, offset, word, words;
6914 unsigned char value;
6916 /* There are always 32 bits in each long, no matter the size of
6917 the host's long. We handle floating point representations with
6918 up to 192 bits. */
6919 long tmp[6];
6921 if (total_bytes > len)
6922 return 0;
6923 words = total_bytes / UNITS_PER_WORD;
6925 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6927 for (byte = 0; byte < total_bytes; byte++)
6929 int bitpos = byte * BITS_PER_UNIT;
6930 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6932 if (total_bytes > UNITS_PER_WORD)
6934 word = byte / UNITS_PER_WORD;
6935 if (FLOAT_WORDS_BIG_ENDIAN)
6936 word = (words - 1) - word;
6937 offset = word * UNITS_PER_WORD;
6938 if (BYTES_BIG_ENDIAN)
6939 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6940 else
6941 offset += byte % UNITS_PER_WORD;
6943 else
6944 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6945 ptr[offset] = value;
6947 return total_bytes;
6950 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6951 specified by EXPR into the buffer PTR of length LEN bytes.
6952 Return the number of bytes placed in the buffer, or zero
6953 upon failure. */
6955 static int
6956 native_encode_complex (tree expr, unsigned char *ptr, int len)
6958 int rsize, isize;
6959 tree part;
6961 part = TREE_REALPART (expr);
6962 rsize = native_encode_expr (part, ptr, len);
6963 if (rsize == 0)
6964 return 0;
6965 part = TREE_IMAGPART (expr);
6966 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6967 if (isize != rsize)
6968 return 0;
6969 return rsize + isize;
6973 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6974 specified by EXPR into the buffer PTR of length LEN bytes.
6975 Return the number of bytes placed in the buffer, or zero
6976 upon failure. */
6978 static int
6979 native_encode_vector (tree expr, unsigned char *ptr, int len)
6981 int i, size, offset, count;
6982 tree itype, elem, elements;
6984 offset = 0;
6985 elements = TREE_VECTOR_CST_ELTS (expr);
6986 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6987 itype = TREE_TYPE (TREE_TYPE (expr));
6988 size = GET_MODE_SIZE (TYPE_MODE (itype));
6989 for (i = 0; i < count; i++)
6991 if (elements)
6993 elem = TREE_VALUE (elements);
6994 elements = TREE_CHAIN (elements);
6996 else
6997 elem = NULL_TREE;
6999 if (elem)
7001 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7002 return 0;
7004 else
7006 if (offset + size > len)
7007 return 0;
7008 memset (ptr+offset, 0, size);
7010 offset += size;
7012 return offset;
7016 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7017 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7018 buffer PTR of length LEN bytes. Return the number of bytes
7019 placed in the buffer, or zero upon failure. */
7021 static int
7022 native_encode_expr (tree expr, unsigned char *ptr, int len)
7024 switch (TREE_CODE (expr))
7026 case INTEGER_CST:
7027 return native_encode_int (expr, ptr, len);
7029 case REAL_CST:
7030 return native_encode_real (expr, ptr, len);
7032 case COMPLEX_CST:
7033 return native_encode_complex (expr, ptr, len);
7035 case VECTOR_CST:
7036 return native_encode_vector (expr, ptr, len);
7038 default:
7039 return 0;
7044 /* Subroutine of native_interpret_expr. Interpret the contents of
7045 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7046 If the buffer cannot be interpreted, return NULL_TREE. */
7048 static tree
7049 native_interpret_int (tree type, unsigned char *ptr, int len)
7051 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7052 int byte, offset, word, words;
7053 unsigned char value;
7054 unsigned HOST_WIDE_INT lo = 0;
7055 HOST_WIDE_INT hi = 0;
7057 if (total_bytes > len)
7058 return NULL_TREE;
7059 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7060 return NULL_TREE;
7061 words = total_bytes / UNITS_PER_WORD;
7063 for (byte = 0; byte < total_bytes; byte++)
7065 int bitpos = byte * BITS_PER_UNIT;
7066 if (total_bytes > UNITS_PER_WORD)
7068 word = byte / UNITS_PER_WORD;
7069 if (WORDS_BIG_ENDIAN)
7070 word = (words - 1) - word;
7071 offset = word * UNITS_PER_WORD;
7072 if (BYTES_BIG_ENDIAN)
7073 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7074 else
7075 offset += byte % UNITS_PER_WORD;
7077 else
7078 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7079 value = ptr[offset];
7081 if (bitpos < HOST_BITS_PER_WIDE_INT)
7082 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7083 else
7084 hi |= (unsigned HOST_WIDE_INT) value
7085 << (bitpos - HOST_BITS_PER_WIDE_INT);
7088 return force_fit_type (build_int_cst_wide (type, lo, hi),
7089 0, false, false);
7093 /* Subroutine of native_interpret_expr. Interpret the contents of
7094 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7095 If the buffer cannot be interpreted, return NULL_TREE. */
7097 static tree
7098 native_interpret_real (tree type, unsigned char *ptr, int len)
7100 enum machine_mode mode = TYPE_MODE (type);
7101 int total_bytes = GET_MODE_SIZE (mode);
7102 int byte, offset, word, words;
7103 unsigned char value;
7104 /* There are always 32 bits in each long, no matter the size of
7105 the host's long. We handle floating point representations with
7106 up to 192 bits. */
7107 REAL_VALUE_TYPE r;
7108 long tmp[6];
7110 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7111 if (total_bytes > len || total_bytes > 24)
7112 return NULL_TREE;
7113 words = total_bytes / UNITS_PER_WORD;
7115 memset (tmp, 0, sizeof (tmp));
7116 for (byte = 0; byte < total_bytes; byte++)
7118 int bitpos = byte * BITS_PER_UNIT;
7119 if (total_bytes > UNITS_PER_WORD)
7121 word = byte / UNITS_PER_WORD;
7122 if (FLOAT_WORDS_BIG_ENDIAN)
7123 word = (words - 1) - word;
7124 offset = word * UNITS_PER_WORD;
7125 if (BYTES_BIG_ENDIAN)
7126 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7127 else
7128 offset += byte % UNITS_PER_WORD;
7130 else
7131 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7132 value = ptr[offset];
7134 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7137 real_from_target (&r, tmp, mode);
7138 return build_real (type, r);
7142 /* Subroutine of native_interpret_expr. Interpret the contents of
7143 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7144 If the buffer cannot be interpreted, return NULL_TREE. */
7146 static tree
7147 native_interpret_complex (tree type, unsigned char *ptr, int len)
7149 tree etype, rpart, ipart;
7150 int size;
7152 etype = TREE_TYPE (type);
7153 size = GET_MODE_SIZE (TYPE_MODE (etype));
7154 if (size * 2 > len)
7155 return NULL_TREE;
7156 rpart = native_interpret_expr (etype, ptr, size);
7157 if (!rpart)
7158 return NULL_TREE;
7159 ipart = native_interpret_expr (etype, ptr+size, size);
7160 if (!ipart)
7161 return NULL_TREE;
7162 return build_complex (type, rpart, ipart);
7166 /* Subroutine of native_interpret_expr. Interpret the contents of
7167 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7168 If the buffer cannot be interpreted, return NULL_TREE. */
7170 static tree
7171 native_interpret_vector (tree type, unsigned char *ptr, int len)
7173 tree etype, elem, elements;
7174 int i, size, count;
7176 etype = TREE_TYPE (type);
7177 size = GET_MODE_SIZE (TYPE_MODE (etype));
7178 count = TYPE_VECTOR_SUBPARTS (type);
7179 if (size * count > len)
7180 return NULL_TREE;
7182 elements = NULL_TREE;
7183 for (i = count - 1; i >= 0; i--)
7185 elem = native_interpret_expr (etype, ptr+(i*size), size);
7186 if (!elem)
7187 return NULL_TREE;
7188 elements = tree_cons (NULL_TREE, elem, elements);
7190 return build_vector (type, elements);
7194 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7195 the buffer PTR of length LEN as a constant of type TYPE. For
7196 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7197 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7198 return NULL_TREE. */
7200 static tree
7201 native_interpret_expr (tree type, unsigned char *ptr, int len)
7203 switch (TREE_CODE (type))
7205 case INTEGER_TYPE:
7206 case ENUMERAL_TYPE:
7207 case BOOLEAN_TYPE:
7208 return native_interpret_int (type, ptr, len);
7210 case REAL_TYPE:
7211 return native_interpret_real (type, ptr, len);
7213 case COMPLEX_TYPE:
7214 return native_interpret_complex (type, ptr, len);
7216 case VECTOR_TYPE:
7217 return native_interpret_vector (type, ptr, len);
7219 default:
7220 return NULL_TREE;
7225 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7226 TYPE at compile-time. If we're unable to perform the conversion
7227 return NULL_TREE. */
7229 static tree
7230 fold_view_convert_expr (tree type, tree expr)
7232 /* We support up to 512-bit values (for V8DFmode). */
7233 unsigned char buffer[64];
7234 int len;
7236 /* Check that the host and target are sane. */
7237 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7238 return NULL_TREE;
7240 len = native_encode_expr (expr, buffer, sizeof (buffer));
7241 if (len == 0)
7242 return NULL_TREE;
7244 return native_interpret_expr (type, buffer, len);
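/* E.g. folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the float into
   its target byte representation and reinterprets those bytes as a
   32-bit integer, yielding 0x3f800000 regardless of byte order,
   since both steps use the target's own layout.  */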
7248 /* Fold a unary expression of code CODE and type TYPE with operand
7249 OP0. Return the folded expression if folding is successful.
7250 Otherwise, return NULL_TREE. */
7252 tree
7253 fold_unary (enum tree_code code, tree type, tree op0)
7255 tree tem;
7256 tree arg0;
7257 enum tree_code_class kind = TREE_CODE_CLASS (code);
7259 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7260 && TREE_CODE_LENGTH (code) == 1);
7262 arg0 = op0;
7263 if (arg0)
7265 if (code == NOP_EXPR || code == CONVERT_EXPR
7266 || code == FLOAT_EXPR || code == ABS_EXPR)
7268 /* Don't use STRIP_NOPS, because signedness of argument type
7269 matters. */
7270 STRIP_SIGN_NOPS (arg0);
7272 else
7274 /* Strip any conversions that don't change the mode. This
7275 is safe for every expression, except for a comparison
7276 expression because its signedness is derived from its
7277 operands.
7279 Note that this is done as an internal manipulation within
7280 the constant folder, in order to find the simplest
7281 representation of the arguments so that their form can be
7282 studied. In any case, the appropriate type conversions
7283 should be put back in the tree that will get out of the
7284 constant folder. */
7285 STRIP_NOPS (arg0);
7289 if (TREE_CODE_CLASS (code) == tcc_unary)
7291 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7292 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7293 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7294 else if (TREE_CODE (arg0) == COND_EXPR)
7296 tree arg01 = TREE_OPERAND (arg0, 1);
7297 tree arg02 = TREE_OPERAND (arg0, 2);
7298 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7299 arg01 = fold_build1 (code, type, arg01);
7300 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7301 arg02 = fold_build1 (code, type, arg02);
7302 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7303 arg01, arg02);
7305 /* If this was a conversion, and all we did was to move it
7306 inside the COND_EXPR, bring it back out. But leave it if
7307 it is a conversion from integer to integer and the
7308 result precision is no wider than a word since such a
7309 conversion is cheap and may be optimized away by combine,
7310 while it couldn't if it were outside the COND_EXPR. Then return
7311 so we don't get into an infinite recursion loop taking the
7312 conversion out and then back in. */
7314 if ((code == NOP_EXPR || code == CONVERT_EXPR
7315 || code == NON_LVALUE_EXPR)
7316 && TREE_CODE (tem) == COND_EXPR
7317 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7318 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7319 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7320 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7321 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7322 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7323 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7324 && (INTEGRAL_TYPE_P
7325 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7326 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7327 || flag_syntax_only))
7328 tem = build1 (code, type,
7329 build3 (COND_EXPR,
7330 TREE_TYPE (TREE_OPERAND
7331 (TREE_OPERAND (tem, 1), 0)),
7332 TREE_OPERAND (tem, 0),
7333 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7334 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7335 return tem;
7337 else if (COMPARISON_CLASS_P (arg0))
7339 if (TREE_CODE (type) == BOOLEAN_TYPE)
7341 arg0 = copy_node (arg0);
7342 TREE_TYPE (arg0) = type;
7343 return arg0;
7345 else if (TREE_CODE (type) != INTEGER_TYPE)
7346 return fold_build3 (COND_EXPR, type, arg0,
7347 fold_build1 (code, type,
7348 integer_one_node),
7349 fold_build1 (code, type,
7350 integer_zero_node));
7354 switch (code)
7356 case NOP_EXPR:
7357 case FLOAT_EXPR:
7358 case CONVERT_EXPR:
7359 case FIX_TRUNC_EXPR:
7360 if (TREE_TYPE (op0) == type)
7361 return op0;
7363 /* If we have (type) (a CMP b) and type is an integral type, return
7364 new expression involving the new type. */
7365 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7366 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7367 TREE_OPERAND (op0, 1));
7369 /* Handle cases of two conversions in a row. */
7370 if (TREE_CODE (op0) == NOP_EXPR
7371 || TREE_CODE (op0) == CONVERT_EXPR)
7373 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7374 tree inter_type = TREE_TYPE (op0);
7375 int inside_int = INTEGRAL_TYPE_P (inside_type);
7376 int inside_ptr = POINTER_TYPE_P (inside_type);
7377 int inside_float = FLOAT_TYPE_P (inside_type);
7378 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7379 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7380 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7381 int inter_int = INTEGRAL_TYPE_P (inter_type);
7382 int inter_ptr = POINTER_TYPE_P (inter_type);
7383 int inter_float = FLOAT_TYPE_P (inter_type);
7384 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7385 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7386 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7387 int final_int = INTEGRAL_TYPE_P (type);
7388 int final_ptr = POINTER_TYPE_P (type);
7389 int final_float = FLOAT_TYPE_P (type);
7390 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7391 unsigned int final_prec = TYPE_PRECISION (type);
7392 int final_unsignedp = TYPE_UNSIGNED (type);
7394 /* In addition to the cases of two conversions in a row
7395 handled below, if we are converting something to its own
7396 type via an object of identical or wider precision, neither
7397 conversion is needed. */
7398 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7399 && (((inter_int || inter_ptr) && final_int)
7400 || (inter_float && final_float))
7401 && inter_prec >= final_prec)
7402 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7404 /* Likewise, if the intermediate and final types are either both
7405 float or both integer, we don't need the middle conversion if
7406 it is wider than the final type and doesn't change the signedness
7407 (for integers). Avoid this if the final type is a pointer
7408 since then we sometimes need the inner conversion. Likewise if
7409 the outer has a precision not equal to the size of its mode. */
7410 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7411 || (inter_float && inside_float)
7412 || (inter_vec && inside_vec))
7413 && inter_prec >= inside_prec
7414 && (inter_float || inter_vec
7415 || inter_unsignedp == inside_unsignedp)
7416 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7417 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7418 && ! final_ptr
7419 && (! final_vec || inter_prec == inside_prec))
7420 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7422 /* If we have a sign-extension of a zero-extended value, we can
7423 replace that by a single zero-extension. */
7424 if (inside_int && inter_int && final_int
7425 && inside_prec < inter_prec && inter_prec < final_prec
7426 && inside_unsignedp && !inter_unsignedp)
7427 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7429 /* Two conversions in a row are not needed unless:
7430 - some conversion is floating-point (overstrict for now), or
7431 - some conversion is a vector (overstrict for now), or
7432 - the intermediate type is narrower than both initial and
7433 final, or
7434 - the intermediate type and innermost type differ in signedness,
7435 and the outermost type is wider than the intermediate, or
7436 - the initial type is a pointer type and the precisions of the
7437 intermediate and final types differ, or
7438 - the final type is a pointer type and the precisions of the
7439 initial and intermediate types differ, or
7440 - the final type is a pointer type and the initial type is not, or
7441 - the initial type is a pointer to an array and the final type
7442 is not. */
7443 if (! inside_float && ! inter_float && ! final_float
7444 && ! inside_vec && ! inter_vec && ! final_vec
7445 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7446 && ! (inside_int && inter_int
7447 && inter_unsignedp != inside_unsignedp
7448 && inter_prec < final_prec)
7449 && ((inter_unsignedp && inter_prec > inside_prec)
7450 == (final_unsignedp && final_prec > inter_prec))
7451 && ! (inside_ptr && inter_prec != final_prec)
7452 && ! (final_ptr && inside_prec != inter_prec)
7453 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7454 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7455 && final_ptr == inside_ptr
7456 && ! (inside_ptr
7457 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7458 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7459 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
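/* Worked example (editorial sketch, not from the original sources):
   with the usual 8/16/32/64-bit integer types, the rules above give

       (int) (short) uc   ->  (int) uc     // uc: unsigned char; a
                                           // sign-extension of a
                                           // zero-extended value is
                                           // itself a zero-extension
       (short) (long) i   ->  (short) i    // i: int; the widening
                                           // intermediate cast drops

   while (int) (char) i is left alone, because the intermediate type
   is narrower than both the initial and the final type.  */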
7462 /* Handle (T *)&A.B.C for A being of type T and B and C
7463 living at offset zero. This occurs frequently in
7464 C++ upcasting and then accessing the base. */
7465 if (TREE_CODE (op0) == ADDR_EXPR
7466 && POINTER_TYPE_P (type)
7467 && handled_component_p (TREE_OPERAND (op0, 0)))
7469 HOST_WIDE_INT bitsize, bitpos;
7470 tree offset;
7471 enum machine_mode mode;
7472 int unsignedp, volatilep;
7473 tree base = TREE_OPERAND (op0, 0);
7474 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7475 &mode, &unsignedp, &volatilep, false);
7476 /* If the reference was to a (constant) zero offset, we can use
7477 the address of the base if it has the same base type
7478 as the result type. */
7479 if (! offset && bitpos == 0
7480 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7481 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7482 return fold_convert (type, build_fold_addr_expr (base));
7485 if ((TREE_CODE (op0) == MODIFY_EXPR
7486 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7487 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7488 /* Detect assigning a bitfield. */
7489 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7490 && DECL_BIT_FIELD
7491 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7493 /* Don't leave an assignment inside a conversion
7494 unless assigning a bitfield. */
7495 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7496 /* First do the assignment, then return converted constant. */
7497 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7498 TREE_NO_WARNING (tem) = 1;
7499 TREE_USED (tem) = 1;
7500 return tem;
7503 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7504 constant (if x has signed type, the sign bit cannot be set
7505 in c). This folds extension into the BIT_AND_EXPR. */
7506 if (INTEGRAL_TYPE_P (type)
7507 && TREE_CODE (type) != BOOLEAN_TYPE
7508 && TREE_CODE (op0) == BIT_AND_EXPR
7509 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7511 tree and = op0;
7512 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7513 int change = 0;
7515 if (TYPE_UNSIGNED (TREE_TYPE (and))
7516 || (TYPE_PRECISION (type)
7517 <= TYPE_PRECISION (TREE_TYPE (and))))
7518 change = 1;
7519 else if (TYPE_PRECISION (TREE_TYPE (and1))
7520 <= HOST_BITS_PER_WIDE_INT
7521 && host_integerp (and1, 1))
7523 unsigned HOST_WIDE_INT cst;
7525 cst = tree_low_cst (and1, 1);
7526 cst &= (HOST_WIDE_INT) -1
7527 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7528 change = (cst == 0);
7529 #ifdef LOAD_EXTEND_OP
7530 if (change
7531 && !flag_syntax_only
7532 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7533 == ZERO_EXTEND))
7535 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7536 and0 = fold_convert (uns, and0);
7537 and1 = fold_convert (uns, and1);
7539 #endif
7541 if (change)
7543 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7544 TREE_INT_CST_HIGH (and1));
7545 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7546 TREE_CONSTANT_OVERFLOW (and1));
7547 return fold_build2 (BIT_AND_EXPR, type,
7548 fold_convert (type, and0), tem);
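/* Worked example (editorial sketch): for a BIT_AND computed in a
   signed 16-bit `short' s,

       (unsigned int) (s & 0xff)   ->   (unsigned int) s & 0xff

   since 0xff leaves the sign bit of `short' clear, so masking before
   or after the widening gives the same value.  (s & 0x8000) is not
   rewritten, because that mask keeps the sign bit and the two orders
   would then disagree (barring the LOAD_EXTEND_OP special case).  */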
7552 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7553 T2 being pointers to types of the same size. */
7554 if (POINTER_TYPE_P (type)
7555 && BINARY_CLASS_P (arg0)
7556 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7557 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7559 tree arg00 = TREE_OPERAND (arg0, 0);
7560 tree t0 = type;
7561 tree t1 = TREE_TYPE (arg00);
7562 tree tt0 = TREE_TYPE (t0);
7563 tree tt1 = TREE_TYPE (t1);
7564 tree s0 = TYPE_SIZE (tt0);
7565 tree s1 = TYPE_SIZE (tt1);
7567 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7568 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7569 TREE_OPERAND (arg0, 1));
7572 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7573 of the same precision, and X is an integer type not narrower than
7574 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7575 if (INTEGRAL_TYPE_P (type)
7576 && TREE_CODE (op0) == BIT_NOT_EXPR
7577 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7578 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7579 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7580 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7582 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7583 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7584 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7585 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7588 tem = fold_convert_const (code, type, arg0);
7589 return tem ? tem : NULL_TREE;
7591 case VIEW_CONVERT_EXPR:
7592 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7593 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7594 return fold_view_convert_expr (type, op0);
7596 case NEGATE_EXPR:
7597 tem = fold_negate_expr (arg0);
7598 if (tem)
7599 return fold_convert (type, tem);
7600 return NULL_TREE;
7602 case ABS_EXPR:
7603 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7604 return fold_abs_const (arg0, type);
7605 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7606 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7607 /* Convert fabs((double)float) into (double)fabsf(float). */
7608 else if (TREE_CODE (arg0) == NOP_EXPR
7609 && TREE_CODE (type) == REAL_TYPE)
7611 tree targ0 = strip_float_extensions (arg0);
7612 if (targ0 != arg0)
7613 return fold_convert (type, fold_build1 (ABS_EXPR,
7614 TREE_TYPE (targ0),
7615 targ0));
7617 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7618 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7619 return arg0;
7621 /* Strip sign ops from argument. */
7622 if (TREE_CODE (type) == REAL_TYPE)
7624 tem = fold_strip_sign_ops (arg0);
7625 if (tem)
7626 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7628 return NULL_TREE;
7630 case CONJ_EXPR:
7631 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7632 return fold_convert (type, arg0);
7633 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7635 tree itype = TREE_TYPE (type);
7636 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7637 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7638 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7640 if (TREE_CODE (arg0) == COMPLEX_CST)
7642 tree itype = TREE_TYPE (type);
7643 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7644 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7645 return build_complex (type, rpart, negate_expr (ipart));
7647 if (TREE_CODE (arg0) == CONJ_EXPR)
7648 return fold_convert (type, TREE_OPERAND (arg0, 0));
7649 return NULL_TREE;
7651 case BIT_NOT_EXPR:
7652 if (TREE_CODE (arg0) == INTEGER_CST)
7653 return fold_not_const (arg0, type);
7654 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7655 return TREE_OPERAND (arg0, 0);
7656 /* Convert ~ (-A) to A - 1. */
7657 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7658 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7659 build_int_cst (type, 1));
7660 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7661 else if (INTEGRAL_TYPE_P (type)
7662 && ((TREE_CODE (arg0) == MINUS_EXPR
7663 && integer_onep (TREE_OPERAND (arg0, 1)))
7664 || (TREE_CODE (arg0) == PLUS_EXPR
7665 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7666 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7667 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7668 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7669 && (tem = fold_unary (BIT_NOT_EXPR, type,
7670 fold_convert (type,
7671 TREE_OPERAND (arg0, 0)))))
7672 return fold_build2 (BIT_XOR_EXPR, type, tem,
7673 fold_convert (type, TREE_OPERAND (arg0, 1)));
7674 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7675 && (tem = fold_unary (BIT_NOT_EXPR, type,
7676 fold_convert (type,
7677 TREE_OPERAND (arg0, 1)))))
7678 return fold_build2 (BIT_XOR_EXPR, type,
7679 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7681 return NULL_TREE;
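/* Worked examples (editorial sketch) of the BIT_NOT_EXPR rules above,
   all instances of the two's complement identity ~X == -X - 1:

       ~~x        ->  x
       ~(-x)      ->  x - 1
       ~(x - 1)   ->  -x
       ~(x ^ 5)   ->  x ^ ~5    // the XOR rule fires because ~5
                                // folds to a constant (-6)           */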
7683 case TRUTH_NOT_EXPR:
7684 /* The argument to invert_truthvalue must have Boolean type. */
7685 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7686 arg0 = fold_convert (boolean_type_node, arg0);
7688 /* Note that the operand of this must be an int
7689 and its values must be 0 or 1.
7690 ("true" is a fixed value perhaps depending on the language,
7691 but we don't handle values other than 1 correctly yet.) */
7692 tem = fold_truth_not_expr (arg0);
7693 if (!tem)
7694 return NULL_TREE;
7695 return fold_convert (type, tem);
7697 case REALPART_EXPR:
7698 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7699 return fold_convert (type, arg0);
7700 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7701 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7702 TREE_OPERAND (arg0, 1));
7703 if (TREE_CODE (arg0) == COMPLEX_CST)
7704 return fold_convert (type, TREE_REALPART (arg0));
7705 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7707 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7708 tem = fold_build2 (TREE_CODE (arg0), itype,
7709 fold_build1 (REALPART_EXPR, itype,
7710 TREE_OPERAND (arg0, 0)),
7711 fold_build1 (REALPART_EXPR, itype,
7712 TREE_OPERAND (arg0, 1)));
7713 return fold_convert (type, tem);
7715 if (TREE_CODE (arg0) == CONJ_EXPR)
7717 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7718 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7719 return fold_convert (type, tem);
7721 if (TREE_CODE (arg0) == CALL_EXPR)
7723 tree fn = get_callee_fndecl (arg0);
7724 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7725 switch (DECL_FUNCTION_CODE (fn))
7727 CASE_FLT_FN (BUILT_IN_CEXPI):
7728 fn = mathfn_built_in (type, BUILT_IN_COS);
7729 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7731 default:;
7734 return NULL_TREE;
7736 case IMAGPART_EXPR:
7737 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7738 return fold_convert (type, integer_zero_node);
7739 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7740 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7741 TREE_OPERAND (arg0, 0));
7742 if (TREE_CODE (arg0) == COMPLEX_CST)
7743 return fold_convert (type, TREE_IMAGPART (arg0));
7744 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7746 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7747 tem = fold_build2 (TREE_CODE (arg0), itype,
7748 fold_build1 (IMAGPART_EXPR, itype,
7749 TREE_OPERAND (arg0, 0)),
7750 fold_build1 (IMAGPART_EXPR, itype,
7751 TREE_OPERAND (arg0, 1)));
7752 return fold_convert (type, tem);
7754 if (TREE_CODE (arg0) == CONJ_EXPR)
7756 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7757 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7758 return fold_convert (type, negate_expr (tem));
7760 if (TREE_CODE (arg0) == CALL_EXPR)
7762 tree fn = get_callee_fndecl (arg0);
7763 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7764 switch (DECL_FUNCTION_CODE (fn))
7766 CASE_FLT_FN (BUILT_IN_CEXPI):
7767 fn = mathfn_built_in (type, BUILT_IN_SIN);
7768 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7770 default:;
7773 return NULL_TREE;
7775 default:
7776 return NULL_TREE;
7777 } /* switch (code) */
7780 /* Fold a binary expression of code CODE and type TYPE with operands
7781 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7782 Return the folded expression if folding is successful. Otherwise,
7783 return NULL_TREE. */
7785 static tree
7786 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7788 enum tree_code compl_code;
7790 if (code == MIN_EXPR)
7791 compl_code = MAX_EXPR;
7792 else if (code == MAX_EXPR)
7793 compl_code = MIN_EXPR;
7794 else
7795 gcc_unreachable ();
7797 /* MIN (MAX (a, b), b) == b. */
7798 if (TREE_CODE (op0) == compl_code
7799 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7800 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7802 /* MIN (MAX (b, a), b) == b. */
7803 if (TREE_CODE (op0) == compl_code
7804 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7805 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7806 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7808 /* MIN (a, MAX (a, b)) == a. */
7809 if (TREE_CODE (op1) == compl_code
7810 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7811 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7812 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7814 /* MIN (a, MAX (b, a)) == a. */
7815 if (TREE_CODE (op1) == compl_code
7816 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7817 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7818 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7820 return NULL_TREE;
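/* Worked examples (editorial sketch): with `b' the common operand,

       MIN (MAX (a, b), b)   ->   b
       MAX (a, MIN (a, b))   ->   a

   where omit_one_operand keeps alive any side effects of the operand
   that drops out.  */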
7823 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7824 by changing CODE to reduce the magnitude of constants involved in
7825 ARG0 of the comparison.
7826 Returns a canonicalized comparison tree if a simplification was
7827 possible, otherwise returns NULL_TREE. */
7829 static tree
7830 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7831 tree arg0, tree arg1)
7833 enum tree_code code0 = TREE_CODE (arg0);
7834 tree t, cst0 = NULL_TREE;
7835 int sgn0;
7836 bool swap = false;
7838 /* Match A +- CST code arg1 and CST code arg1. */
7839 if (!(((code0 == MINUS_EXPR
7840 || code0 == PLUS_EXPR)
7841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7842 || code0 == INTEGER_CST))
7843 return NULL_TREE;
7845 /* Identify the constant in arg0 and its sign. */
7846 if (code0 == INTEGER_CST)
7847 cst0 = arg0;
7848 else
7849 cst0 = TREE_OPERAND (arg0, 1);
7850 sgn0 = tree_int_cst_sgn (cst0);
7852 /* Overflowed constants and zero will cause problems. */
7853 if (integer_zerop (cst0)
7854 || TREE_OVERFLOW (cst0))
7855 return NULL_TREE;
7857 /* See if we can reduce the magnitude of the constant in
7858 arg0 by changing the comparison code. */
7859 if (code0 == INTEGER_CST)
7861 /* CST <= arg1 -> CST-1 < arg1. */
7862 if (code == LE_EXPR && sgn0 == 1)
7863 code = LT_EXPR;
7864 /* -CST < arg1 -> -CST-1 <= arg1. */
7865 else if (code == LT_EXPR && sgn0 == -1)
7866 code = LE_EXPR;
7867 /* CST > arg1 -> CST-1 >= arg1. */
7868 else if (code == GT_EXPR && sgn0 == 1)
7869 code = GE_EXPR;
7870 /* -CST >= arg1 -> -CST-1 > arg1. */
7871 else if (code == GE_EXPR && sgn0 == -1)
7872 code = GT_EXPR;
7873 else
7874 return NULL_TREE;
7875 /* arg1 code' CST' might be more canonical. */
7876 swap = true;
7878 else
7880 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7881 if (code == LT_EXPR
7882 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7883 code = LE_EXPR;
7884 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7885 else if (code == GT_EXPR
7886 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7887 code = GE_EXPR;
7888 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7889 else if (code == LE_EXPR
7890 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7891 code = LT_EXPR;
7892 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7893 else if (code == GE_EXPR
7894 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7895 code = GT_EXPR;
7896 else
7897 return NULL_TREE;
7900 /* Now build the constant reduced in magnitude. */
7901 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7902 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7903 if (code0 != INTEGER_CST)
7904 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7906 /* If swapping might yield a more canonical form, do so. */
7907 if (swap)
7908 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7909 else
7910 return fold_build2 (code, type, t, arg1);
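/* Worked examples (editorial sketch), assuming signed operands whose
   overflow is undefined:

       x - 5 <  y   ->   x - 4 <= y    // constant magnitude shrinks
       3 <= y       ->   y > 2         // plus a swap that puts the
                                       // constant last               */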
7913 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7914 overflow further. Try to decrease the magnitude of constants involved
7915 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7916 and put sole constants at the second argument position.
7917 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7919 static tree
7920 maybe_canonicalize_comparison (enum tree_code code, tree type,
7921 tree arg0, tree arg1)
7923 tree t;
7925 /* In principle pointers also have undefined overflow behavior,
7926 but that causes problems elsewhere. */
7927 if ((flag_wrapv || flag_trapv)
7928 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7929 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7930 return NULL_TREE;
7932 /* Try canonicalization by simplifying arg0. */
7933 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7934 if (t)
7935 return t;
7937 /* Try canonicalization by simplifying arg1 using the swapped
7938 comparison. */
7939 code = swap_tree_comparison (code);
7940 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7943 /* Subroutine of fold_binary. This routine performs all of the
7944 transformations that are common to the equality/inequality
7945 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7946 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7947 fold_binary should call fold_binary. Fold a comparison with
7948 tree code CODE and type TYPE with operands OP0 and OP1. Return
7949 the folded comparison or NULL_TREE. */
7951 static tree
7952 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7954 tree arg0, arg1, tem;
7956 arg0 = op0;
7957 arg1 = op1;
7959 STRIP_SIGN_NOPS (arg0);
7960 STRIP_SIGN_NOPS (arg1);
7962 tem = fold_relational_const (code, type, arg0, arg1);
7963 if (tem != NULL_TREE)
7964 return tem;
7966 /* If one arg is a real or integer constant, put it last. */
7967 if (tree_swap_operands_p (arg0, arg1, true))
7968 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7970 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7971 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7972 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7973 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7974 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7975 && !(flag_wrapv || flag_trapv))
7976 && (TREE_CODE (arg1) == INTEGER_CST
7977 && !TREE_OVERFLOW (arg1)))
7979 tree const1 = TREE_OPERAND (arg0, 1);
7980 tree const2 = arg1;
7981 tree variable = TREE_OPERAND (arg0, 0);
7982 tree lhs;
7983 int lhs_add;
7984 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7986 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7987 TREE_TYPE (arg1), const2, const1);
7988 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7989 && (TREE_CODE (lhs) != INTEGER_CST
7990 || !TREE_OVERFLOW (lhs)))
7991 return fold_build2 (code, type, variable, lhs);
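/* Worked examples (editorial sketch); valid because signed overflow
   is undefined here (neither -fwrapv nor -ftrapv in effect):

       x + 5 <  10   ->   x <  5
       x - 3 == 7    ->   x == 10                                     */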
7994 /* For comparisons of pointers we can decompose them into a compile-time
7995 comparison of the base objects and the offsets into the object.
7996 This requires at least one operand being an ADDR_EXPR to do more
7997 than the operand_equal_p test below. */
7998 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7999 && (TREE_CODE (arg0) == ADDR_EXPR
8000 || TREE_CODE (arg1) == ADDR_EXPR))
8002 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8003 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8004 enum machine_mode mode;
8005 int volatilep, unsignedp;
8006 bool indirect_base0 = false;
8008 /* Get base and offset for the access. Strip ADDR_EXPR for
8009 get_inner_reference, but put it back by stripping INDIRECT_REF
8010 off the base object if possible. */
8011 base0 = arg0;
8012 if (TREE_CODE (arg0) == ADDR_EXPR)
8014 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8015 &bitsize, &bitpos0, &offset0, &mode,
8016 &unsignedp, &volatilep, false);
8017 if (TREE_CODE (base0) == INDIRECT_REF)
8018 base0 = TREE_OPERAND (base0, 0);
8019 else
8020 indirect_base0 = true;
8023 base1 = arg1;
8024 if (TREE_CODE (arg1) == ADDR_EXPR)
8026 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8027 &bitsize, &bitpos1, &offset1, &mode,
8028 &unsignedp, &volatilep, false);
8029 /* We have to make sure to have an indirect/non-indirect base1
8030 just the same as we did for base0. */
8031 if (TREE_CODE (base1) == INDIRECT_REF
8032 && !indirect_base0)
8033 base1 = TREE_OPERAND (base1, 0);
8034 else if (!indirect_base0)
8035 base1 = NULL_TREE;
8037 else if (indirect_base0)
8038 base1 = NULL_TREE;
8040 /* If we have equivalent bases we might be able to simplify. */
8041 if (base0 && base1
8042 && operand_equal_p (base0, base1, 0))
8044 /* We can fold this expression to a constant if the non-constant
8045 offset parts are equal. */
8046 if (offset0 == offset1
8047 || (offset0 && offset1
8048 && operand_equal_p (offset0, offset1, 0)))
8050 switch (code)
8052 case EQ_EXPR:
8053 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8054 case NE_EXPR:
8055 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8056 case LT_EXPR:
8057 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8058 case LE_EXPR:
8059 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8060 case GE_EXPR:
8061 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8062 case GT_EXPR:
8063 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8064 default:;
8067 /* We can simplify the comparison to a comparison of the variable
8068 offset parts if the constant offset parts are equal.
8069 Be careful to use signed size type here because otherwise we
8070 mess with array offsets in the wrong way. This is possible
8071 because pointer arithmetic is restricted to remain within an
8072 object and overflow on pointer differences is undefined as of
8073 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8074 else if (bitpos0 == bitpos1)
8076 tree signed_size_type_node;
8077 signed_size_type_node = signed_type_for (size_type_node);
8079 /* By converting to signed size type we cover middle-end pointer
8080 arithmetic which operates on unsigned pointer types of size
8081 type size and ARRAY_REF offsets which are properly sign or
8082 zero extended from their type in case it is narrower than
8083 size type. */
8084 if (offset0 == NULL_TREE)
8085 offset0 = build_int_cst (signed_size_type_node, 0);
8086 else
8087 offset0 = fold_convert (signed_size_type_node, offset0);
8088 if (offset1 == NULL_TREE)
8089 offset1 = build_int_cst (signed_size_type_node, 0);
8090 else
8091 offset1 = fold_convert (signed_size_type_node, offset1);
8093 return fold_build2 (code, type, offset0, offset1);
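/* Worked examples (editorial sketch): given  struct { int a, b; } s;
   and an array `v' of 4-byte ints,

       &s.b > &s.a     ->   1            // same base, constant bit
                                         // positions 32 > 0
       &v[i] < &v[j]   ->   i*4 < j*4    // same base and bit position,
                                         // so the variable byte
                                         // offsets are compared in
                                         // signed size type          */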
8098 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8099 same object, then we can fold this to a comparison of the two offsets in
8100 signed size type. This is possible because pointer arithmetic is
8101 restricted to retain within an object and overflow on pointer differences
8102 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8103 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8104 && !flag_wrapv && !flag_trapv)
8106 tree base0, offset0, base1, offset1;
8108 if (extract_array_ref (arg0, &base0, &offset0)
8109 && extract_array_ref (arg1, &base1, &offset1)
8110 && operand_equal_p (base0, base1, 0))
8112 tree signed_size_type_node;
8113 signed_size_type_node = signed_type_for (size_type_node);
8115 /* By converting to signed size type we cover middle-end pointer
8116 arithmetic which operates on unsigned pointer types of size
8117 type size and ARRAY_REF offsets which are properly sign or
8118 zero extended from their type in case it is narrower than
8119 size type. */
8120 if (offset0 == NULL_TREE)
8121 offset0 = build_int_cst (signed_size_type_node, 0);
8122 else
8123 offset0 = fold_convert (signed_size_type_node, offset0);
8124 if (offset1 == NULL_TREE)
8125 offset1 = build_int_cst (signed_size_type_node, 0);
8126 else
8127 offset1 = fold_convert (signed_size_type_node, offset1);
8129 return fold_build2 (code, type, offset0, offset1);
8133 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8134 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8135 the resulting offset is smaller in absolute value than the
8136 original one. */
8137 if (!(flag_wrapv || flag_trapv)
8138 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8139 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8140 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8141 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8142 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8143 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8144 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8146 tree const1 = TREE_OPERAND (arg0, 1);
8147 tree const2 = TREE_OPERAND (arg1, 1);
8148 tree variable1 = TREE_OPERAND (arg0, 0);
8149 tree variable2 = TREE_OPERAND (arg1, 0);
8150 tree cst;
8152 /* Put the constant on the side where it doesn't overflow and is
8153 of lower absolute value than before. */
8154 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8155 ? MINUS_EXPR : PLUS_EXPR,
8156 const2, const1, 0);
8157 if (!TREE_OVERFLOW (cst)
8158 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8159 return fold_build2 (code, type,
8160 variable1,
8161 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8162 variable2, cst));
8164 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8165 ? MINUS_EXPR : PLUS_EXPR,
8166 const1, const2, 0);
8167 if (!TREE_OVERFLOW (cst)
8168 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8169 return fold_build2 (code, type,
8170 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8171 variable1, cst),
8172 variable2);
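/* Worked example (editorial sketch), again relying on undefined
   signed overflow:

       x + 10 < y + 3   ->   x < y + (-7)

   leaving a single constant of smaller magnitude on one side.  */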
8175 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8176 if (tem)
8177 return tem;
8179 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8181 tree targ0 = strip_float_extensions (arg0);
8182 tree targ1 = strip_float_extensions (arg1);
8183 tree newtype = TREE_TYPE (targ0);
8185 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8186 newtype = TREE_TYPE (targ1);
8188 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8189 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8190 return fold_build2 (code, type, fold_convert (newtype, targ0),
8191 fold_convert (newtype, targ1));
8193 /* (-a) CMP (-b) -> b CMP a */
8194 if (TREE_CODE (arg0) == NEGATE_EXPR
8195 && TREE_CODE (arg1) == NEGATE_EXPR)
8196 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8197 TREE_OPERAND (arg0, 0));
8199 if (TREE_CODE (arg1) == REAL_CST)
8201 REAL_VALUE_TYPE cst;
8202 cst = TREE_REAL_CST (arg1);
8204 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8205 if (TREE_CODE (arg0) == NEGATE_EXPR)
8206 return fold_build2 (swap_tree_comparison (code), type,
8207 TREE_OPERAND (arg0, 0),
8208 build_real (TREE_TYPE (arg1),
8209 REAL_VALUE_NEGATE (cst)));
8211 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8212 /* a CMP (-0) -> a CMP 0 */
8213 if (REAL_VALUE_MINUS_ZERO (cst))
8214 return fold_build2 (code, type, arg0,
8215 build_real (TREE_TYPE (arg1), dconst0));
8217 /* x != NaN is always true, other ops are always false. */
8218 if (REAL_VALUE_ISNAN (cst)
8219 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8221 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8222 return omit_one_operand (type, tem, arg0);
8225 /* Fold comparisons against infinity. */
8226 if (REAL_VALUE_ISINF (cst))
8228 tem = fold_inf_compare (code, type, arg0, arg1);
8229 if (tem != NULL_TREE)
8230 return tem;
8234 /* If this is a comparison of a real constant with a PLUS_EXPR
8235 or a MINUS_EXPR of a real constant, we can convert it into a
8236 comparison with a revised real constant as long as no overflow
8237 occurs when unsafe_math_optimizations are enabled. */
8238 if (flag_unsafe_math_optimizations
8239 && TREE_CODE (arg1) == REAL_CST
8240 && (TREE_CODE (arg0) == PLUS_EXPR
8241 || TREE_CODE (arg0) == MINUS_EXPR)
8242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8243 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8244 ? MINUS_EXPR : PLUS_EXPR,
8245 arg1, TREE_OPERAND (arg0, 1), 0))
8246 && ! TREE_CONSTANT_OVERFLOW (tem))
8247 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8249 /* Likewise, we can simplify a comparison of a real constant with
8250 a MINUS_EXPR whose first operand is also a real constant, i.e.
8251 (c1 - x) < c2 becomes x > c1-c2. */
8252 if (flag_unsafe_math_optimizations
8253 && TREE_CODE (arg1) == REAL_CST
8254 && TREE_CODE (arg0) == MINUS_EXPR
8255 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8256 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8257 arg1, 0))
8258 && ! TREE_CONSTANT_OVERFLOW (tem))
8259 return fold_build2 (swap_tree_comparison (code), type,
8260 TREE_OPERAND (arg0, 1), tem);
8262 /* Fold comparisons against built-in math functions. */
8263 if (TREE_CODE (arg1) == REAL_CST
8264 && flag_unsafe_math_optimizations
8265 && ! flag_errno_math)
8267 enum built_in_function fcode = builtin_mathfn_code (arg0);
8269 if (fcode != END_BUILTINS)
8271 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8272 if (tem != NULL_TREE)
8273 return tem;
8278 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8279 if (TREE_CONSTANT (arg1)
8280 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8281 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8282 /* This optimization is invalid for ordered comparisons
8283 if CONST+INCR overflows or if foo+incr might overflow.
8284 This optimization is invalid for floating point due to rounding.
8285 For pointer types we assume overflow doesn't happen. */
8286 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8287 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8288 && (code == EQ_EXPR || code == NE_EXPR))))
8290 tree varop, newconst;
8292 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8294 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8295 arg1, TREE_OPERAND (arg0, 1));
8296 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8297 TREE_OPERAND (arg0, 0),
8298 TREE_OPERAND (arg0, 1));
8300 else
8302 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8303 arg1, TREE_OPERAND (arg0, 1));
8304 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8305 TREE_OPERAND (arg0, 0),
8306 TREE_OPERAND (arg0, 1));
8310 /* If VAROP is a reference to a bitfield, we must mask
8311 the constant by the width of the field. */
8312 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8313 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8314 && host_integerp (DECL_SIZE (TREE_OPERAND
8315 (TREE_OPERAND (varop, 0), 1)), 1))
8317 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8318 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8319 tree folded_compare, shift;
8321 /* First check whether the comparison would come out
8322 always the same. If we don't do that we would
8323 change the meaning with the masking. */
8324 folded_compare = fold_build2 (code, type,
8325 TREE_OPERAND (varop, 0), arg1);
8326 if (TREE_CODE (folded_compare) == INTEGER_CST)
8327 return omit_one_operand (type, folded_compare, varop);
8329 shift = build_int_cst (NULL_TREE,
8330 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8331 shift = fold_convert (TREE_TYPE (varop), shift);
8332 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8333 newconst, shift);
8334 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8335 newconst, shift);
8338 return fold_build2 (code, type, varop, newconst);
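/* Worked example (editorial sketch):

       i++ == 5   ->   ++i == 6

   i.e. do the increment first and test against the adjusted constant;
   restricted to EQ/NE on integers (or any comparison on pointers) so
   that overflow or rounding cannot change the outcome.  */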
8341 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8342 && (TREE_CODE (arg0) == NOP_EXPR
8343 || TREE_CODE (arg0) == CONVERT_EXPR))
8345 /* If we are widening one operand of an integer comparison,
8346 see if the other operand is similarly being widened. Perhaps we
8347 can do the comparison in the narrower type. */
8348 tem = fold_widened_comparison (code, type, arg0, arg1);
8349 if (tem)
8350 return tem;
8352 /* Or if we are changing signedness. */
8353 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8354 if (tem)
8355 return tem;
8358 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8359 constant, we can simplify it. */
8360 if (TREE_CODE (arg1) == INTEGER_CST
8361 && (TREE_CODE (arg0) == MIN_EXPR
8362 || TREE_CODE (arg0) == MAX_EXPR)
8363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8365 tem = optimize_minmax_comparison (code, type, op0, op1);
8366 if (tem)
8367 return tem;
8370 /* Simplify comparison of something with itself. (For IEEE
8371 floating-point, we can only do some of these simplifications.) */
8372 if (operand_equal_p (arg0, arg1, 0))
8374 switch (code)
8376 case EQ_EXPR:
8377 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8378 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8379 return constant_boolean_node (1, type);
8380 break;
8382 case GE_EXPR:
8383 case LE_EXPR:
8384 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8385 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8386 return constant_boolean_node (1, type);
8387 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8389 case NE_EXPR:
8390 /* For NE, we can only do this simplification if integer
8391 or we don't honor IEEE floating point NaNs. */
8392 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8393 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8394 break;
8395 /* ... fall through ... */
8396 case GT_EXPR:
8397 case LT_EXPR:
8398 return constant_boolean_node (0, type);
8399 default:
8400 gcc_unreachable ();
8404 /* If we are comparing an expression that just has comparisons
8405 of two integer values, arithmetic expressions of those comparisons,
8406 and constants, we can simplify it. There are only three cases
8407 to check: the two values can either be equal, the first can be
8408 greater, or the second can be greater. Fold the expression for
8409 those three values. Since each value must be 0 or 1, we have
8410 eight possibilities, each of which corresponds to the constant 0
8411 or 1 or one of the six possible comparisons.
8413 This handles common cases like (a > b) == 0 but also handles
8414 expressions like ((x > y) - (y > x)) > 0, which supposedly
8415 occur in macroized code. */
8417 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8419 tree cval1 = 0, cval2 = 0;
8420 int save_p = 0;
8422 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8423 /* Don't handle degenerate cases here; they should already
8424 have been handled anyway. */
8425 && cval1 != 0 && cval2 != 0
8426 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8427 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8428 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8429 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8430 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8431 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8432 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8434 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8435 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8437 /* We can't just pass T to eval_subst in case cval1 or cval2
8438 was the same as ARG1. */
8440 tree high_result
8441 = fold_build2 (code, type,
8442 eval_subst (arg0, cval1, maxval,
8443 cval2, minval),
8444 arg1);
8445 tree equal_result
8446 = fold_build2 (code, type,
8447 eval_subst (arg0, cval1, maxval,
8448 cval2, maxval),
8449 arg1);
8450 tree low_result
8451 = fold_build2 (code, type,
8452 eval_subst (arg0, cval1, minval,
8453 cval2, maxval),
8454 arg1);
8456 /* All three of these results should be 0 or 1. Confirm they are.
8457 Then use those values to select the proper code to use. */
8459 if (TREE_CODE (high_result) == INTEGER_CST
8460 && TREE_CODE (equal_result) == INTEGER_CST
8461 && TREE_CODE (low_result) == INTEGER_CST)
8463 /* Make a 3-bit mask with the high-order bit being the
8464 value for `>', the next for `=', and the low for `<'. */
8465 switch ((integer_onep (high_result) * 4)
8466 + (integer_onep (equal_result) * 2)
8467 + integer_onep (low_result))
8469 case 0:
8470 /* Always false. */
8471 return omit_one_operand (type, integer_zero_node, arg0);
8472 case 1:
8473 code = LT_EXPR;
8474 break;
8475 case 2:
8476 code = EQ_EXPR;
8477 break;
8478 case 3:
8479 code = LE_EXPR;
8480 break;
8481 case 4:
8482 code = GT_EXPR;
8483 break;
8484 case 5:
8485 code = NE_EXPR;
8486 break;
8487 case 6:
8488 code = GE_EXPR;
8489 break;
8490 case 7:
8491 /* Always true. */
8492 return omit_one_operand (type, integer_one_node, arg0);
8495 if (save_p)
8496 return save_expr (build2 (code, type, cval1, cval2));
8497 return fold_build2 (code, type, cval1, cval2);
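/* Worked example (editorial sketch): for  (a > b) == 0  the three
   trial foldings give high_result = 0 (cval1 maximal, cval2 minimal),
   equal_result = 1 and low_result = 1, i.e. mask 011 = 3, which the
   table above maps to LE_EXPR:

       (a > b) == 0   ->   a <= b                                     */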
8502 /* Fold a comparison of the address of COMPONENT_REFs with the same
8503 type and component to a comparison of the address of the base
8504 object. In short, &x->a OP &y->a to x OP y and
8505 &x->a OP &y.a to x OP &y */
8506 if (TREE_CODE (arg0) == ADDR_EXPR
8507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8508 && TREE_CODE (arg1) == ADDR_EXPR
8509 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8511 tree cref0 = TREE_OPERAND (arg0, 0);
8512 tree cref1 = TREE_OPERAND (arg1, 0);
8513 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8515 tree op0 = TREE_OPERAND (cref0, 0);
8516 tree op1 = TREE_OPERAND (cref1, 0);
8517 return fold_build2 (code, type,
8518 build_fold_addr_expr (op0),
8519 build_fold_addr_expr (op1));
8523 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8524 into a single range test. */
8525 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8526 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8527 && TREE_CODE (arg1) == INTEGER_CST
8528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8529 && !integer_zerop (TREE_OPERAND (arg0, 1))
8530 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8531 && !TREE_OVERFLOW (arg1))
8533 tem = fold_div_compare (code, type, arg0, arg1);
8534 if (tem != NULL_TREE)
8535 return tem;
8538 /* Fold ~X op ~Y as Y op X. */
8539 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8540 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8541 return fold_build2 (code, type,
8542 TREE_OPERAND (arg1, 0),
8543 TREE_OPERAND (arg0, 0));
8545 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8546 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8547 && TREE_CODE (arg1) == INTEGER_CST)
8548 return fold_build2 (swap_tree_comparison (code), type,
8549 TREE_OPERAND (arg0, 0),
8550 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
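/* Worked examples (editorial sketch) of the two BIT_NOT_EXPR rules
   above:

       ~x <  ~y   ->   y < x
       ~x == 7    ->   x == ~7    // i.e. x == -8 for a signed int    */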
8552 return NULL_TREE;
8556 /* Subroutine of fold_binary. Optimize complex multiplications of the
8557 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8558 argument EXPR represents the expression "z" of type TYPE. */
8560 static tree
8561 fold_mult_zconjz (tree type, tree expr)
8563 tree itype = TREE_TYPE (type);
8564 tree rpart, ipart, tem;
8566 if (TREE_CODE (expr) == COMPLEX_EXPR)
8568 rpart = TREE_OPERAND (expr, 0);
8569 ipart = TREE_OPERAND (expr, 1);
8571 else if (TREE_CODE (expr) == COMPLEX_CST)
8573 rpart = TREE_REALPART (expr);
8574 ipart = TREE_IMAGPART (expr);
8576 else
8578 expr = save_expr (expr);
8579 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8580 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8583 rpart = save_expr (rpart);
8584 ipart = save_expr (ipart);
8585 tem = fold_build2 (PLUS_EXPR, itype,
8586 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8587 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8588 return fold_build2 (COMPLEX_EXPR, type, tem,
8589 fold_convert (itype, integer_zero_node));
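/* Worked example (editorial sketch): for  z = a + b*i  this folds

       z * ~z   ->   (a*a + b*b) + 0*i

   (~ being the GNU C complex conjugate), with save_expr ensuring the
   real and imaginary parts of a non-trivial z are evaluated only
   once.  */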
8593 /* Fold a binary expression of code CODE and type TYPE with operands
8594 OP0 and OP1. Return the folded expression if folding is
8595 successful. Otherwise, return NULL_TREE. */
8597 tree
8598 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8600 enum tree_code_class kind = TREE_CODE_CLASS (code);
8601 tree arg0, arg1, tem;
8602 tree t1 = NULL_TREE;
8604 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8605 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8606 && TREE_CODE_LENGTH (code) == 2
8607 && op0 != NULL_TREE
8608 && op1 != NULL_TREE);
8610 arg0 = op0;
8611 arg1 = op1;
8613 /* Strip any conversions that don't change the mode. This is
8614 safe for every expression, except for a comparison expression
8615 because its signedness is derived from its operands. So, in
8616 the latter case, only strip conversions that don't change the
8617 signedness.
8619 Note that this is done as an internal manipulation within the
8620 constant folder, in order to find the simplest representation
8621 of the arguments so that their form can be studied. In any
8622 case, the appropriate type conversions should be put back in
8623 the tree that will get out of the constant folder. */
8625 if (kind == tcc_comparison)
8627 STRIP_SIGN_NOPS (arg0);
8628 STRIP_SIGN_NOPS (arg1);
8630 else
8632 STRIP_NOPS (arg0);
8633 STRIP_NOPS (arg1);
8636 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8637 constant but we can't do arithmetic on them. */
8638 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8639 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8640 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8641 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8643 if (kind == tcc_binary)
8644 tem = const_binop (code, arg0, arg1, 0);
8645 else if (kind == tcc_comparison)
8646 tem = fold_relational_const (code, type, arg0, arg1);
8647 else
8648 tem = NULL_TREE;
8650 if (tem != NULL_TREE)
8652 if (TREE_TYPE (tem) != type)
8653 tem = fold_convert (type, tem);
8654 return tem;
8658 /* If this is a commutative operation, and ARG0 is a constant, move it
8659 to ARG1 to reduce the number of tests below. */
8660 if (commutative_tree_code (code)
8661 && tree_swap_operands_p (arg0, arg1, true))
8662 return fold_build2 (code, type, op1, op0);
8664 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8666 First check for cases where an arithmetic operation is applied to a
8667 compound, conditional, or comparison operation. Push the arithmetic
8668 operation inside the compound or conditional to see if any folding
8669 can then be done. Convert comparison to conditional for this purpose.
8670 This also optimizes non-constant cases that used to be done in
8671 expand_expr.
8673 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
8674 where one of the operands is a comparison and the other is a comparison, a
8675 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8676 code below would make the expression more complex. Change it to a
8677 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8678 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8680 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8681 || code == EQ_EXPR || code == NE_EXPR)
8682 && ((truth_value_p (TREE_CODE (arg0))
8683 && (truth_value_p (TREE_CODE (arg1))
8684 || (TREE_CODE (arg1) == BIT_AND_EXPR
8685 && integer_onep (TREE_OPERAND (arg1, 1)))))
8686 || (truth_value_p (TREE_CODE (arg1))
8687 && (truth_value_p (TREE_CODE (arg0))
8688 || (TREE_CODE (arg0) == BIT_AND_EXPR
8689 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8691 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8692 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8693 : TRUTH_XOR_EXPR,
8694 boolean_type_node,
8695 fold_convert (boolean_type_node, arg0),
8696 fold_convert (boolean_type_node, arg1));
8698 if (code == EQ_EXPR)
8699 tem = invert_truthvalue (tem);
8701 return fold_convert (type, tem);
8704 if (TREE_CODE_CLASS (code) == tcc_binary
8705 || TREE_CODE_CLASS (code) == tcc_comparison)
8707 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8708 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8709 fold_build2 (code, type,
8710 TREE_OPERAND (arg0, 1), op1));
8711 if (TREE_CODE (arg1) == COMPOUND_EXPR
8712 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8713 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8714 fold_build2 (code, type,
8715 op0, TREE_OPERAND (arg1, 1)));
8717 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8719 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8720 arg0, arg1,
8721 /*cond_first_p=*/1);
8722 if (tem != NULL_TREE)
8723 return tem;
8726 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8728 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8729 arg1, arg0,
8730 /*cond_first_p=*/0);
8731 if (tem != NULL_TREE)
8732 return tem;
8736 switch (code)
8738 case PLUS_EXPR:
8739 /* A + (-B) -> A - B */
8740 if (TREE_CODE (arg1) == NEGATE_EXPR)
8741 return fold_build2 (MINUS_EXPR, type,
8742 fold_convert (type, arg0),
8743 fold_convert (type, TREE_OPERAND (arg1, 0)));
8744 /* (-A) + B -> B - A */
8745 if (TREE_CODE (arg0) == NEGATE_EXPR
8746 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8747 return fold_build2 (MINUS_EXPR, type,
8748 fold_convert (type, arg1),
8749 fold_convert (type, TREE_OPERAND (arg0, 0)));
8750 /* Convert ~A + 1 to -A. */
8751 if (INTEGRAL_TYPE_P (type)
8752 && TREE_CODE (arg0) == BIT_NOT_EXPR
8753 && integer_onep (arg1))
8754 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8756 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8757 same or one. */
8758 if ((TREE_CODE (arg0) == MULT_EXPR
8759 || TREE_CODE (arg1) == MULT_EXPR)
8760 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8762 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8763 if (tem)
8764 return tem;
8767 if (! FLOAT_TYPE_P (type))
8769 if (integer_zerop (arg1))
8770 return non_lvalue (fold_convert (type, arg0));
8772 /* ~X + X is -1. */
8773 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8774 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8775 && !TYPE_TRAP_SIGNED (type))
8777 t1 = build_int_cst (type, -1);
8778 t1 = force_fit_type (t1, 0, false, false);
8779 return omit_one_operand (type, t1, arg1);
8782 /* X + ~X is -1. */
8783 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8784 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8785 && !TYPE_TRAP_SIGNED (type))
8787 t1 = build_int_cst (type, -1);
8788 t1 = force_fit_type (t1, 0, false, false);
8789 return omit_one_operand (type, t1, arg0);
8792 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8793 with a constant, and the two constants have no bits in common,
8794 we should treat this as a BIT_IOR_EXPR since this may produce more
8795 simplifications. */
8796 if (TREE_CODE (arg0) == BIT_AND_EXPR
8797 && TREE_CODE (arg1) == BIT_AND_EXPR
8798 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8799 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8800 && integer_zerop (const_binop (BIT_AND_EXPR,
8801 TREE_OPERAND (arg0, 1),
8802 TREE_OPERAND (arg1, 1), 0)))
8804 code = BIT_IOR_EXPR;
8805 goto bit_ior;
8808 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8809 (plus (plus (mult) (mult)) (foo)) so that we can
8810 take advantage of the factoring cases below. */
8811 if (((TREE_CODE (arg0) == PLUS_EXPR
8812 || TREE_CODE (arg0) == MINUS_EXPR)
8813 && TREE_CODE (arg1) == MULT_EXPR)
8814 || ((TREE_CODE (arg1) == PLUS_EXPR
8815 || TREE_CODE (arg1) == MINUS_EXPR)
8816 && TREE_CODE (arg0) == MULT_EXPR))
8818 tree parg0, parg1, parg, marg;
8819 enum tree_code pcode;
8821 if (TREE_CODE (arg1) == MULT_EXPR)
8822 parg = arg0, marg = arg1;
8823 else
8824 parg = arg1, marg = arg0;
8825 pcode = TREE_CODE (parg);
8826 parg0 = TREE_OPERAND (parg, 0);
8827 parg1 = TREE_OPERAND (parg, 1);
8828 STRIP_NOPS (parg0);
8829 STRIP_NOPS (parg1);
8831 if (TREE_CODE (parg0) == MULT_EXPR
8832 && TREE_CODE (parg1) != MULT_EXPR)
8833 return fold_build2 (pcode, type,
8834 fold_build2 (PLUS_EXPR, type,
8835 fold_convert (type, parg0),
8836 fold_convert (type, marg)),
8837 fold_convert (type, parg1));
8838 if (TREE_CODE (parg0) != MULT_EXPR
8839 && TREE_CODE (parg1) == MULT_EXPR)
8840 return fold_build2 (PLUS_EXPR, type,
8841 fold_convert (type, parg0),
8842 fold_build2 (pcode, type,
8843 fold_convert (type, marg),
8844 fold_convert (type,
8845 parg1)));
8848 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8849 of the array. Loop optimizer sometimes produce this type of
8850 expressions. */
8851 if (TREE_CODE (arg0) == ADDR_EXPR)
8853 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8854 if (tem)
8855 return fold_convert (type, tem);
8857 else if (TREE_CODE (arg1) == ADDR_EXPR)
8859 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8860 if (tem)
8861 return fold_convert (type, tem);
8864 else
8866 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8867 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8868 return non_lvalue (fold_convert (type, arg0));
8870 /* Likewise if the operands are reversed. */
8871 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8872 return non_lvalue (fold_convert (type, arg1));
8874 /* Convert X + -C into X - C. */
8875 if (TREE_CODE (arg1) == REAL_CST
8876 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8878 tem = fold_negate_const (arg1, type);
8879 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8880 return fold_build2 (MINUS_EXPR, type,
8881 fold_convert (type, arg0),
8882 fold_convert (type, tem));
8885 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8886 to __complex__ ( x, y ). This is not the same for SNaNs or
8887 if signed zeros are involved. */
8888 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8889 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8890 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8892 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8893 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8894 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8895 bool arg0rz = false, arg0iz = false;
8896 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8897 || (arg0i && (arg0iz = real_zerop (arg0i))))
8899 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8900 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8901 if (arg0rz && arg1i && real_zerop (arg1i))
8903 tree rp = arg1r ? arg1r
8904 : build1 (REALPART_EXPR, rtype, arg1);
8905 tree ip = arg0i ? arg0i
8906 : build1 (IMAGPART_EXPR, rtype, arg0);
8907 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8909 else if (arg0iz && arg1r && real_zerop (arg1r))
8911 tree rp = arg0r ? arg0r
8912 : build1 (REALPART_EXPR, rtype, arg0);
8913 tree ip = arg1i ? arg1i
8914 : build1 (IMAGPART_EXPR, rtype, arg1);
8915 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8920 if (flag_unsafe_math_optimizations
8921 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8922 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8923 && (tem = distribute_real_division (code, type, arg0, arg1)))
8924 return tem;
8926 /* Convert x+x into x*2.0. */
8927 if (operand_equal_p (arg0, arg1, 0)
8928 && SCALAR_FLOAT_TYPE_P (type))
8929 return fold_build2 (MULT_EXPR, type, arg0,
8930 build_real (type, dconst2));
8932 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8933 if (flag_unsafe_math_optimizations
8934 && TREE_CODE (arg1) == PLUS_EXPR
8935 && TREE_CODE (arg0) != MULT_EXPR)
8937 tree tree10 = TREE_OPERAND (arg1, 0);
8938 tree tree11 = TREE_OPERAND (arg1, 1);
8939 if (TREE_CODE (tree11) == MULT_EXPR
8940 && TREE_CODE (tree10) == MULT_EXPR)
8942 tree tree0;
8943 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8944 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8947 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8948 if (flag_unsafe_math_optimizations
8949 && TREE_CODE (arg0) == PLUS_EXPR
8950 && TREE_CODE (arg1) != MULT_EXPR)
8952 tree tree00 = TREE_OPERAND (arg0, 0);
8953 tree tree01 = TREE_OPERAND (arg0, 1);
8954 if (TREE_CODE (tree01) == MULT_EXPR
8955 && TREE_CODE (tree00) == MULT_EXPR)
8957 tree tree0;
8958 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8959 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8964 bit_rotate:
8965 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8966 is a rotate of A by C1 bits. */
8967 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8968 is a rotate of A by B bits. */
8970 enum tree_code code0, code1;
8971 code0 = TREE_CODE (arg0);
8972 code1 = TREE_CODE (arg1);
8973 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8974 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8975 && operand_equal_p (TREE_OPERAND (arg0, 0),
8976 TREE_OPERAND (arg1, 0), 0)
8977 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8979 tree tree01, tree11;
8980 enum tree_code code01, code11;
8982 tree01 = TREE_OPERAND (arg0, 1);
8983 tree11 = TREE_OPERAND (arg1, 1);
8984 STRIP_NOPS (tree01);
8985 STRIP_NOPS (tree11);
8986 code01 = TREE_CODE (tree01);
8987 code11 = TREE_CODE (tree11);
8988 if (code01 == INTEGER_CST
8989 && code11 == INTEGER_CST
8990 && TREE_INT_CST_HIGH (tree01) == 0
8991 && TREE_INT_CST_HIGH (tree11) == 0
8992 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8993 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8994 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8995 code0 == LSHIFT_EXPR ? tree01 : tree11);
8996 else if (code11 == MINUS_EXPR)
8998 tree tree110, tree111;
8999 tree110 = TREE_OPERAND (tree11, 0);
9000 tree111 = TREE_OPERAND (tree11, 1);
9001 STRIP_NOPS (tree110);
9002 STRIP_NOPS (tree111);
9003 if (TREE_CODE (tree110) == INTEGER_CST
9004 && 0 == compare_tree_int (tree110,
9005 TYPE_PRECISION
9006 (TREE_TYPE (TREE_OPERAND
9007 (arg0, 0))))
9008 && operand_equal_p (tree01, tree111, 0))
9009 return build2 ((code0 == LSHIFT_EXPR
9010 ? LROTATE_EXPR
9011 : RROTATE_EXPR),
9012 type, TREE_OPERAND (arg0, 0), tree01);
9014 else if (code01 == MINUS_EXPR)
9016 tree tree010, tree011;
9017 tree010 = TREE_OPERAND (tree01, 0);
9018 tree011 = TREE_OPERAND (tree01, 1);
9019 STRIP_NOPS (tree010);
9020 STRIP_NOPS (tree011);
9021 if (TREE_CODE (tree010) == INTEGER_CST
9022 && 0 == compare_tree_int (tree010,
9023 TYPE_PRECISION
9024 (TREE_TYPE (TREE_OPERAND
9025 (arg0, 0))))
9026 && operand_equal_p (tree11, tree011, 0))
9027 return build2 ((code0 != LSHIFT_EXPR
9028 ? LROTATE_EXPR
9029 : RROTATE_EXPR),
9030 type, TREE_OPERAND (arg0, 0), tree11);
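/* Editorial sketch (guarded out): building the pattern above through
   the tree API, with `x' standing for some hypothetical unsigned int
   variable tree and a 32-bit unsigned int assumed; since
   3 + 29 == 32 == TYPE_PRECISION, fold_binary hands back an
   LROTATE_EXPR.  */
#if 0
tree lo  = fold_build2 (LSHIFT_EXPR, unsigned_type_node, x,
                        build_int_cst (NULL_TREE, 3));
tree hi  = fold_build2 (RSHIFT_EXPR, unsigned_type_node, x,
                        build_int_cst (NULL_TREE, 29));
tree rot = fold_build2 (PLUS_EXPR, unsigned_type_node, lo, hi);
/* rot is now  x <rotated left by> 3.  */
#endif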
9035 associate:
9036 /* In most languages, we can't reassociate operations on floats
9037 across parentheses. Rather than remember where the parentheses
9038 were, we don't associate floats at all, unless the user has
9039 specified -funsafe-math-optimizations. */
9041 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9043 tree var0, con0, lit0, minus_lit0;
9044 tree var1, con1, lit1, minus_lit1;
9046 /* Split both trees into variables, constants, and literals. Then
9047 associate each group together, the constants with literals,
9048 then the result with variables. This increases the chances of
9049 literals being recombined later and of generating relocatable
9050 expressions for the sum of a constant and literal. */
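/* E.g. (x + &s) + (y + 3), with s static, splits into the variables
   x and y, the constant (but non-literal) &s, and the literal 3,
   and is re-associated as (x + y) + (&s + 3), keeping &s + 3 as a
   single relocatable sum.  */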
9051 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9052 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9053 code == MINUS_EXPR);
9055 /* Only do something if we found more than two objects. Otherwise,
9056 nothing has changed and we risk infinite recursion. */
9057 if (2 < ((var0 != 0) + (var1 != 0)
9058 + (con0 != 0) + (con1 != 0)
9059 + (lit0 != 0) + (lit1 != 0)
9060 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9062 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9063 if (code == MINUS_EXPR)
9064 code = PLUS_EXPR;
9066 var0 = associate_trees (var0, var1, code, type);
9067 con0 = associate_trees (con0, con1, code, type);
9068 lit0 = associate_trees (lit0, lit1, code, type);
9069 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9071 /* Preserve the MINUS_EXPR if the negative part of the literal is
9072 greater than the positive part. Otherwise, the multiplicative
9073 folding code (i.e. extract_muldiv) may be fooled when
9074 unsigned constants are subtracted, as in the following
9075 example: ((X*2 + 4) - 8U)/2. */
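/* In that example the positive literal 4 and the negative literal
   8U combine into a single subtraction of 4U, keeping a MINUS_EXPR
   rather than an addition of the huge unsigned constant -4U, which
   would mislead extract_muldiv when dividing by 2.  */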
9076 if (minus_lit0 && lit0)
9078 if (TREE_CODE (lit0) == INTEGER_CST
9079 && TREE_CODE (minus_lit0) == INTEGER_CST
9080 && tree_int_cst_lt (lit0, minus_lit0))
9082 minus_lit0 = associate_trees (minus_lit0, lit0,
9083 MINUS_EXPR, type);
9084 lit0 = 0;
9086 else
9088 lit0 = associate_trees (lit0, minus_lit0,
9089 MINUS_EXPR, type);
9090 minus_lit0 = 0;
9093 if (minus_lit0)
9095 if (con0 == 0)
9096 return fold_convert (type,
9097 associate_trees (var0, minus_lit0,
9098 MINUS_EXPR, type));
9099 else
9101 con0 = associate_trees (con0, minus_lit0,
9102 MINUS_EXPR, type);
9103 return fold_convert (type,
9104 associate_trees (var0, con0,
9105 PLUS_EXPR, type));
9109 con0 = associate_trees (con0, lit0, code, type);
9110 return fold_convert (type, associate_trees (var0, con0,
9111 code, type));
9115 return NULL_TREE;
9117 case MINUS_EXPR:
9118 /* A - (-B) -> A + B */
9119 if (TREE_CODE (arg1) == NEGATE_EXPR)
9120 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9121 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9122 if (TREE_CODE (arg0) == NEGATE_EXPR
9123 && (FLOAT_TYPE_P (type)
9124 || INTEGRAL_TYPE_P (type))
9125 && negate_expr_p (arg1)
9126 && reorder_operands_p (arg0, arg1))
9127 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9128 TREE_OPERAND (arg0, 0));
9129 /* Convert -A - 1 to ~A. */
9130 if (INTEGRAL_TYPE_P (type)
9131 && TREE_CODE (arg0) == NEGATE_EXPR
9132 && integer_onep (arg1)
9133 && !TYPE_TRAP_SIGNED (type))
9134 return fold_build1 (BIT_NOT_EXPR, type,
9135 fold_convert (type, TREE_OPERAND (arg0, 0)));
9137 /* Convert -1 - A to ~A. */
9138 if (INTEGRAL_TYPE_P (type)
9139 && integer_all_onesp (arg0))
9140 return fold_build1 (BIT_NOT_EXPR, type, op1);
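/* Both folds above rely on the two's-complement identity
   ~A == -A - 1; the TYPE_TRAP_SIGNED check keeps the first one
   away from types where signed negation may trap.  */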
9142 if (! FLOAT_TYPE_P (type))
9144 if (integer_zerop (arg0))
9145 return negate_expr (fold_convert (type, arg1));
9146 if (integer_zerop (arg1))
9147 return non_lvalue (fold_convert (type, arg0));
9149 /* Fold A - (A & B) into ~B & A. */
9150 if (!TREE_SIDE_EFFECTS (arg0)
9151 && TREE_CODE (arg1) == BIT_AND_EXPR)
9153 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9154 return fold_build2 (BIT_AND_EXPR, type,
9155 fold_build1 (BIT_NOT_EXPR, type,
9156 TREE_OPERAND (arg1, 0)),
9157 arg0);
9158 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9159 return fold_build2 (BIT_AND_EXPR, type,
9160 fold_build1 (BIT_NOT_EXPR, type,
9161 TREE_OPERAND (arg1, 1)),
9162 arg0);
9165 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9166 any power of 2 minus 1. */
9167 if (TREE_CODE (arg0) == BIT_AND_EXPR
9168 && TREE_CODE (arg1) == BIT_AND_EXPR
9169 && operand_equal_p (TREE_OPERAND (arg0, 0),
9170 TREE_OPERAND (arg1, 0), 0))
9172 tree mask0 = TREE_OPERAND (arg0, 1);
9173 tree mask1 = TREE_OPERAND (arg1, 1);
9174 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9176 if (operand_equal_p (tem, mask1, 0))
9178 tem = fold_build2 (BIT_XOR_EXPR, type,
9179 TREE_OPERAND (arg0, 0), mask1);
9180 return fold_build2 (MINUS_EXPR, type, tem, mask1);
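/* Worked example with A == 13, B == 7: (13 & ~7) - (13 & 7)
   == 8 - 5 == 3, and likewise (13 ^ 7) - 7 == 10 - 7 == 3.  */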
9185 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9186 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9187 return non_lvalue (fold_convert (type, arg0));
9189 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9190 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9191 (-ARG1 + ARG0) reduces to -ARG1. */
9192 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9193 return negate_expr (fold_convert (type, arg1));
9195 /* Fold &x - &x. This can happen from &x.foo - &x.
9196 This is unsafe for certain floats even in non-IEEE formats.
9197 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9198 Also note that operand_equal_p is always false if an operand
9199 is volatile. */
9201 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9202 && operand_equal_p (arg0, arg1, 0))
9203 return fold_convert (type, integer_zero_node);
9205 /* A - B -> A + (-B) if B is easily negatable. */
9206 if (negate_expr_p (arg1)
9207 && ((FLOAT_TYPE_P (type)
9208 /* Avoid this transformation if B is a positive REAL_CST. */
9209 && (TREE_CODE (arg1) != REAL_CST
9210 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9211 || INTEGRAL_TYPE_P (type)))
9212 return fold_build2 (PLUS_EXPR, type,
9213 fold_convert (type, arg0),
9214 fold_convert (type, negate_expr (arg1)));
9216 /* Try folding difference of addresses. */
9218 HOST_WIDE_INT diff;
9220 if ((TREE_CODE (arg0) == ADDR_EXPR
9221 || TREE_CODE (arg1) == ADDR_EXPR)
9222 && ptr_difference_const (arg0, arg1, &diff))
9223 return build_int_cst_type (type, diff);
9226 /* Fold &a[i] - &a[j] to i-j. */
9227 if (TREE_CODE (arg0) == ADDR_EXPR
9228 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9229 && TREE_CODE (arg1) == ADDR_EXPR
9230 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9232 tree aref0 = TREE_OPERAND (arg0, 0);
9233 tree aref1 = TREE_OPERAND (arg1, 0);
9234 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9235 TREE_OPERAND (aref1, 0), 0))
9237 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9238 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9239 tree esz = array_ref_element_size (aref0);
9240 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9241 return fold_build2 (MULT_EXPR, type, diff,
9242 fold_convert (type, esz));
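/* Strictly, what is built here is the byte offset
   (i - j) * element_size; the division that yields the element
   difference i - j is presumably performed by the enclosing
   pointer-difference expression.  */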
9247 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
9248 step (element size) of the array. The loop optimizer sometimes
9249 produces this kind of expression. */
9250 if (TREE_CODE (arg0) == ADDR_EXPR)
9252 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9253 if (tem)
9254 return fold_convert (type, tem);
9257 if (flag_unsafe_math_optimizations
9258 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9259 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9260 && (tem = distribute_real_division (code, type, arg0, arg1)))
9261 return tem;
9263 /* Handle (A1 * C1) - (A2 * C2) where either A1 and A2 or C1 and
9264 C2 are the same, or one of them is 1. */
9265 if ((TREE_CODE (arg0) == MULT_EXPR
9266 || TREE_CODE (arg1) == MULT_EXPR)
9267 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9269 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9270 if (tem)
9271 return tem;
9274 goto associate;
9276 case MULT_EXPR:
9277 /* (-A) * (-B) -> A * B */
9278 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9279 return fold_build2 (MULT_EXPR, type,
9280 fold_convert (type, TREE_OPERAND (arg0, 0)),
9281 fold_convert (type, negate_expr (arg1)));
9282 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9283 return fold_build2 (MULT_EXPR, type,
9284 fold_convert (type, negate_expr (arg0)),
9285 fold_convert (type, TREE_OPERAND (arg1, 0)));
9287 if (! FLOAT_TYPE_P (type))
9289 if (integer_zerop (arg1))
9290 return omit_one_operand (type, arg1, arg0);
9291 if (integer_onep (arg1))
9292 return non_lvalue (fold_convert (type, arg0));
9293 /* Transform x * -1 into -x. */
9294 if (integer_all_onesp (arg1))
9295 return fold_convert (type, negate_expr (arg0));
9296 /* Transform x * -C into -x * C if x is easily negatable. */
9297 if (TREE_CODE (arg1) == INTEGER_CST
9298 && tree_int_cst_sgn (arg1) == -1
9299 && negate_expr_p (arg0)
9300 && (tem = negate_expr (arg1)) != arg1
9301 && !TREE_OVERFLOW (tem))
9302 return fold_build2 (MULT_EXPR, type,
9303 negate_expr (arg0), tem);
9305 /* (a * (1 << b)) is (a << b). */
9306 if (TREE_CODE (arg1) == LSHIFT_EXPR
9307 && integer_onep (TREE_OPERAND (arg1, 0)))
9308 return fold_build2 (LSHIFT_EXPR, type, arg0,
9309 TREE_OPERAND (arg1, 1));
9310 if (TREE_CODE (arg0) == LSHIFT_EXPR
9311 && integer_onep (TREE_OPERAND (arg0, 0)))
9312 return fold_build2 (LSHIFT_EXPR, type, arg1,
9313 TREE_OPERAND (arg0, 1));
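/* E.g. a * (1 << 3) and (1 << 3) * a both become a << 3,
   replacing the multiplication with a shift.  */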
9315 if (TREE_CODE (arg1) == INTEGER_CST
9316 && 0 != (tem = extract_muldiv (op0,
9317 fold_convert (type, arg1),
9318 code, NULL_TREE)))
9319 return fold_convert (type, tem);
9321 /* Optimize z * conj(z) for integer complex numbers. */
9322 if (TREE_CODE (arg0) == CONJ_EXPR
9323 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9324 return fold_mult_zconjz (type, arg1);
9325 if (TREE_CODE (arg1) == CONJ_EXPR
9326 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9327 return fold_mult_zconjz (type, arg0);
9329 else
9331 /* Maybe fold x * 0 to 0. The expressions aren't the same
9332 when x is NaN, since x * 0 is also NaN. Nor are they the
9333 same in modes with signed zeros, since multiplying a
9334 negative value by 0 gives -0, not +0. */
9335 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9336 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9337 && real_zerop (arg1))
9338 return omit_one_operand (type, arg1, arg0);
9339 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9340 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9341 && real_onep (arg1))
9342 return non_lvalue (fold_convert (type, arg0));
9344 /* Transform x * -1.0 into -x. */
9345 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9346 && real_minus_onep (arg1))
9347 return fold_convert (type, negate_expr (arg0));
9349 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9350 if (flag_unsafe_math_optimizations
9351 && TREE_CODE (arg0) == RDIV_EXPR
9352 && TREE_CODE (arg1) == REAL_CST
9353 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9355 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9356 arg1, 0);
9357 if (tem)
9358 return fold_build2 (RDIV_EXPR, type, tem,
9359 TREE_OPERAND (arg0, 1));
9362 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9363 if (operand_equal_p (arg0, arg1, 0))
9365 tree tem = fold_strip_sign_ops (arg0);
9366 if (tem != NULL_TREE)
9368 tem = fold_convert (type, tem);
9369 return fold_build2 (MULT_EXPR, type, tem, tem);
9373 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9374 This is not the same for NaNs or if signed zeros are
9375 involved. */
9376 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9377 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9378 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9379 && TREE_CODE (arg1) == COMPLEX_CST
9380 && real_zerop (TREE_REALPART (arg1)))
9382 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9383 if (real_onep (TREE_IMAGPART (arg1)))
9384 return fold_build2 (COMPLEX_EXPR, type,
9385 negate_expr (fold_build1 (IMAGPART_EXPR,
9386 rtype, arg0)),
9387 fold_build1 (REALPART_EXPR, rtype, arg0));
9388 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9389 return fold_build2 (COMPLEX_EXPR, type,
9390 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9391 negate_expr (fold_build1 (REALPART_EXPR,
9392 rtype, arg0)));
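/* E.g. (a + b*I) * I == -b + a*I: the real and imaginary parts
   swap, with a single negation.  */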
9395 /* Optimize z * conj(z) for floating point complex numbers.
9396 Guarded by flag_unsafe_math_optimizations as non-finite
9397 imaginary components don't produce scalar results. */
9398 if (flag_unsafe_math_optimizations
9399 && TREE_CODE (arg0) == CONJ_EXPR
9400 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9401 return fold_mult_zconjz (type, arg1);
9402 if (flag_unsafe_math_optimizations
9403 && TREE_CODE (arg1) == CONJ_EXPR
9404 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9405 return fold_mult_zconjz (type, arg0);
9407 if (flag_unsafe_math_optimizations)
9409 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9410 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9412 /* Optimizations of root(...)*root(...). */
9413 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9415 tree rootfn, arg, arglist;
9416 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9417 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9419 /* Optimize sqrt(x)*sqrt(x) as x. */
9420 if (BUILTIN_SQRT_P (fcode0)
9421 && operand_equal_p (arg00, arg10, 0)
9422 && ! HONOR_SNANS (TYPE_MODE (type)))
9423 return arg00;
9425 /* Optimize root(x)*root(y) as root(x*y). */
9426 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9427 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9428 arglist = build_tree_list (NULL_TREE, arg);
9429 return build_function_call_expr (rootfn, arglist);
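/* E.g. sqrt (x) * sqrt (y) becomes sqrt (x * y).  This needs
   flag_unsafe_math_optimizations: for x == y == -1.0 the
   original is NaN but the folded form is 1.0.  */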
9432 /* Optimize expN(x)*expN(y) as expN(x+y). */
9433 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9435 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9436 tree arg = fold_build2 (PLUS_EXPR, type,
9437 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9438 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9439 tree arglist = build_tree_list (NULL_TREE, arg);
9440 return build_function_call_expr (expfn, arglist);
9443 /* Optimizations of pow(...)*pow(...). */
9444 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9445 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9446 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9448 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9449 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9450 1)));
9451 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9452 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9453 1)));
9455 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9456 if (operand_equal_p (arg01, arg11, 0))
9458 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9459 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9460 tree arglist = tree_cons (NULL_TREE, arg,
9461 build_tree_list (NULL_TREE,
9462 arg01));
9463 return build_function_call_expr (powfn, arglist);
9466 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9467 if (operand_equal_p (arg00, arg10, 0))
9469 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9470 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9471 tree arglist = tree_cons (NULL_TREE, arg00,
9472 build_tree_list (NULL_TREE,
9473 arg));
9474 return build_function_call_expr (powfn, arglist);
9478 /* Optimize tan(x)*cos(x) as sin(x). */
9479 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9480 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9481 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9482 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9483 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9484 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9485 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9486 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9488 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9490 if (sinfn != NULL_TREE)
9491 return build_function_call_expr (sinfn,
9492 TREE_OPERAND (arg0, 1));
9495 /* Optimize x*pow(x,c) as pow(x,c+1). */
9496 if (fcode1 == BUILT_IN_POW
9497 || fcode1 == BUILT_IN_POWF
9498 || fcode1 == BUILT_IN_POWL)
9500 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9501 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9502 1)));
9503 if (TREE_CODE (arg11) == REAL_CST
9504 && ! TREE_CONSTANT_OVERFLOW (arg11)
9505 && operand_equal_p (arg0, arg10, 0))
9507 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9508 REAL_VALUE_TYPE c;
9509 tree arg, arglist;
9511 c = TREE_REAL_CST (arg11);
9512 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9513 arg = build_real (type, c);
9514 arglist = build_tree_list (NULL_TREE, arg);
9515 arglist = tree_cons (NULL_TREE, arg0, arglist);
9516 return build_function_call_expr (powfn, arglist);
9520 /* Optimize pow(x,c)*x as pow(x,c+1). */
9521 if (fcode0 == BUILT_IN_POW
9522 || fcode0 == BUILT_IN_POWF
9523 || fcode0 == BUILT_IN_POWL)
9525 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9526 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9527 1)));
9528 if (TREE_CODE (arg01) == REAL_CST
9529 && ! TREE_CONSTANT_OVERFLOW (arg01)
9530 && operand_equal_p (arg1, arg00, 0))
9532 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9533 REAL_VALUE_TYPE c;
9534 tree arg, arglist;
9536 c = TREE_REAL_CST (arg01);
9537 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9538 arg = build_real (type, c);
9539 arglist = build_tree_list (NULL_TREE, arg);
9540 arglist = tree_cons (NULL_TREE, arg1, arglist);
9541 return build_function_call_expr (powfn, arglist);
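/* Together with the previous case, both x*pow(x,c) and
   pow(x,c)*x fold the same way; e.g. pow (x, 2.0) * x becomes
   pow (x, 3.0).  */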
9545 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9546 if (! optimize_size
9547 && operand_equal_p (arg0, arg1, 0))
9549 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9551 if (powfn)
9553 tree arg = build_real (type, dconst2);
9554 tree arglist = build_tree_list (NULL_TREE, arg);
9555 arglist = tree_cons (NULL_TREE, arg0, arglist);
9556 return build_function_call_expr (powfn, arglist);
9561 goto associate;
9563 case BIT_IOR_EXPR:
9564 bit_ior:
9565 if (integer_all_onesp (arg1))
9566 return omit_one_operand (type, arg1, arg0);
9567 if (integer_zerop (arg1))
9568 return non_lvalue (fold_convert (type, arg0));
9569 if (operand_equal_p (arg0, arg1, 0))
9570 return non_lvalue (fold_convert (type, arg0));
9572 /* ~X | X is -1. */
9573 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9574 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9576 t1 = build_int_cst (type, -1);
9577 t1 = force_fit_type (t1, 0, false, false);
9578 return omit_one_operand (type, t1, arg1);
9581 /* X | ~X is -1. */
9582 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9583 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9585 t1 = build_int_cst (type, -1);
9586 t1 = force_fit_type (t1, 0, false, false);
9587 return omit_one_operand (type, t1, arg0);
9590 /* Canonicalize (X & C1) | C2. */
9591 if (TREE_CODE (arg0) == BIT_AND_EXPR
9592 && TREE_CODE (arg1) == INTEGER_CST
9593 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9595 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9596 int width = TYPE_PRECISION (type);
9597 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9598 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9599 hi2 = TREE_INT_CST_HIGH (arg1);
9600 lo2 = TREE_INT_CST_LOW (arg1);
9602 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9603 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9604 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9606 if (width > HOST_BITS_PER_WIDE_INT)
9608 mhi = (unsigned HOST_WIDE_INT) -1
9609 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9610 mlo = -1;
9612 else
9614 mhi = 0;
9615 mlo = (unsigned HOST_WIDE_INT) -1
9616 >> (HOST_BITS_PER_WIDE_INT - width);
9619 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9620 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9621 return fold_build2 (BIT_IOR_EXPR, type,
9622 TREE_OPERAND (arg0, 0), arg1);
9624 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9625 hi1 &= mhi;
9626 lo1 &= mlo;
9627 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9628 return fold_build2 (BIT_IOR_EXPR, type,
9629 fold_build2 (BIT_AND_EXPR, type,
9630 TREE_OPERAND (arg0, 0),
9631 build_int_cst_wide (type,
9632 lo1 & ~lo2,
9633 hi1 & ~hi2)),
9634 arg1);
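/* Worked example: (X & 0xff) | 0x0f.  Neither special case
   applies, so C1 is minimized to C1 & ~C2 == 0xf0, giving
   (X & 0xf0) | 0x0f.  */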
9637 /* (X & Y) | Y is (X, Y). */
9638 if (TREE_CODE (arg0) == BIT_AND_EXPR
9639 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9640 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9641 /* (X & Y) | X is (Y, X). */
9642 if (TREE_CODE (arg0) == BIT_AND_EXPR
9643 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9644 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9645 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9646 /* X | (X & Y) is (Y, X). */
9647 if (TREE_CODE (arg1) == BIT_AND_EXPR
9648 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9649 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9650 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9651 /* X | (Y & X) is (Y, X). */
9652 if (TREE_CODE (arg1) == BIT_AND_EXPR
9653 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9654 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9655 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9657 t1 = distribute_bit_expr (code, type, arg0, arg1);
9658 if (t1 != NULL_TREE)
9659 return t1;
9661 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9663 This results in more efficient code for machines without a NAND
9664 instruction. Combine will canonicalize to the first form
9665 which will allow use of NAND instructions provided by the
9666 backend if they exist. */
9667 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9668 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9670 return fold_build1 (BIT_NOT_EXPR, type,
9671 build2 (BIT_AND_EXPR, type,
9672 TREE_OPERAND (arg0, 0),
9673 TREE_OPERAND (arg1, 0)));
9676 /* See if this can be simplified into a rotate first. If that
9677 is unsuccessful, continue in the association code. */
9678 goto bit_rotate;
9680 case BIT_XOR_EXPR:
9681 if (integer_zerop (arg1))
9682 return non_lvalue (fold_convert (type, arg0));
9683 if (integer_all_onesp (arg1))
9684 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9685 if (operand_equal_p (arg0, arg1, 0))
9686 return omit_one_operand (type, integer_zero_node, arg0);
9688 /* ~X ^ X is -1. */
9689 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9690 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9692 t1 = build_int_cst (type, -1);
9693 t1 = force_fit_type (t1, 0, false, false);
9694 return omit_one_operand (type, t1, arg1);
9697 /* X ^ ~X is -1. */
9698 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9701 t1 = build_int_cst (type, -1);
9702 t1 = force_fit_type (t1, 0, false, false);
9703 return omit_one_operand (type, t1, arg0);
9706 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9707 with a constant, and the two constants have no bits in common,
9708 we should treat this as a BIT_IOR_EXPR since this may produce more
9709 simplifications. */
9710 if (TREE_CODE (arg0) == BIT_AND_EXPR
9711 && TREE_CODE (arg1) == BIT_AND_EXPR
9712 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9713 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9714 && integer_zerop (const_binop (BIT_AND_EXPR,
9715 TREE_OPERAND (arg0, 1),
9716 TREE_OPERAND (arg1, 1), 0)))
9718 code = BIT_IOR_EXPR;
9719 goto bit_ior;
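/* E.g. (X & 0xf0) ^ (Y & 0x0f): the two masks share no bits,
   so the XOR is re-dispatched as (X & 0xf0) | (Y & 0x0f).  */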
9722 /* (X | Y) ^ X -> Y & ~X. */
9723 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9726 tree t2 = TREE_OPERAND (arg0, 1);
9727 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9728 arg1);
9729 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9730 fold_convert (type, t1));
9731 return t1;
9734 /* (Y | X) ^ X -> Y & ~X. */
9735 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9736 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9738 tree t2 = TREE_OPERAND (arg0, 0);
9739 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9740 arg1);
9741 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9742 fold_convert (type, t1));
9743 return t1;
9746 /* X ^ (X | Y) -> Y & ~X. */
9747 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9748 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9750 tree t2 = TREE_OPERAND (arg1, 1);
9751 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9752 arg0);
9753 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9754 fold_convert (type, t1));
9755 return t1;
9758 /* X ^ (Y | X) -> Y & ~X. */
9759 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9760 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9762 tree t2 = TREE_OPERAND (arg1, 0);
9763 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9764 arg0);
9765 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9766 fold_convert (type, t1));
9767 return t1;
9770 /* Convert ~X ^ ~Y to X ^ Y. */
9771 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9772 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9773 return fold_build2 (code, type,
9774 fold_convert (type, TREE_OPERAND (arg0, 0)),
9775 fold_convert (type, TREE_OPERAND (arg1, 0)));
9777 /* Convert ~X ^ C to X ^ ~C. */
9778 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9779 && TREE_CODE (arg1) == INTEGER_CST)
9780 return fold_build2 (code, type,
9781 fold_convert (type, TREE_OPERAND (arg0, 0)),
9782 fold_build1 (BIT_NOT_EXPR, type, arg1));
9784 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9785 if (TREE_CODE (arg0) == BIT_AND_EXPR
9786 && integer_onep (TREE_OPERAND (arg0, 1))
9787 && integer_onep (arg1))
9788 return fold_build2 (EQ_EXPR, type, arg0,
9789 build_int_cst (TREE_TYPE (arg0), 0));
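/* (X & 1) ^ 1 is 1 exactly when the low bit of X is clear,
   hence the rewrite to (X & 1) == 0.  */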
9791 /* Fold (X & Y) ^ Y as ~X & Y. */
9792 if (TREE_CODE (arg0) == BIT_AND_EXPR
9793 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9795 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9796 return fold_build2 (BIT_AND_EXPR, type,
9797 fold_build1 (BIT_NOT_EXPR, type, tem),
9798 fold_convert (type, arg1));
9800 /* Fold (X & Y) ^ X as ~Y & X. */
9801 if (TREE_CODE (arg0) == BIT_AND_EXPR
9802 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9803 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9805 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9806 return fold_build2 (BIT_AND_EXPR, type,
9807 fold_build1 (BIT_NOT_EXPR, type, tem),
9808 fold_convert (type, arg1));
9810 /* Fold X ^ (X & Y) as X & ~Y. */
9811 if (TREE_CODE (arg1) == BIT_AND_EXPR
9812 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9814 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9815 return fold_build2 (BIT_AND_EXPR, type,
9816 fold_convert (type, arg0),
9817 fold_build1 (BIT_NOT_EXPR, type, tem));
9819 /* Fold X ^ (Y & X) as ~Y & X. */
9820 if (TREE_CODE (arg1) == BIT_AND_EXPR
9821 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9822 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9824 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9825 return fold_build2 (BIT_AND_EXPR, type,
9826 fold_build1 (BIT_NOT_EXPR, type, tem),
9827 fold_convert (type, arg0));
9830 /* See if this can be simplified into a rotate first. If that
9831 is unsuccessful, continue in the association code. */
9832 goto bit_rotate;
9834 case BIT_AND_EXPR:
9835 if (integer_all_onesp (arg1))
9836 return non_lvalue (fold_convert (type, arg0));
9837 if (integer_zerop (arg1))
9838 return omit_one_operand (type, arg1, arg0);
9839 if (operand_equal_p (arg0, arg1, 0))
9840 return non_lvalue (fold_convert (type, arg0));
9842 /* ~X & X is always zero. */
9843 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9844 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9845 return omit_one_operand (type, integer_zero_node, arg1);
9847 /* X & ~X is always zero. */
9848 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9849 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9850 return omit_one_operand (type, integer_zero_node, arg0);
9852 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9853 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9854 && TREE_CODE (arg1) == INTEGER_CST
9855 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9856 return fold_build2 (BIT_IOR_EXPR, type,
9857 fold_build2 (BIT_AND_EXPR, type,
9858 TREE_OPERAND (arg0, 0), arg1),
9859 fold_build2 (BIT_AND_EXPR, type,
9860 TREE_OPERAND (arg0, 1), arg1));
9862 /* (X | Y) & Y is (X, Y). */
9863 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9864 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9865 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9866 /* (X | Y) & X is (Y, X). */
9867 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9868 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9869 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9870 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9871 /* X & (X | Y) is (Y, X). */
9872 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9873 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9874 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9875 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9876 /* X & (Y | X) is (Y, X). */
9877 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9878 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9879 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9880 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9882 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9883 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9884 && integer_onep (TREE_OPERAND (arg0, 1))
9885 && integer_onep (arg1))
9887 tem = TREE_OPERAND (arg0, 0);
9888 return fold_build2 (EQ_EXPR, type,
9889 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9890 build_int_cst (TREE_TYPE (tem), 1)),
9891 build_int_cst (TREE_TYPE (tem), 0));
9893 /* Fold ~X & 1 as (X & 1) == 0. */
9894 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9895 && integer_onep (arg1))
9897 tem = TREE_OPERAND (arg0, 0);
9898 return fold_build2 (EQ_EXPR, type,
9899 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9900 build_int_cst (TREE_TYPE (tem), 1)),
9901 build_int_cst (TREE_TYPE (tem), 0));
9904 /* Fold (X ^ Y) & Y as ~X & Y. */
9905 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9906 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9908 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9909 return fold_build2 (BIT_AND_EXPR, type,
9910 fold_build1 (BIT_NOT_EXPR, type, tem),
9911 fold_convert (type, arg1));
9913 /* Fold (X ^ Y) & X as ~Y & X. */
9914 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9915 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9916 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9918 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9919 return fold_build2 (BIT_AND_EXPR, type,
9920 fold_build1 (BIT_NOT_EXPR, type, tem),
9921 fold_convert (type, arg1));
9923 /* Fold X & (X ^ Y) as X & ~Y. */
9924 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9925 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9927 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9928 return fold_build2 (BIT_AND_EXPR, type,
9929 fold_convert (type, arg0),
9930 fold_build1 (BIT_NOT_EXPR, type, tem));
9932 /* Fold X & (Y ^ X) as ~Y & X. */
9933 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9935 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9937 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9938 return fold_build2 (BIT_AND_EXPR, type,
9939 fold_build1 (BIT_NOT_EXPR, type, tem),
9940 fold_convert (type, arg0));
9943 t1 = distribute_bit_expr (code, type, arg0, arg1);
9944 if (t1 != NULL_TREE)
9945 return t1;
9946 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9947 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9948 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9950 unsigned int prec
9951 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9953 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9954 && (~TREE_INT_CST_LOW (arg1)
9955 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9956 return fold_convert (type, TREE_OPERAND (arg0, 0));
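/* E.g. for unsigned char c, ((int) c & 0377) keeps every bit of
   c, since 0377 == 0xff covers the full 8-bit precision, so the
   mask is dropped and the result is just (int) c.  */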
9959 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9961 This results in more efficient code for machines without a NOR
9962 instruction. Combine will canonicalize to the first form
9963 which will allow use of NOR instructions provided by the
9964 backend if they exist. */
9965 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9966 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9968 return fold_build1 (BIT_NOT_EXPR, type,
9969 build2 (BIT_IOR_EXPR, type,
9970 TREE_OPERAND (arg0, 0),
9971 TREE_OPERAND (arg1, 0)));
9974 goto associate;
9976 case RDIV_EXPR:
9977 /* Don't touch a floating-point divide by zero unless the mode
9978 of the constant can represent infinity. */
9979 if (TREE_CODE (arg1) == REAL_CST
9980 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9981 && real_zerop (arg1))
9982 return NULL_TREE;
9984 /* Optimize A / A to 1.0 if we don't care about
9985 NaNs or Infinities. Skip the transformation
9986 for non-real operands. */
9987 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9988 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9989 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9990 && operand_equal_p (arg0, arg1, 0))
9992 tree r = build_real (TREE_TYPE (arg0), dconst1);
9994 return omit_two_operands (type, r, arg0, arg1);
9997 /* The complex version of the above A / A optimization. */
9998 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9999 && operand_equal_p (arg0, arg1, 0))
10001 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10002 if (! HONOR_NANS (TYPE_MODE (elem_type))
10003 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10005 tree r = build_real (elem_type, dconst1);
10006 /* omit_two_operands will call fold_convert for us. */
10007 return omit_two_operands (type, r, arg0, arg1);
10011 /* (-A) / (-B) -> A / B */
10012 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10013 return fold_build2 (RDIV_EXPR, type,
10014 TREE_OPERAND (arg0, 0),
10015 negate_expr (arg1));
10016 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10017 return fold_build2 (RDIV_EXPR, type,
10018 negate_expr (arg0),
10019 TREE_OPERAND (arg1, 0));
10021 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10022 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10023 && real_onep (arg1))
10024 return non_lvalue (fold_convert (type, arg0));
10026 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10027 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10028 && real_minus_onep (arg1))
10029 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10031 /* If ARG1 is a constant, we can convert this to a multiply by the
10032 reciprocal. This does not have the same rounding properties,
10033 so only do this if -funsafe-math-optimizations. We can actually
10034 always safely do it if ARG1 is a power of two, but it's hard to
10035 tell if it is or not in a portable manner. */
10036 if (TREE_CODE (arg1) == REAL_CST)
10038 if (flag_unsafe_math_optimizations
10039 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10040 arg1, 0)))
10041 return fold_build2 (MULT_EXPR, type, arg0, tem);
10042 /* Find the reciprocal if optimizing and the result is exact. */
10043 if (optimize)
10045 REAL_VALUE_TYPE r;
10046 r = TREE_REAL_CST (arg1);
10047 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10049 tem = build_real (type, r);
10050 return fold_build2 (MULT_EXPR, type,
10051 fold_convert (type, arg0), tem);
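/* E.g. when optimizing, x / 4.0 becomes x * 0.25 because the
   reciprocal of 4.0 is exact; x / 3.0 becomes x * (1.0/3.0)
   only under -funsafe-math-optimizations.  */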
10055 /* Convert A/B/C to A/(B*C). */
10056 if (flag_unsafe_math_optimizations
10057 && TREE_CODE (arg0) == RDIV_EXPR)
10058 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10059 fold_build2 (MULT_EXPR, type,
10060 TREE_OPERAND (arg0, 1), arg1));
10062 /* Convert A/(B/C) to (A/B)*C. */
10063 if (flag_unsafe_math_optimizations
10064 && TREE_CODE (arg1) == RDIV_EXPR)
10065 return fold_build2 (MULT_EXPR, type,
10066 fold_build2 (RDIV_EXPR, type, arg0,
10067 TREE_OPERAND (arg1, 0)),
10068 TREE_OPERAND (arg1, 1));
10070 /* Convert C1/(X*C2) into (C1/C2)/X. */
10071 if (flag_unsafe_math_optimizations
10072 && TREE_CODE (arg1) == MULT_EXPR
10073 && TREE_CODE (arg0) == REAL_CST
10074 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10076 tree tem = const_binop (RDIV_EXPR, arg0,
10077 TREE_OPERAND (arg1, 1), 0);
10078 if (tem)
10079 return fold_build2 (RDIV_EXPR, type, tem,
10080 TREE_OPERAND (arg1, 0));
10083 if (flag_unsafe_math_optimizations)
10085 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10086 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10088 /* Optimize sin(x)/cos(x) as tan(x). */
10089 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10090 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10091 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10092 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10093 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10095 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10097 if (tanfn != NULL_TREE)
10098 return build_function_call_expr (tanfn,
10099 TREE_OPERAND (arg0, 1));
10102 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10103 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10104 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10105 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10106 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10107 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10109 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10111 if (tanfn != NULL_TREE)
10113 tree tmp = TREE_OPERAND (arg0, 1);
10114 tmp = build_function_call_expr (tanfn, tmp);
10115 return fold_build2 (RDIV_EXPR, type,
10116 build_real (type, dconst1), tmp);
10120 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10121 NaNs or Infinities. */
10122 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10123 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10124 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10126 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10127 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10129 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10130 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10131 && operand_equal_p (arg00, arg01, 0))
10133 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10135 if (cosfn != NULL_TREE)
10136 return build_function_call_expr (cosfn,
10137 TREE_OPERAND (arg0, 1));
10141 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10142 NaNs or Infinities. */
10143 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10144 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10145 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10147 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10148 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10150 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10151 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10152 && operand_equal_p (arg00, arg01, 0))
10154 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10156 if (cosfn != NULL_TREE)
10158 tree tmp = TREE_OPERAND (arg0, 1);
10159 tmp = build_function_call_expr (cosfn, tmp);
10160 return fold_build2 (RDIV_EXPR, type,
10161 build_real (type, dconst1),
10162 tmp);
10167 /* Optimize pow(x,c)/x as pow(x,c-1). */
10168 if (fcode0 == BUILT_IN_POW
10169 || fcode0 == BUILT_IN_POWF
10170 || fcode0 == BUILT_IN_POWL)
10172 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10173 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10174 if (TREE_CODE (arg01) == REAL_CST
10175 && ! TREE_CONSTANT_OVERFLOW (arg01)
10176 && operand_equal_p (arg1, arg00, 0))
10178 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10179 REAL_VALUE_TYPE c;
10180 tree arg, arglist;
10182 c = TREE_REAL_CST (arg01);
10183 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10184 arg = build_real (type, c);
10185 arglist = build_tree_list (NULL_TREE, arg);
10186 arglist = tree_cons (NULL_TREE, arg1, arglist);
10187 return build_function_call_expr (powfn, arglist);
10191 /* Optimize x/expN(y) into x*expN(-y). */
10192 if (BUILTIN_EXPONENT_P (fcode1))
10194 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10195 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10196 tree arglist = build_tree_list (NULL_TREE,
10197 fold_convert (type, arg));
10198 arg1 = build_function_call_expr (expfn, arglist);
10199 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10202 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10203 if (fcode1 == BUILT_IN_POW
10204 || fcode1 == BUILT_IN_POWF
10205 || fcode1 == BUILT_IN_POWL)
10207 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10208 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10209 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10210 tree neg11 = fold_convert (type, negate_expr (arg11));
10211 tree arglist = tree_cons(NULL_TREE, arg10,
10212 build_tree_list (NULL_TREE, neg11));
10213 arg1 = build_function_call_expr (powfn, arglist);
10214 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10217 return NULL_TREE;
10219 case TRUNC_DIV_EXPR:
10220 case FLOOR_DIV_EXPR:
10221 /* Simplify A / (B << N) where A and B are positive and B is
10222 a power of 2, to A >> (N + log2(B)). */
10223 if (TREE_CODE (arg1) == LSHIFT_EXPR
10224 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10226 tree sval = TREE_OPERAND (arg1, 0);
10227 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10229 tree sh_cnt = TREE_OPERAND (arg1, 1);
10230 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10232 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10233 sh_cnt, build_int_cst (NULL_TREE, pow2));
10234 return fold_build2 (RSHIFT_EXPR, type,
10235 fold_convert (type, arg0), sh_cnt);
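/* E.g. for unsigned A, A / (2 << n) becomes A >> (n + 1),
   since log2 (2) == 1.  */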
10238 /* ... fall through ... */
10240 case ROUND_DIV_EXPR:
10241 case CEIL_DIV_EXPR:
10242 case EXACT_DIV_EXPR:
10243 if (integer_onep (arg1))
10244 return non_lvalue (fold_convert (type, arg0));
10245 if (integer_zerop (arg1))
10246 return NULL_TREE;
10247 /* X / -1 is -X. */
10248 if (!TYPE_UNSIGNED (type)
10249 && TREE_CODE (arg1) == INTEGER_CST
10250 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10251 && TREE_INT_CST_HIGH (arg1) == -1)
10252 return fold_convert (type, negate_expr (arg0));
10254 /* Convert -A / -B to A / B when the type is signed and overflow is
10255 undefined. */
10256 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10257 && TREE_CODE (arg0) == NEGATE_EXPR
10258 && negate_expr_p (arg1))
10259 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10260 negate_expr (arg1));
10261 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10262 && TREE_CODE (arg1) == NEGATE_EXPR
10263 && negate_expr_p (arg0))
10264 return fold_build2 (code, type, negate_expr (arg0),
10265 TREE_OPERAND (arg1, 0));
10267 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10268 operation, EXACT_DIV_EXPR.
10270 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10271 At one time the others generated faster code; it's not clear if they
10272 still do after the last round of changes to the DIV code in expmed.c. */
10273 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10274 && multiple_of_p (type, arg0, arg1))
10275 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10277 if (TREE_CODE (arg1) == INTEGER_CST
10278 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10279 return fold_convert (type, tem);
10281 return NULL_TREE;
10283 case CEIL_MOD_EXPR:
10284 case FLOOR_MOD_EXPR:
10285 case ROUND_MOD_EXPR:
10286 case TRUNC_MOD_EXPR:
10287 /* X % 1 is always zero, but be sure to preserve any side
10288 effects in X. */
10289 if (integer_onep (arg1))
10290 return omit_one_operand (type, integer_zero_node, arg0);
10292 /* For X % 0, return X % 0 unchanged so that we get the
10293 proper warnings and errors. */
10294 if (integer_zerop (arg1))
10295 return NULL_TREE;
10297 /* 0 % X is always zero, but be sure to preserve any side
10298 effects in X. Place this after checking for X == 0. */
10299 if (integer_zerop (arg0))
10300 return omit_one_operand (type, integer_zero_node, arg1);
10302 /* X % -1 is zero. */
10303 if (!TYPE_UNSIGNED (type)
10304 && TREE_CODE (arg1) == INTEGER_CST
10305 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10306 && TREE_INT_CST_HIGH (arg1) == -1)
10307 return omit_one_operand (type, integer_zero_node, arg0);
10309 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10310 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10311 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10312 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10314 tree c = arg1;
10315 /* Also optimize A % (C << N) where C is a power of 2,
10316 to A & ((C << N) - 1). */
10317 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10318 c = TREE_OPERAND (arg1, 0);
10320 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10322 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10323 build_int_cst (TREE_TYPE (arg1), 1));
10324 return fold_build2 (BIT_AND_EXPR, type,
10325 fold_convert (type, arg0),
10326 fold_convert (type, mask));
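/* E.g. for unsigned X, X % 8 becomes X & 7, and X % (4 << n)
   becomes X & ((4 << n) - 1).  */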
10330 /* X % -C is the same as X % C. */
10331 if (code == TRUNC_MOD_EXPR
10332 && !TYPE_UNSIGNED (type)
10333 && TREE_CODE (arg1) == INTEGER_CST
10334 && !TREE_CONSTANT_OVERFLOW (arg1)
10335 && TREE_INT_CST_HIGH (arg1) < 0
10336 && !flag_trapv
10337 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10338 && !sign_bit_p (arg1, arg1))
10339 return fold_build2 (code, type, fold_convert (type, arg0),
10340 fold_convert (type, negate_expr (arg1)));
10342 /* X % -Y is the same as X % Y. */
10343 if (code == TRUNC_MOD_EXPR
10344 && !TYPE_UNSIGNED (type)
10345 && TREE_CODE (arg1) == NEGATE_EXPR
10346 && !flag_trapv)
10347 return fold_build2 (code, type, fold_convert (type, arg0),
10348 fold_convert (type, TREE_OPERAND (arg1, 0)));
10350 if (TREE_CODE (arg1) == INTEGER_CST
10351 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10352 return fold_convert (type, tem);
10354 return NULL_TREE;
10356 case LROTATE_EXPR:
10357 case RROTATE_EXPR:
10358 if (integer_all_onesp (arg0))
10359 return omit_one_operand (type, arg0, arg1);
10360 goto shift;
10362 case RSHIFT_EXPR:
10363 /* Optimize -1 >> x for arithmetic right shifts. */
10364 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10365 return omit_one_operand (type, arg0, arg1);
10366 /* ... fall through ... */
10368 case LSHIFT_EXPR:
10369 shift:
10370 if (integer_zerop (arg1))
10371 return non_lvalue (fold_convert (type, arg0));
10372 if (integer_zerop (arg0))
10373 return omit_one_operand (type, arg0, arg1);
10375 /* Since a negative shift count is not well-defined,
10376 don't try to compute it in the compiler. */
10377 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10378 return NULL_TREE;
10380 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10381 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10382 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10383 && host_integerp (TREE_OPERAND (arg0, 1), false)
10384 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10386 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10387 + TREE_INT_CST_LOW (arg1));
10389 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10390 being well defined. */
10391 if (low >= TYPE_PRECISION (type))
10393 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10394 low = low % TYPE_PRECISION (type);
10395 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10396 return build_int_cst (type, 0);
10397 else
10398 low = TYPE_PRECISION (type) - 1;
10401 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10402 build_int_cst (type, low));
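/* E.g. (x >> 2) >> 3 becomes x >> 5.  If the combined count
   reaches the type precision, the clamping above keeps the
   result well defined, since a single shift by the full
   precision or more would be undefined.  */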
10405 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10406 into x & ((unsigned)-1 >> c) for unsigned types. */
10407 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10408 || (TYPE_UNSIGNED (type)
10409 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10410 && host_integerp (arg1, false)
10411 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10412 && host_integerp (TREE_OPERAND (arg0, 1), false)
10413 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10415 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10416 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10417 tree lshift;
10418 tree arg00;
10420 if (low0 == low1)
10422 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10424 lshift = build_int_cst (type, -1);
10425 lshift = int_const_binop (code, lshift, arg1, 0);
10427 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
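/* E.g. for 32-bit unsigned x, (x >> 4) << 4 becomes
   x & 0xfffffff0, and (x << 4) >> 4 becomes x & 0x0fffffff.  */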
10431 /* Rewrite an LROTATE_EXPR by a constant into an
10432 RROTATE_EXPR by a new constant. */
10433 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10435 tree tem = build_int_cst (TREE_TYPE (arg1),
10436 GET_MODE_BITSIZE (TYPE_MODE (type)));
10437 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10438 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10441 /* If we have a rotate of a bit operation with the rotate count and
10442 the second operand of the bit operation both constant,
10443 permute the two operations. */
10444 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10445 && (TREE_CODE (arg0) == BIT_AND_EXPR
10446 || TREE_CODE (arg0) == BIT_IOR_EXPR
10447 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10449 return fold_build2 (TREE_CODE (arg0), type,
10450 fold_build2 (code, type,
10451 TREE_OPERAND (arg0, 0), arg1),
10452 fold_build2 (code, type,
10453 TREE_OPERAND (arg0, 1), arg1));
10455 /* Two consecutive rotates adding up to the width of the mode can
10456 be ignored. */
10457 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10458 && TREE_CODE (arg0) == RROTATE_EXPR
10459 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10460 && TREE_INT_CST_HIGH (arg1) == 0
10461 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10462 && ((TREE_INT_CST_LOW (arg1)
10463 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10464 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10465 return TREE_OPERAND (arg0, 0);
10467 return NULL_TREE;
10469 case MIN_EXPR:
10470 if (operand_equal_p (arg0, arg1, 0))
10471 return omit_one_operand (type, arg0, arg1);
10472 if (INTEGRAL_TYPE_P (type)
10473 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10474 return omit_one_operand (type, arg1, arg0);
10475 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10476 if (tem)
10477 return tem;
10478 goto associate;
10480 case MAX_EXPR:
10481 if (operand_equal_p (arg0, arg1, 0))
10482 return omit_one_operand (type, arg0, arg1);
10483 if (INTEGRAL_TYPE_P (type)
10484 && TYPE_MAX_VALUE (type)
10485 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10486 return omit_one_operand (type, arg1, arg0);
10487 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10488 if (tem)
10489 return tem;
10490 goto associate;
10492 case TRUTH_ANDIF_EXPR:
10493 /* Note that the operands of this must be ints
10494 and their values must be 0 or 1.
10495 ("true" is a fixed value perhaps depending on the language.) */
10496 /* If first arg is constant zero, return it. */
10497 if (integer_zerop (arg0))
10498 return fold_convert (type, arg0);
10499 case TRUTH_AND_EXPR:
10500 /* If either arg is constant true, drop it. */
10501 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10502 return non_lvalue (fold_convert (type, arg1));
10503 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10504 /* Preserve sequence points. */
10505 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10506 return non_lvalue (fold_convert (type, arg0));
10507 /* If second arg is constant zero, result is zero, but first arg
10508 must be evaluated. */
10509 if (integer_zerop (arg1))
10510 return omit_one_operand (type, arg1, arg0);
10511 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10512 case will be handled here. */
10513 if (integer_zerop (arg0))
10514 return omit_one_operand (type, arg0, arg1);
10516 /* !X && X is always false. */
10517 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10518 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10519 return omit_one_operand (type, integer_zero_node, arg1);
10520 /* X && !X is always false. */
10521 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10522 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10523 return omit_one_operand (type, integer_zero_node, arg0);
10525 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10526 means A >= Y && A != MAX, but in this case we know that
10527 A < X <= MAX. */
10529 if (!TREE_SIDE_EFFECTS (arg0)
10530 && !TREE_SIDE_EFFECTS (arg1))
10532 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10533 if (tem && !operand_equal_p (tem, arg0, 0))
10534 return fold_build2 (code, type, tem, arg1);
10536 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10537 if (tem && !operand_equal_p (tem, arg1, 0))
10538 return fold_build2 (code, type, arg0, tem);
10541 truth_andor:
10542 /* We only do these simplifications if we are optimizing. */
10543 if (!optimize)
10544 return NULL_TREE;
10546 /* Check for things like (A || B) && (A || C). We can convert this
10547 to A || (B && C). Note that either operator can be any of the four
10548 truth and/or operations and the transformation will still be
10549 valid. Also note that we only care about order for the
10550 ANDIF and ORIF operators. If B contains side effects, this
10551 might change the truth-value of A. */
10552 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10553 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10554 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10555 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10556 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10557 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10559 tree a00 = TREE_OPERAND (arg0, 0);
10560 tree a01 = TREE_OPERAND (arg0, 1);
10561 tree a10 = TREE_OPERAND (arg1, 0);
10562 tree a11 = TREE_OPERAND (arg1, 1);
10563 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10564 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10565 && (code == TRUTH_AND_EXPR
10566 || code == TRUTH_OR_EXPR));
10568 if (operand_equal_p (a00, a10, 0))
10569 return fold_build2 (TREE_CODE (arg0), type, a00,
10570 fold_build2 (code, type, a01, a11));
10571 else if (commutative && operand_equal_p (a00, a11, 0))
10572 return fold_build2 (TREE_CODE (arg0), type, a00,
10573 fold_build2 (code, type, a01, a10));
10574 else if (commutative && operand_equal_p (a01, a10, 0))
10575 return fold_build2 (TREE_CODE (arg0), type, a01,
10576 fold_build2 (code, type, a00, a11));
10578 /* This case is tricky because we must either have commutative
10579 operators or else A10 must not have side effects. */
10581 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10582 && operand_equal_p (a01, a11, 0))
10583 return fold_build2 (TREE_CODE (arg0), type,
10584 fold_build2 (code, type, a00, a10),
10585 a01);
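/* E.g. (a || b) && (a || c) becomes a || (b && c), factoring
   the shared operand out of any of the four supported truth
   operations.  */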
10588 /* See if we can build a range comparison. */
10589 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10590 return tem;
10592 /* Check for the possibility of merging component references. If our
10593 lhs is another similar operation, try to merge its rhs with our
10594 rhs. Then try to merge our lhs and rhs. */
10595 if (TREE_CODE (arg0) == code
10596 && 0 != (tem = fold_truthop (code, type,
10597 TREE_OPERAND (arg0, 1), arg1)))
10598 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10600 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10601 return tem;
10603 return NULL_TREE;
10605 case TRUTH_ORIF_EXPR:
10606 /* Note that the operands of this must be ints
10607 and their values must be 0 or true.
10608 ("true" is a fixed value perhaps depending on the language.) */
10609 /* If first arg is constant true, return it. */
10610 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10611 return fold_convert (type, arg0);
10612 case TRUTH_OR_EXPR:
10613 /* If either arg is constant zero, drop it. */
10614 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10615 return non_lvalue (fold_convert (type, arg1));
10616 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10617 /* Preserve sequence points. */
10618 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10619 return non_lvalue (fold_convert (type, arg0));
10620 /* If second arg is constant true, result is true, but we must
10621 evaluate first arg. */
10622 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10623 return omit_one_operand (type, arg1, arg0);
10624 /* Likewise for first arg, but note this only occurs here for
10625 TRUTH_OR_EXPR. */
10626 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10627 return omit_one_operand (type, arg0, arg1);
10629 /* !X || X is always true. */
10630 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10632 return omit_one_operand (type, integer_one_node, arg1);
10633 /* X || !X is always true. */
10634 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10635 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10636 return omit_one_operand (type, integer_one_node, arg0);
10638 goto truth_andor;
10640 case TRUTH_XOR_EXPR:
10641 /* If the second arg is constant zero, drop it. */
10642 if (integer_zerop (arg1))
10643 return non_lvalue (fold_convert (type, arg0));
10644 /* If the second arg is constant true, this is a logical inversion. */
10645 if (integer_onep (arg1))
10647 /* Only call invert_truthvalue if operand is a truth value. */
10648 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10649 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10650 else
10651 tem = invert_truthvalue (arg0);
10652 return non_lvalue (fold_convert (type, tem));
10654 /* Identical arguments cancel to zero. */
10655 if (operand_equal_p (arg0, arg1, 0))
10656 return omit_one_operand (type, integer_zero_node, arg0);
10658 /* !X ^ X is always true. */
10659 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10660 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10661 return omit_one_operand (type, integer_one_node, arg1);
10663 /* X ^ !X is always true. */
10664 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10665 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10666 return omit_one_operand (type, integer_one_node, arg0);
10668 return NULL_TREE;
10670 case EQ_EXPR:
10671 case NE_EXPR:
10672 tem = fold_comparison (code, type, op0, op1);
10673 if (tem != NULL_TREE)
10674 return tem;
10676 /* bool_var != 0 becomes bool_var. */
10677 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10678 && code == NE_EXPR)
10679 return non_lvalue (fold_convert (type, arg0));
10681 /* bool_var == 1 becomes bool_var. */
10682 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10683 && code == EQ_EXPR)
10684 return non_lvalue (fold_convert (type, arg0));
10686 /* bool_var != 1 becomes !bool_var. */
10687 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10688 && code == NE_EXPR)
10689 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10691 /* bool_var == 0 becomes !bool_var. */
10692 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10693 && code == EQ_EXPR)
10694 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10696 /* If this is an equality comparison of the address of a non-weak
10697 object against zero, then we know the result. */
10698 if (TREE_CODE (arg0) == ADDR_EXPR
10699 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10700 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10701 && integer_zerop (arg1))
10702 return constant_boolean_node (code != EQ_EXPR, type);
10704 /* If this is an equality comparison of the address of two non-weak,
10705 unaliased symbols neither of which are extern (since we do not
10706 have access to attributes for externs), then we know the result. */
10707 if (TREE_CODE (arg0) == ADDR_EXPR
10708 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10709 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10710 && ! lookup_attribute ("alias",
10711 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10712 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10713 && TREE_CODE (arg1) == ADDR_EXPR
10714 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10715 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10716 && ! lookup_attribute ("alias",
10717 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10718 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10720 /* We know that we're looking at the address of two
10721 non-weak, unaliased, static _DECL nodes.
10723 It is both wasteful and incorrect to call operand_equal_p
10724 to compare the two ADDR_EXPR nodes. It is wasteful in that
10725 all we need to do is test pointer equality for the arguments
10726 to the two ADDR_EXPR nodes. It is incorrect to use
10727 operand_equal_p as that function is NOT equivalent to a
10728 C equality test. It can in fact return false for two
10729 objects which would test as equal using the C equality
10730 operator. */
10731 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10732 return constant_boolean_node (equal
10733 ? code == EQ_EXPR : code != EQ_EXPR,
10734 type);
10737 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10738 a MINUS_EXPR of a constant, we can convert it into a comparison with
10739 a revised constant as long as no overflow occurs. */
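/* E.g., x + 7 == 12 becomes x == 5, provided 12 - 7 does not overflow. */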
10740 if (TREE_CODE (arg1) == INTEGER_CST
10741 && (TREE_CODE (arg0) == PLUS_EXPR
10742 || TREE_CODE (arg0) == MINUS_EXPR)
10743 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10744 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10745 ? MINUS_EXPR : PLUS_EXPR,
10746 fold_convert (TREE_TYPE (arg0), arg1),
10747 TREE_OPERAND (arg0, 1), 0))
10748 && ! TREE_CONSTANT_OVERFLOW (tem))
10749 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10751 /* Similarly for a NEGATE_EXPR. */
10752 if (TREE_CODE (arg0) == NEGATE_EXPR
10753 && TREE_CODE (arg1) == INTEGER_CST
10754 && 0 != (tem = negate_expr (arg1))
10755 && TREE_CODE (tem) == INTEGER_CST
10756 && ! TREE_CONSTANT_OVERFLOW (tem))
10757 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10759 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
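/* E.g., (x ^ 0xF0) == 0x30 becomes x == 0xC0, since 0xF0 ^ 0x30 == 0xC0. */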
10760 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10761 && TREE_CODE (arg1) == INTEGER_CST
10762 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10763 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10764 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10765 fold_convert (TREE_TYPE (arg0), arg1),
10766 TREE_OPERAND (arg0, 1)));
10768 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10769 for !=. Don't do this for ordered comparisons due to overflow. */
10770 if (TREE_CODE (arg0) == MINUS_EXPR
10771 && integer_zerop (arg1))
10772 return fold_build2 (code, type,
10773 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10775 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10776 if (TREE_CODE (arg0) == ABS_EXPR
10777 && (integer_zerop (arg1) || real_zerop (arg1)))
10778 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10780 /* If this is an EQ or NE comparison with zero and ARG0 is
10781 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10782 two operations, but the latter can be done in one less insn
10783 on machines that have only two-operand insns or on which a
10784 constant cannot be the first operand. */
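/* E.g., ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0. */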
10785 if (TREE_CODE (arg0) == BIT_AND_EXPR
10786 && integer_zerop (arg1))
10788 tree arg00 = TREE_OPERAND (arg0, 0);
10789 tree arg01 = TREE_OPERAND (arg0, 1);
10790 if (TREE_CODE (arg00) == LSHIFT_EXPR
10791 && integer_onep (TREE_OPERAND (arg00, 0)))
10792 return
10793 fold_build2 (code, type,
10794 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10795 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10796 arg01, TREE_OPERAND (arg00, 1)),
10797 fold_convert (TREE_TYPE (arg0),
10798 integer_one_node)),
10799 arg1);
10800 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10801 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10802 return
10803 fold_build2 (code, type,
10804 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10805 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10806 arg00, TREE_OPERAND (arg01, 1)),
10807 fold_convert (TREE_TYPE (arg0),
10808 integer_one_node)),
10809 arg1);
10812 /* If this is an NE or EQ comparison of zero against the result of a
10813 signed MOD operation whose second operand is a power of 2, make
10814 the MOD operation unsigned since it is simpler and equivalent. */
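/* E.g., for signed x, x % 8 == 0 becomes (unsigned) x % 8 == 0;
   divisibility by a power of two does not depend on the sign. */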
10815 if (integer_zerop (arg1)
10816 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10817 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10818 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10819 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10820 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10821 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10823 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10824 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10825 fold_convert (newtype,
10826 TREE_OPERAND (arg0, 0)),
10827 fold_convert (newtype,
10828 TREE_OPERAND (arg0, 1)));
10830 return fold_build2 (code, type, newmod,
10831 fold_convert (newtype, arg1));
10834 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10835 C1 is a valid shift constant, and C2 is a power of two, i.e.
10836 a single bit. */
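/* E.g., ((x >> 3) & 4) != 0 becomes (x & 32) != 0 below, since
   4 << 3 == 32 does not overflow. */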
10837 if (TREE_CODE (arg0) == BIT_AND_EXPR
10838 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10839 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10840 == INTEGER_CST
10841 && integer_pow2p (TREE_OPERAND (arg0, 1))
10842 && integer_zerop (arg1))
10844 tree itype = TREE_TYPE (arg0);
10845 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10846 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10848 /* Check for a valid shift count. */
10849 if (TREE_INT_CST_HIGH (arg001) == 0
10850 && TREE_INT_CST_LOW (arg001) < prec)
10852 tree arg01 = TREE_OPERAND (arg0, 1);
10853 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10854 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10855 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10856 can be rewritten as (X & (C2 << C1)) != 0. */
10857 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10859 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10860 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10861 return fold_build2 (code, type, tem, arg1);
10863 /* Otherwise, for signed (arithmetic) shifts,
10864 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10865 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10866 else if (!TYPE_UNSIGNED (itype))
10867 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10868 arg000, build_int_cst (itype, 0));
10869 /* Otherwise, for unsigned (logical) shifts,
10870 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10871 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10872 else
10873 return omit_one_operand (type,
10874 code == EQ_EXPR ? integer_one_node
10875 : integer_zero_node,
10876 arg000);
10880 /* If this is an NE comparison of zero with an AND of one, remove the
10881 comparison since the AND will give the correct value. */
10882 if (code == NE_EXPR
10883 && integer_zerop (arg1)
10884 && TREE_CODE (arg0) == BIT_AND_EXPR
10885 && integer_onep (TREE_OPERAND (arg0, 1)))
10886 return fold_convert (type, arg0);
10888 /* If we have (A & C) == C where C is a power of 2, convert this into
10889 (A & C) != 0. Similarly for NE_EXPR. */
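/* E.g., (flags & 8) == 8 becomes (flags & 8) != 0. */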
10890 if (TREE_CODE (arg0) == BIT_AND_EXPR
10891 && integer_pow2p (TREE_OPERAND (arg0, 1))
10892 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10893 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10894 arg0, fold_convert (TREE_TYPE (arg0),
10895 integer_zero_node));
10897 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10898 bit, then fold the expression into A < 0 or A >= 0. */
10899 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10900 if (tem)
10901 return tem;
10903 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10904 Similarly for NE_EXPR. */
10905 if (TREE_CODE (arg0) == BIT_AND_EXPR
10906 && TREE_CODE (arg1) == INTEGER_CST
10907 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10909 tree notc = fold_build1 (BIT_NOT_EXPR,
10910 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10911 TREE_OPERAND (arg0, 1));
10912 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10913 arg1, notc);
10914 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10915 if (integer_nonzerop (dandnotc))
10916 return omit_one_operand (type, rslt, arg0);
10919 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10920 Similarly for NE_EXPR. */
10921 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10922 && TREE_CODE (arg1) == INTEGER_CST
10923 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10925 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10926 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10927 TREE_OPERAND (arg0, 1), notd);
10928 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10929 if (integer_nonzerop (candnotd))
10930 return omit_one_operand (type, rslt, arg0);
10933 /* If this is a comparison of a field, we may be able to simplify it. */
10934 if (((TREE_CODE (arg0) == COMPONENT_REF
10935 && lang_hooks.can_use_bit_fields_p ())
10936 || TREE_CODE (arg0) == BIT_FIELD_REF)
10937 /* Handle the constant case even without -O
10938 to make sure the warnings are given. */
10939 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10941 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10942 if (t1)
10943 return t1;
10946 /* Optimize comparisons of strlen vs zero to a compare of the
10947 first character of the string vs zero. To wit,
10948 strlen(ptr) == 0 => *ptr == 0
10949 strlen(ptr) != 0 => *ptr != 0
10950 Other cases should reduce to one of these two (or a constant)
10951 due to the return value of strlen being unsigned. */
10952 if (TREE_CODE (arg0) == CALL_EXPR
10953 && integer_zerop (arg1))
10955 tree fndecl = get_callee_fndecl (arg0);
10956 tree arglist;
10958 if (fndecl
10959 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10960 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10961 && (arglist = TREE_OPERAND (arg0, 1))
10962 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10963 && ! TREE_CHAIN (arglist))
10965 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10966 return fold_build2 (code, type, iref,
10967 build_int_cst (TREE_TYPE (iref), 0));
10971 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10972 of X. Similarly fold (X >> C) == 0 into X >= 0. */
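/* E.g., for 32-bit int x, (x >> 31) != 0 becomes x < 0. */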
10973 if (TREE_CODE (arg0) == RSHIFT_EXPR
10974 && integer_zerop (arg1)
10975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10977 tree arg00 = TREE_OPERAND (arg0, 0);
10978 tree arg01 = TREE_OPERAND (arg0, 1);
10979 tree itype = TREE_TYPE (arg00);
10980 if (TREE_INT_CST_HIGH (arg01) == 0
10981 && TREE_INT_CST_LOW (arg01)
10982 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10984 if (TYPE_UNSIGNED (itype))
10986 itype = lang_hooks.types.signed_type (itype);
10987 arg00 = fold_convert (itype, arg00);
10989 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10990 type, arg00, build_int_cst (itype, 0));
10994 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10995 if (integer_zerop (arg1)
10996 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10997 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10998 TREE_OPERAND (arg0, 1));
11000 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11001 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11002 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11003 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11004 build_int_cst (TREE_TYPE (arg1), 0));
11005 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11006 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11008 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11009 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11010 build_int_cst (TREE_TYPE (arg1), 0));
11012 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11013 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11014 && TREE_CODE (arg1) == INTEGER_CST
11015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11016 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11017 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11018 TREE_OPERAND (arg0, 1), arg1));
11020 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11021 (X & C) == 0 when C is a single bit. */
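/* E.g., (~x & 4) == 0 becomes (x & 4) != 0; bit 2 is clear in ~x
   exactly when it is set in x. */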
11022 if (TREE_CODE (arg0) == BIT_AND_EXPR
11023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11024 && integer_zerop (arg1)
11025 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11027 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11028 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11029 TREE_OPERAND (arg0, 1));
11030 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11031 type, tem, arg1);
11034 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11035 constant C is a power of two, i.e. a single bit. */
11036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11037 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11038 && integer_zerop (arg1)
11039 && integer_pow2p (TREE_OPERAND (arg0, 1))
11040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11041 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11043 tree arg00 = TREE_OPERAND (arg0, 0);
11044 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11045 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11048 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11049 when C is a power of two, i.e. a single bit. */
11050 if (TREE_CODE (arg0) == BIT_AND_EXPR
11051 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11052 && integer_zerop (arg1)
11053 && integer_pow2p (TREE_OPERAND (arg0, 1))
11054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11055 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11057 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11058 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11059 arg000, TREE_OPERAND (arg0, 1));
11060 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11061 tem, build_int_cst (TREE_TYPE (tem), 0));
11064 if (integer_zerop (arg1)
11065 && tree_expr_nonzero_p (arg0))
11067 tree res = constant_boolean_node (code == NE_EXPR, type);
11068 return omit_one_operand (type, res, arg0);
11071 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11072 if (TREE_CODE (arg0) == NEGATE_EXPR
11073 && TREE_CODE (arg1) == NEGATE_EXPR)
11074 return fold_build2 (code, type,
11075 TREE_OPERAND (arg0, 0),
11076 TREE_OPERAND (arg1, 0));
11078 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
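/* E.g., (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0. */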
11079 if (TREE_CODE (arg0) == BIT_AND_EXPR
11080 && TREE_CODE (arg1) == BIT_AND_EXPR)
11082 tree arg00 = TREE_OPERAND (arg0, 0);
11083 tree arg01 = TREE_OPERAND (arg0, 1);
11084 tree arg10 = TREE_OPERAND (arg1, 0);
11085 tree arg11 = TREE_OPERAND (arg1, 1);
11086 tree itype = TREE_TYPE (arg0);
11088 if (operand_equal_p (arg01, arg11, 0))
11089 return fold_build2 (code, type,
11090 fold_build2 (BIT_AND_EXPR, itype,
11091 fold_build2 (BIT_XOR_EXPR, itype,
11092 arg00, arg10),
11093 arg01),
11094 build_int_cst (itype, 0));
11096 if (operand_equal_p (arg01, arg10, 0))
11097 return fold_build2 (code, type,
11098 fold_build2 (BIT_AND_EXPR, itype,
11099 fold_build2 (BIT_XOR_EXPR, itype,
11100 arg00, arg11),
11101 arg01),
11102 build_int_cst (itype, 0));
11104 if (operand_equal_p (arg00, arg11, 0))
11105 return fold_build2 (code, type,
11106 fold_build2 (BIT_AND_EXPR, itype,
11107 fold_build2 (BIT_XOR_EXPR, itype,
11108 arg01, arg10),
11109 arg00),
11110 build_int_cst (itype, 0));
11112 if (operand_equal_p (arg00, arg10, 0))
11113 return fold_build2 (code, type,
11114 fold_build2 (BIT_AND_EXPR, itype,
11115 fold_build2 (BIT_XOR_EXPR, itype,
11116 arg01, arg11),
11117 arg00),
11118 build_int_cst (itype, 0));
11121 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11122 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11124 tree arg00 = TREE_OPERAND (arg0, 0);
11125 tree arg01 = TREE_OPERAND (arg0, 1);
11126 tree arg10 = TREE_OPERAND (arg1, 0);
11127 tree arg11 = TREE_OPERAND (arg1, 1);
11128 tree itype = TREE_TYPE (arg0);
11130 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11131 operand_equal_p guarantees no side-effects so we don't need
11132 to use omit_one_operand on Z. */
11133 if (operand_equal_p (arg01, arg11, 0))
11134 return fold_build2 (code, type, arg00, arg10);
11135 if (operand_equal_p (arg01, arg10, 0))
11136 return fold_build2 (code, type, arg00, arg11);
11137 if (operand_equal_p (arg00, arg11, 0))
11138 return fold_build2 (code, type, arg01, arg10);
11139 if (operand_equal_p (arg00, arg10, 0))
11140 return fold_build2 (code, type, arg01, arg11);
11142 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
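/* E.g., (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y, since 5 ^ 3 == 6. */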
11143 if (TREE_CODE (arg01) == INTEGER_CST
11144 && TREE_CODE (arg11) == INTEGER_CST)
11145 return fold_build2 (code, type,
11146 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11147 fold_build2 (BIT_XOR_EXPR, itype,
11148 arg01, arg11)),
11149 arg10);
11151 return NULL_TREE;
11153 case LT_EXPR:
11154 case GT_EXPR:
11155 case LE_EXPR:
11156 case GE_EXPR:
11157 tem = fold_comparison (code, type, op0, op1);
11158 if (tem != NULL_TREE)
11159 return tem;
11161 /* Transform comparisons of the form X +- C CMP X. */
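/* E.g., for signed x with undefined overflow, x + 1 > x folds
   to true and x - 1 >= x folds to false below. */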
11162 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11164 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11165 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11166 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11167 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11168 && !(flag_wrapv || flag_trapv))))
11170 tree arg01 = TREE_OPERAND (arg0, 1);
11171 enum tree_code code0 = TREE_CODE (arg0);
11172 int is_positive;
11174 if (TREE_CODE (arg01) == REAL_CST)
11175 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11176 else
11177 is_positive = tree_int_cst_sgn (arg01);
11179 /* (X - c) > X becomes false. */
11180 if (code == GT_EXPR
11181 && ((code0 == MINUS_EXPR && is_positive >= 0)
11182 || (code0 == PLUS_EXPR && is_positive <= 0)))
11183 return constant_boolean_node (0, type);
11185 /* Likewise (X + c) < X becomes false. */
11186 if (code == LT_EXPR
11187 && ((code0 == PLUS_EXPR && is_positive >= 0)
11188 || (code0 == MINUS_EXPR && is_positive <= 0)))
11189 return constant_boolean_node (0, type);
11191 /* Convert (X - c) <= X to true. */
11192 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11193 && code == LE_EXPR
11194 && ((code0 == MINUS_EXPR && is_positive >= 0)
11195 || (code0 == PLUS_EXPR && is_positive <= 0)))
11196 return constant_boolean_node (1, type);
11198 /* Convert (X + c) >= X to true. */
11199 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11200 && code == GE_EXPR
11201 && ((code0 == PLUS_EXPR && is_positive >= 0)
11202 || (code0 == MINUS_EXPR && is_positive <= 0)))
11203 return constant_boolean_node (1, type);
11205 if (TREE_CODE (arg01) == INTEGER_CST)
11207 /* Convert X + c > X and X - c < X to true for integers. */
11208 if (code == GT_EXPR
11209 && ((code0 == PLUS_EXPR && is_positive > 0)
11210 || (code0 == MINUS_EXPR && is_positive < 0)))
11211 return constant_boolean_node (1, type);
11213 if (code == LT_EXPR
11214 && ((code0 == MINUS_EXPR && is_positive > 0)
11215 || (code0 == PLUS_EXPR && is_positive < 0)))
11216 return constant_boolean_node (1, type);
11218 /* Convert X + c <= X and X - c >= X to false for integers. */
11219 if (code == LE_EXPR
11220 && ((code0 == PLUS_EXPR && is_positive > 0)
11221 || (code0 == MINUS_EXPR && is_positive < 0)))
11222 return constant_boolean_node (0, type);
11224 if (code == GE_EXPR
11225 && ((code0 == MINUS_EXPR && is_positive > 0)
11226 || (code0 == PLUS_EXPR && is_positive < 0)))
11227 return constant_boolean_node (0, type);
11231 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11232 This transformation affects the cases which are handled in later
11233 optimizations involving comparisons with non-negative constants. */
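/* E.g., x >= 5 becomes x > 4, and x < 5 becomes x <= 4. */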
11234 if (TREE_CODE (arg1) == INTEGER_CST
11235 && TREE_CODE (arg0) != INTEGER_CST
11236 && tree_int_cst_sgn (arg1) > 0)
11238 if (code == GE_EXPR)
11240 arg1 = const_binop (MINUS_EXPR, arg1,
11241 build_int_cst (TREE_TYPE (arg1), 1), 0);
11242 return fold_build2 (GT_EXPR, type, arg0,
11243 fold_convert (TREE_TYPE (arg0), arg1));
11245 if (code == LT_EXPR)
11247 arg1 = const_binop (MINUS_EXPR, arg1,
11248 build_int_cst (TREE_TYPE (arg1), 1), 0);
11249 return fold_build2 (LE_EXPR, type, arg0,
11250 fold_convert (TREE_TYPE (arg0), arg1));
11254 /* Comparisons with the highest or lowest possible integer of
11255 the specified precision will have known values. */
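/* E.g., for an 8-bit unsigned x, x > 255 folds to false and
   x <= 255 folds to true. */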
11257 tree arg1_type = TREE_TYPE (arg1);
11258 unsigned int width = TYPE_PRECISION (arg1_type);
11260 if (TREE_CODE (arg1) == INTEGER_CST
11261 && ! TREE_CONSTANT_OVERFLOW (arg1)
11262 && width <= 2 * HOST_BITS_PER_WIDE_INT
11263 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11265 HOST_WIDE_INT signed_max_hi;
11266 unsigned HOST_WIDE_INT signed_max_lo;
11267 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11269 if (width <= HOST_BITS_PER_WIDE_INT)
11271 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11272 - 1;
11273 signed_max_hi = 0;
11274 max_hi = 0;
11276 if (TYPE_UNSIGNED (arg1_type))
11278 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11279 min_lo = 0;
11280 min_hi = 0;
11282 else
11284 max_lo = signed_max_lo;
11285 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11286 min_hi = -1;
11289 else
11291 width -= HOST_BITS_PER_WIDE_INT;
11292 signed_max_lo = -1;
11293 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11294 - 1;
11295 max_lo = -1;
11296 min_lo = 0;
11298 if (TYPE_UNSIGNED (arg1_type))
11300 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11301 min_hi = 0;
11303 else
11305 max_hi = signed_max_hi;
11306 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11310 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11311 && TREE_INT_CST_LOW (arg1) == max_lo)
11312 switch (code)
11314 case GT_EXPR:
11315 return omit_one_operand (type, integer_zero_node, arg0);
11317 case GE_EXPR:
11318 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11320 case LE_EXPR:
11321 return omit_one_operand (type, integer_one_node, arg0);
11323 case LT_EXPR:
11324 return fold_build2 (NE_EXPR, type, arg0, arg1);
11326 /* The GE_EXPR and LT_EXPR cases above are not normally
11327 reached because of previous transformations. */
11329 default:
11330 break;
11332 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11333 == max_hi
11334 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11335 switch (code)
11337 case GT_EXPR:
11338 arg1 = const_binop (PLUS_EXPR, arg1,
11339 build_int_cst (TREE_TYPE (arg1), 1), 0);
11340 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11341 case LE_EXPR:
11342 arg1 = const_binop (PLUS_EXPR, arg1,
11343 build_int_cst (TREE_TYPE (arg1), 1), 0);
11344 return fold_build2 (NE_EXPR, type, arg0, arg1);
11345 default:
11346 break;
11348 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11349 == min_hi
11350 && TREE_INT_CST_LOW (arg1) == min_lo)
11351 switch (code)
11353 case LT_EXPR:
11354 return omit_one_operand (type, integer_zero_node, arg0);
11356 case LE_EXPR:
11357 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11359 case GE_EXPR:
11360 return omit_one_operand (type, integer_one_node, arg0);
11362 case GT_EXPR:
11363 return fold_build2 (NE_EXPR, type, op0, op1);
11365 default:
11366 break;
11368 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11369 == min_hi
11370 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11371 switch (code)
11373 case GE_EXPR:
11374 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11375 return fold_build2 (NE_EXPR, type, arg0, arg1);
11376 case LT_EXPR:
11377 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11378 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11379 default:
11380 break;
11383 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11384 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11385 && TYPE_UNSIGNED (arg1_type)
11386 /* We will flip the signedness of the comparison operator
11387 associated with the mode of arg1, so the sign bit is
11388 specified by this mode. Check that arg1 is the signed
11389 max associated with this sign bit. */
11390 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11391 /* signed_type does not work on pointer types. */
11392 && INTEGRAL_TYPE_P (arg1_type))
11394 /* The following case also applies to X < signed_max+1
11395 and X >= signed_max+1 because of previous transformations. */
11396 if (code == LE_EXPR || code == GT_EXPR)
11398 tree st0, st1;
11399 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11400 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11401 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11402 type, fold_convert (st0, arg0),
11403 build_int_cst (st1, 0));
11409 /* If we are comparing an ABS_EXPR with a constant, we can
11410 convert all the cases into explicit comparisons, but they may
11411 well not be faster than doing the ABS and one comparison.
11412 But ABS (X) <= C is a range comparison, which becomes a subtraction
11413 and a comparison, and is probably faster. */
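/* E.g., abs(x) <= 5 becomes x >= -5 && x <= 5. */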
11414 if (code == LE_EXPR
11415 && TREE_CODE (arg1) == INTEGER_CST
11416 && TREE_CODE (arg0) == ABS_EXPR
11417 && ! TREE_SIDE_EFFECTS (arg0)
11418 && (0 != (tem = negate_expr (arg1)))
11419 && TREE_CODE (tem) == INTEGER_CST
11420 && ! TREE_CONSTANT_OVERFLOW (tem))
11421 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11422 build2 (GE_EXPR, type,
11423 TREE_OPERAND (arg0, 0), tem),
11424 build2 (LE_EXPR, type,
11425 TREE_OPERAND (arg0, 0), arg1));
11427 /* Convert ABS_EXPR<x> >= 0 to true. */
11428 if (code == GE_EXPR
11429 && tree_expr_nonnegative_p (arg0)
11430 && (integer_zerop (arg1)
11431 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11432 && real_zerop (arg1))))
11433 return omit_one_operand (type, integer_one_node, arg0);
11435 /* Convert ABS_EXPR<x> < 0 to false. */
11436 if (code == LT_EXPR
11437 && tree_expr_nonnegative_p (arg0)
11438 && (integer_zerop (arg1) || real_zerop (arg1)))
11439 return omit_one_operand (type, integer_zero_node, arg0);
11441 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11442 and similarly for >= into !=. */
11443 if ((code == LT_EXPR || code == GE_EXPR)
11444 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11445 && TREE_CODE (arg1) == LSHIFT_EXPR
11446 && integer_onep (TREE_OPERAND (arg1, 0)))
11447 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11448 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11449 TREE_OPERAND (arg1, 1)),
11450 build_int_cst (TREE_TYPE (arg0), 0));
11452 if ((code == LT_EXPR || code == GE_EXPR)
11453 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11454 && (TREE_CODE (arg1) == NOP_EXPR
11455 || TREE_CODE (arg1) == CONVERT_EXPR)
11456 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11457 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11458 return
11459 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11460 fold_convert (TREE_TYPE (arg0),
11461 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11462 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11463 1))),
11464 build_int_cst (TREE_TYPE (arg0), 0));
11466 return NULL_TREE;
11468 case UNORDERED_EXPR:
11469 case ORDERED_EXPR:
11470 case UNLT_EXPR:
11471 case UNLE_EXPR:
11472 case UNGT_EXPR:
11473 case UNGE_EXPR:
11474 case UNEQ_EXPR:
11475 case LTGT_EXPR:
11476 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11478 t1 = fold_relational_const (code, type, arg0, arg1);
11479 if (t1 != NULL_TREE)
11480 return t1;
11483 /* If the first operand is NaN, the result is constant. */
11484 if (TREE_CODE (arg0) == REAL_CST
11485 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11486 && (code != LTGT_EXPR || ! flag_trapping_math))
11488 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11489 ? integer_zero_node
11490 : integer_one_node;
11491 return omit_one_operand (type, t1, arg1);
11494 /* If the second operand is NaN, the result is constant. */
11495 if (TREE_CODE (arg1) == REAL_CST
11496 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11497 && (code != LTGT_EXPR || ! flag_trapping_math))
11499 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11500 ? integer_zero_node
11501 : integer_one_node;
11502 return omit_one_operand (type, t1, arg0);
11505 /* Simplify unordered comparison of something with itself. */
11506 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11507 && operand_equal_p (arg0, arg1, 0))
11508 return constant_boolean_node (1, type);
11510 if (code == LTGT_EXPR
11511 && !flag_trapping_math
11512 && operand_equal_p (arg0, arg1, 0))
11513 return constant_boolean_node (0, type);
11515 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11517 tree targ0 = strip_float_extensions (arg0);
11518 tree targ1 = strip_float_extensions (arg1);
11519 tree newtype = TREE_TYPE (targ0);
11521 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11522 newtype = TREE_TYPE (targ1);
11524 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11525 return fold_build2 (code, type, fold_convert (newtype, targ0),
11526 fold_convert (newtype, targ1));
11529 return NULL_TREE;
11531 case COMPOUND_EXPR:
11532 /* When pedantic, a compound expression can be neither an lvalue
11533 nor an integer constant expression. */
11534 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11535 return NULL_TREE;
11536 /* Don't let (0, 0) be a null pointer constant. */
11537 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11538 : fold_convert (type, arg1);
11539 return pedantic_non_lvalue (tem);
11541 case COMPLEX_EXPR:
11542 if ((TREE_CODE (arg0) == REAL_CST
11543 && TREE_CODE (arg1) == REAL_CST)
11544 || (TREE_CODE (arg0) == INTEGER_CST
11545 && TREE_CODE (arg1) == INTEGER_CST))
11546 return build_complex (type, arg0, arg1);
11547 return NULL_TREE;
11549 case ASSERT_EXPR:
11550 /* An ASSERT_EXPR should never be passed to fold_binary. */
11551 gcc_unreachable ();
11553 default:
11554 return NULL_TREE;
11555 } /* switch (code) */
11558 /* Callback for walk_tree, looking for a LABEL_EXPR.
11559 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
11560 Do not check the sub-tree of GOTO_EXPR. */
11562 static tree
11563 contains_label_1 (tree *tp,
11564 int *walk_subtrees,
11565 void *data ATTRIBUTE_UNUSED)
11567 switch (TREE_CODE (*tp))
11569 case LABEL_EXPR:
11570 return *tp;
11571 case GOTO_EXPR:
11572 *walk_subtrees = 0;
11573 /* no break */
11574 default:
11575 return NULL_TREE;
11579 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11580 accessible from outside the sub-tree. Returns false if no
11581 addressable label is found, true otherwise. */
11583 static bool
11584 contains_label_p (tree st)
11586 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
11589 /* Fold a ternary expression of code CODE and type TYPE with operands
11590 OP0, OP1, and OP2. Return the folded expression if folding is
11591 successful. Otherwise, return NULL_TREE. */
11593 tree
11594 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11596 tree tem;
11597 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11598 enum tree_code_class kind = TREE_CODE_CLASS (code);
11600 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11601 && TREE_CODE_LENGTH (code) == 3);
11603 /* Strip any conversions that don't change the mode. This is safe
11604 for every expression, except for a comparison expression because
11605 its signedness is derived from its operands. So, in the latter
11606 case, only strip conversions that don't change the signedness.
11608 Note that this is done as an internal manipulation within the
11609 constant folder, in order to find the simplest representation of
11610 the arguments so that their form can be studied. In any case,
11611 the appropriate type conversions should be put back in the tree
11612 that will get out of the constant folder. */
11613 if (op0)
11615 arg0 = op0;
11616 STRIP_NOPS (arg0);
11619 if (op1)
11621 arg1 = op1;
11622 STRIP_NOPS (arg1);
11625 switch (code)
11627 case COMPONENT_REF:
11628 if (TREE_CODE (arg0) == CONSTRUCTOR
11629 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11631 unsigned HOST_WIDE_INT idx;
11632 tree field, value;
11633 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11634 if (field == arg1)
11635 return value;
11637 return NULL_TREE;
11639 case COND_EXPR:
11640 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11641 so all simple results must be passed through pedantic_non_lvalue. */
11642 if (TREE_CODE (arg0) == INTEGER_CST)
11644 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11645 tem = integer_zerop (arg0) ? op2 : op1;
11646 /* Only optimize constant conditions when the selected branch
11647 has the same type as the COND_EXPR. This avoids optimizing
11648 away "c ? x : throw", where the throw has a void type.
11649 Avoid throwing away an operand which contains a label. */
11650 if ((!TREE_SIDE_EFFECTS (unused_op)
11651 || !contains_label_p (unused_op))
11652 && (! VOID_TYPE_P (TREE_TYPE (tem))
11653 || VOID_TYPE_P (type)))
11654 return pedantic_non_lvalue (tem);
11655 return NULL_TREE;
11657 if (operand_equal_p (arg1, op2, 0))
11658 return pedantic_omit_one_operand (type, arg1, arg0);
11660 /* If we have A op B ? A : C, we may be able to convert this to a
11661 simpler expression, depending on the operation and the values
11662 of B and C. Signed zeros prevent all of these transformations,
11663 for reasons given above each one.
11665 Also try swapping the arguments and inverting the conditional. */
11666 if (COMPARISON_CLASS_P (arg0)
11667 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11668 arg1, TREE_OPERAND (arg0, 1))
11669 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11671 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11672 if (tem)
11673 return tem;
11676 if (COMPARISON_CLASS_P (arg0)
11677 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11678 op2,
11679 TREE_OPERAND (arg0, 1))
11680 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11682 tem = fold_truth_not_expr (arg0);
11683 if (tem && COMPARISON_CLASS_P (tem))
11685 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11686 if (tem)
11687 return tem;
11691 /* If the second operand is simpler than the third, swap them
11692 since that produces better jump optimization results. */
11693 if (truth_value_p (TREE_CODE (arg0))
11694 && tree_swap_operands_p (op1, op2, false))
11696 /* See if this can be inverted. If it can't, possibly because
11697 it was a floating-point inequality comparison, don't do
11698 anything. */
11699 tem = fold_truth_not_expr (arg0);
11700 if (tem)
11701 return fold_build3 (code, type, tem, op2, op1);
11704 /* Convert A ? 1 : 0 to simply A. */
11705 if (integer_onep (op1)
11706 && integer_zerop (op2)
11707 /* If we try to convert OP0 to our type, the
11708 call to fold will try to move the conversion inside
11709 a COND, which will recurse. In that case, the COND_EXPR
11710 is probably the best choice, so leave it alone. */
11711 && type == TREE_TYPE (arg0))
11712 return pedantic_non_lvalue (arg0);
11714 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11715 over COND_EXPR in cases such as floating point comparisons. */
11716 if (integer_zerop (op1)
11717 && integer_onep (op2)
11718 && truth_value_p (TREE_CODE (arg0)))
11719 return pedantic_non_lvalue (fold_convert (type,
11720 invert_truthvalue (arg0)));
11722 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
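/* E.g., for 32-bit int a, a < 0 ? 0x80000000 : 0 becomes,
   in effect, a & 0x80000000. */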
11723 if (TREE_CODE (arg0) == LT_EXPR
11724 && integer_zerop (TREE_OPERAND (arg0, 1))
11725 && integer_zerop (op2)
11726 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11728 /* sign_bit_p only checks ARG1 bits within A's precision.
11729 If <sign bit of A> has wider type than A, bits outside
11730 of A's precision in <sign bit of A> need to be checked.
11731 If they are all 0, this optimization needs to be done
11732 in unsigned A's type; if they are all 1, in signed A's type;
11733 otherwise this can't be done. */
11734 if (TYPE_PRECISION (TREE_TYPE (tem))
11735 < TYPE_PRECISION (TREE_TYPE (arg1))
11736 && TYPE_PRECISION (TREE_TYPE (tem))
11737 < TYPE_PRECISION (type))
11739 unsigned HOST_WIDE_INT mask_lo;
11740 HOST_WIDE_INT mask_hi;
11741 int inner_width, outer_width;
11742 tree tem_type;
11744 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11745 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11746 if (outer_width > TYPE_PRECISION (type))
11747 outer_width = TYPE_PRECISION (type);
11749 if (outer_width > HOST_BITS_PER_WIDE_INT)
11751 mask_hi = ((unsigned HOST_WIDE_INT) -1
11752 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11753 mask_lo = -1;
11755 else
11757 mask_hi = 0;
11758 mask_lo = ((unsigned HOST_WIDE_INT) -1
11759 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11761 if (inner_width > HOST_BITS_PER_WIDE_INT)
11763 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11764 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11765 mask_lo = 0;
11767 else
11768 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11769 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11771 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11772 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11774 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11775 tem = fold_convert (tem_type, tem);
11777 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11778 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11780 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11781 tem = fold_convert (tem_type, tem);
11783 else
11784 tem = NULL;
11787 if (tem)
11788 return fold_convert (type,
11789 fold_build2 (BIT_AND_EXPR,
11790 TREE_TYPE (tem), tem,
11791 fold_convert (TREE_TYPE (tem),
11792 arg1)));
11795 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11796 already handled above. */
11797 if (TREE_CODE (arg0) == BIT_AND_EXPR
11798 && integer_onep (TREE_OPERAND (arg0, 1))
11799 && integer_zerop (op2)
11800 && integer_pow2p (arg1))
11802 tree tem = TREE_OPERAND (arg0, 0);
11803 STRIP_NOPS (tem);
11804 if (TREE_CODE (tem) == RSHIFT_EXPR
11805 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11806 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11807 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11808 return fold_build2 (BIT_AND_EXPR, type,
11809 TREE_OPERAND (tem, 0), arg1);
11812 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11813 is probably obsolete because the first operand should be a
11814 truth value (that's why we have the two cases above), but let's
11815 leave it in until we can confirm this for all front-ends. */
11816 if (integer_zerop (op2)
11817 && TREE_CODE (arg0) == NE_EXPR
11818 && integer_zerop (TREE_OPERAND (arg0, 1))
11819 && integer_pow2p (arg1)
11820 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11821 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11822 arg1, OEP_ONLY_CONST))
11823 return pedantic_non_lvalue (fold_convert (type,
11824 TREE_OPERAND (arg0, 0)));
11826 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11827 if (integer_zerop (op2)
11828 && truth_value_p (TREE_CODE (arg0))
11829 && truth_value_p (TREE_CODE (arg1)))
11830 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11831 fold_convert (type, arg0),
11832 arg1);
11834 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11835 if (integer_onep (op2)
11836 && truth_value_p (TREE_CODE (arg0))
11837 && truth_value_p (TREE_CODE (arg1)))
11840 /* Only perform the transformation if ARG0 is easily inverted. */
11840 tem = fold_truth_not_expr (arg0);
11841 if (tem)
11842 return fold_build2 (TRUTH_ORIF_EXPR, type,
11843 fold_convert (type, tem),
11844 arg1);
11847 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11848 if (integer_zerop (arg1)
11849 && truth_value_p (TREE_CODE (arg0))
11850 && truth_value_p (TREE_CODE (op2)))
11853 /* Only perform the transformation if ARG0 is easily inverted. */
11853 tem = fold_truth_not_expr (arg0);
11854 if (tem)
11855 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11856 fold_convert (type, tem),
11857 op2);
11860 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11861 if (integer_onep (arg1)
11862 && truth_value_p (TREE_CODE (arg0))
11863 && truth_value_p (TREE_CODE (op2)))
11864 return fold_build2 (TRUTH_ORIF_EXPR, type,
11865 fold_convert (type, arg0),
11866 op2);
11868 return NULL_TREE;
11870 case CALL_EXPR:
11871 /* Check for a built-in function. */
11872 if (TREE_CODE (op0) == ADDR_EXPR
11873 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11874 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11875 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11876 return NULL_TREE;
11878 case BIT_FIELD_REF:
11879 if (TREE_CODE (arg0) == VECTOR_CST
11880 && type == TREE_TYPE (TREE_TYPE (arg0))
11881 && host_integerp (arg1, 1)
11882 && host_integerp (op2, 1))
11884 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11885 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11887 if (width != 0
11888 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11889 && (idx % width) == 0
11890 && (idx = idx / width)
11891 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11893 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11894 while (idx-- > 0 && elements)
11895 elements = TREE_CHAIN (elements);
11896 if (elements)
11897 return TREE_VALUE (elements);
11898 else
11899 return fold_convert (type, integer_zero_node);
11902 return NULL_TREE;
11904 default:
11905 return NULL_TREE;
11906 } /* switch (code) */
11909 /* Perform constant folding and related simplification of EXPR.
11910 The related simplifications include x*1 => x, x*0 => 0, etc.,
11911 and application of the associative law.
11912 NOP_EXPR conversions may be removed freely (as long as we
11913 are careful not to change the type of the overall expression).
11914 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11915 but we can constant-fold them if they have constant operands. */
11917 #ifdef ENABLE_FOLD_CHECKING
11918 # define fold(x) fold_1 (x)
11919 static tree fold_1 (tree);
11920 static
11921 #endif
11922 tree
11923 fold (tree expr)
11925 const tree t = expr;
11926 enum tree_code code = TREE_CODE (t);
11927 enum tree_code_class kind = TREE_CODE_CLASS (code);
11928 tree tem;
11930 /* Return right away if a constant. */
11931 if (kind == tcc_constant)
11932 return t;
11934 if (IS_EXPR_CODE_CLASS (kind)
11935 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11937 tree type = TREE_TYPE (t);
11938 tree op0, op1, op2;
11940 switch (TREE_CODE_LENGTH (code))
11942 case 1:
11943 op0 = TREE_OPERAND (t, 0);
11944 tem = fold_unary (code, type, op0);
11945 return tem ? tem : expr;
11946 case 2:
11947 op0 = TREE_OPERAND (t, 0);
11948 op1 = TREE_OPERAND (t, 1);
11949 tem = fold_binary (code, type, op0, op1);
11950 return tem ? tem : expr;
11951 case 3:
11952 op0 = TREE_OPERAND (t, 0);
11953 op1 = TREE_OPERAND (t, 1);
11954 op2 = TREE_OPERAND (t, 2);
11955 tem = fold_ternary (code, type, op0, op1, op2);
11956 return tem ? tem : expr;
11957 default:
11958 break;
11962 switch (code)
11964 case CONST_DECL:
11965 return fold (DECL_INITIAL (t));
11967 default:
11968 return t;
11969 } /* switch (code) */
11972 #ifdef ENABLE_FOLD_CHECKING
11973 #undef fold
11975 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11976 static void fold_check_failed (tree, tree);
11977 void print_fold_checksum (tree);
11979 /* When --enable-checking=fold, compute a digest of expr before
11980 and after the actual fold call to verify that fold did not
11981 accidentally change the original expr. */
11983 tree
11984 fold (tree expr)
11986 tree ret;
11987 struct md5_ctx ctx;
11988 unsigned char checksum_before[16], checksum_after[16];
11989 htab_t ht;
11991 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11992 md5_init_ctx (&ctx);
11993 fold_checksum_tree (expr, &ctx, ht);
11994 md5_finish_ctx (&ctx, checksum_before);
11995 htab_empty (ht);
11997 ret = fold_1 (expr);
11999 md5_init_ctx (&ctx);
12000 fold_checksum_tree (expr, &ctx, ht);
12001 md5_finish_ctx (&ctx, checksum_after);
12002 htab_delete (ht);
12004 if (memcmp (checksum_before, checksum_after, 16))
12005 fold_check_failed (expr, ret);
12007 return ret;
12010 void
12011 print_fold_checksum (tree expr)
12013 struct md5_ctx ctx;
12014 unsigned char checksum[16], cnt;
12015 htab_t ht;
12017 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12018 md5_init_ctx (&ctx);
12019 fold_checksum_tree (expr, &ctx, ht);
12020 md5_finish_ctx (&ctx, checksum);
12021 htab_delete (ht);
12022 for (cnt = 0; cnt < 16; ++cnt)
12023 fprintf (stderr, "%02x", checksum[cnt]);
12024 putc ('\n', stderr);
12027 static void
12028 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12030 internal_error ("fold check: original tree changed by fold");
12033 static void
12034 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12036 void **slot;
12037 enum tree_code code;
12038 struct tree_function_decl buf;
12039 int i, len;
12041 recursive_label:
12043 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12044 <= sizeof (struct tree_function_decl))
12045 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12046 if (expr == NULL)
12047 return;
12048 slot = htab_find_slot (ht, expr, INSERT);
12049 if (*slot != NULL)
12050 return;
12051 *slot = expr;
12052 code = TREE_CODE (expr);
12053 if (TREE_CODE_CLASS (code) == tcc_declaration
12054 && DECL_ASSEMBLER_NAME_SET_P (expr))
12056 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12057 memcpy ((char *) &buf, expr, tree_size (expr));
12058 expr = (tree) &buf;
12059 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12061 else if (TREE_CODE_CLASS (code) == tcc_type
12062 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12063 || TYPE_CACHED_VALUES_P (expr)
12064 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12066 /* Allow these fields to be modified. */
12067 memcpy ((char *) &buf, expr, tree_size (expr));
12068 expr = (tree) &buf;
12069 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12070 TYPE_POINTER_TO (expr) = NULL;
12071 TYPE_REFERENCE_TO (expr) = NULL;
12072 if (TYPE_CACHED_VALUES_P (expr))
12074 TYPE_CACHED_VALUES_P (expr) = 0;
12075 TYPE_CACHED_VALUES (expr) = NULL;
12078 md5_process_bytes (expr, tree_size (expr), ctx);
12079 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12080 if (TREE_CODE_CLASS (code) != tcc_type
12081 && TREE_CODE_CLASS (code) != tcc_declaration
12082 && code != TREE_LIST)
12083 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12084 switch (TREE_CODE_CLASS (code))
12086 case tcc_constant:
12087 switch (code)
12089 case STRING_CST:
12090 md5_process_bytes (TREE_STRING_POINTER (expr),
12091 TREE_STRING_LENGTH (expr), ctx);
12092 break;
12093 case COMPLEX_CST:
12094 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12095 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12096 break;
12097 case VECTOR_CST:
12098 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12099 break;
12100 default:
12101 break;
12103 break;
12104 case tcc_exceptional:
12105 switch (code)
12107 case TREE_LIST:
12108 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12109 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12110 expr = TREE_CHAIN (expr);
12111 goto recursive_label;
12112 break;
12113 case TREE_VEC:
12114 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12115 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12116 break;
12117 default:
12118 break;
12120 break;
12121 case tcc_expression:
12122 case tcc_reference:
12123 case tcc_comparison:
12124 case tcc_unary:
12125 case tcc_binary:
12126 case tcc_statement:
12127 len = TREE_CODE_LENGTH (code);
12128 for (i = 0; i < len; ++i)
12129 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12130 break;
12131 case tcc_declaration:
12132 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12133 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12134 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12136 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12137 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12138 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12139 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12140 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12142 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12143 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12145 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12147 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12148 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12149 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12151 break;
12152 case tcc_type:
12153 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12154 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12155 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12156 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12157 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12158 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12159 if (INTEGRAL_TYPE_P (expr)
12160 || SCALAR_FLOAT_TYPE_P (expr))
12162 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12163 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12165 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12166 if (TREE_CODE (expr) == RECORD_TYPE
12167 || TREE_CODE (expr) == UNION_TYPE
12168 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12169 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12170 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12171 break;
12172 default:
12173 break;
12177 #endif
12179 /* Fold a unary tree expression with code CODE of type TYPE with an
12180 operand OP0. Return a folded expression if successful. Otherwise,
12181 return a tree expression with code CODE of type TYPE with an
12182 operand OP0. */
12184 tree
12185 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12187 tree tem;
12188 #ifdef ENABLE_FOLD_CHECKING
12189 unsigned char checksum_before[16], checksum_after[16];
12190 struct md5_ctx ctx;
12191 htab_t ht;
12193 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12194 md5_init_ctx (&ctx);
12195 fold_checksum_tree (op0, &ctx, ht);
12196 md5_finish_ctx (&ctx, checksum_before);
12197 htab_empty (ht);
12198 #endif
12200 tem = fold_unary (code, type, op0);
12201 if (!tem)
12202 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12204 #ifdef ENABLE_FOLD_CHECKING
12205 md5_init_ctx (&ctx);
12206 fold_checksum_tree (op0, &ctx, ht);
12207 md5_finish_ctx (&ctx, checksum_after);
12208 htab_delete (ht);
12210 if (memcmp (checksum_before, checksum_after, 16))
12211 fold_check_failed (op0, tem);
12212 #endif
12213 return tem;
12216 /* Fold a binary tree expression with code CODE of type TYPE with
12217 operands OP0 and OP1. Return a folded expression if successful.
12218 Otherwise, return a tree expression with code CODE of type TYPE
12219 with operands OP0 and OP1. */
12221 tree
12222 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12223 MEM_STAT_DECL)
12225 tree tem;
12226 #ifdef ENABLE_FOLD_CHECKING
12227 unsigned char checksum_before_op0[16],
12228 checksum_before_op1[16],
12229 checksum_after_op0[16],
12230 checksum_after_op1[16];
12231 struct md5_ctx ctx;
12232 htab_t ht;
12234 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12235 md5_init_ctx (&ctx);
12236 fold_checksum_tree (op0, &ctx, ht);
12237 md5_finish_ctx (&ctx, checksum_before_op0);
12238 htab_empty (ht);
12240 md5_init_ctx (&ctx);
12241 fold_checksum_tree (op1, &ctx, ht);
12242 md5_finish_ctx (&ctx, checksum_before_op1);
12243 htab_empty (ht);
12244 #endif
12246 tem = fold_binary (code, type, op0, op1);
12247 if (!tem)
12248 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12250 #ifdef ENABLE_FOLD_CHECKING
12251 md5_init_ctx (&ctx);
12252 fold_checksum_tree (op0, &ctx, ht);
12253 md5_finish_ctx (&ctx, checksum_after_op0);
12254 htab_empty (ht);
12256 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12257 fold_check_failed (op0, tem);
12259 md5_init_ctx (&ctx);
12260 fold_checksum_tree (op1, &ctx, ht);
12261 md5_finish_ctx (&ctx, checksum_after_op1);
12262 htab_delete (ht);
12264 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12265 fold_check_failed (op1, tem);
12266 #endif
12267 return tem;
12270 /* Fold a ternary tree expression with code CODE of type TYPE with
12271 operands OP0, OP1, and OP2. Return a folded expression if
12272 successful. Otherwise, return a tree expression with code CODE of
12273 type TYPE with operands OP0, OP1, and OP2. */
12275 tree
12276 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12277 MEM_STAT_DECL)
12279 tree tem;
12280 #ifdef ENABLE_FOLD_CHECKING
12281 unsigned char checksum_before_op0[16],
12282 checksum_before_op1[16],
12283 checksum_before_op2[16],
12284 checksum_after_op0[16],
12285 checksum_after_op1[16],
12286 checksum_after_op2[16];
12287 struct md5_ctx ctx;
12288 htab_t ht;
12290 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12291 md5_init_ctx (&ctx);
12292 fold_checksum_tree (op0, &ctx, ht);
12293 md5_finish_ctx (&ctx, checksum_before_op0);
12294 htab_empty (ht);
12296 md5_init_ctx (&ctx);
12297 fold_checksum_tree (op1, &ctx, ht);
12298 md5_finish_ctx (&ctx, checksum_before_op1);
12299 htab_empty (ht);
12301 md5_init_ctx (&ctx);
12302 fold_checksum_tree (op2, &ctx, ht);
12303 md5_finish_ctx (&ctx, checksum_before_op2);
12304 htab_empty (ht);
12305 #endif
12307 tem = fold_ternary (code, type, op0, op1, op2);
12308 if (!tem)
12309 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12311 #ifdef ENABLE_FOLD_CHECKING
12312 md5_init_ctx (&ctx);
12313 fold_checksum_tree (op0, &ctx, ht);
12314 md5_finish_ctx (&ctx, checksum_after_op0);
12315 htab_empty (ht);
12317 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12318 fold_check_failed (op0, tem);
12320 md5_init_ctx (&ctx);
12321 fold_checksum_tree (op1, &ctx, ht);
12322 md5_finish_ctx (&ctx, checksum_after_op1);
12323 htab_empty (ht);
12325 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12326 fold_check_failed (op1, tem);
12328 md5_init_ctx (&ctx);
12329 fold_checksum_tree (op2, &ctx, ht);
12330 md5_finish_ctx (&ctx, checksum_after_op2);
12331 htab_delete (ht);
12333 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12334 fold_check_failed (op2, tem);
12335 #endif
12336 return tem;
12339 /* Perform constant folding and related simplification of an initializer
12340 expression. These behave identically to "fold_buildN" but ignore
12341 potential run-time traps and exceptions that fold must preserve. */
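/* For example, an initializer such as

     static const double d = 1.0 / 3.0;

   may be folded by the routines below even when -frounding-math or
   -ftrapping-math would otherwise force fold to leave the division
   alone.  */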
12343 #define START_FOLD_INIT \
12344 int saved_signaling_nans = flag_signaling_nans;\
12345 int saved_trapping_math = flag_trapping_math;\
12346 int saved_rounding_math = flag_rounding_math;\
12347 int saved_trapv = flag_trapv;\
12348 int saved_folding_initializer = folding_initializer;\
12349 flag_signaling_nans = 0;\
12350 flag_trapping_math = 0;\
12351 flag_rounding_math = 0;\
12352 flag_trapv = 0;\
12353 folding_initializer = 1;
12355 #define END_FOLD_INIT \
12356 flag_signaling_nans = saved_signaling_nans;\
12357 flag_trapping_math = saved_trapping_math;\
12358 flag_rounding_math = saved_rounding_math;\
12359 flag_trapv = saved_trapv;\
12360 folding_initializer = saved_folding_initializer;
12362 tree
12363 fold_build1_initializer (enum tree_code code, tree type, tree op)
12365 tree result;
12366 START_FOLD_INIT;
12368 result = fold_build1 (code, type, op);
12370 END_FOLD_INIT;
12371 return result;
12374 tree
12375 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12377 tree result;
12378 START_FOLD_INIT;
12380 result = fold_build2 (code, type, op0, op1);
12382 END_FOLD_INIT;
12383 return result;
12386 tree
12387 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12388 tree op2)
12390 tree result;
12391 START_FOLD_INIT;
12393 result = fold_build3 (code, type, op0, op1, op2);
12395 END_FOLD_INIT;
12396 return result;
12399 #undef START_FOLD_INIT
12400 #undef END_FOLD_INIT
12402 /* Determine if first argument is a multiple of second argument. Return 0 if
12403 it is not, or if we cannot easily determine it to be.
12405 An example of the sort of thing we care about (at this point; this routine
12406 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12407 fold cases do now) is discovering that
12409 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12411 is a multiple of
12413 SAVE_EXPR (J * 8)
12415 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12417 This code also handles discovering that
12419 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12421 is a multiple of 8 so we don't have to worry about dealing with a
12422 possible remainder.
12424 Note that we *look* inside a SAVE_EXPR only to determine how it was
12425 calculated; it is not safe for fold to do much of anything else with the
12426 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12427 at run time. For example, the latter example above *cannot* be implemented
12428 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12429 evaluation time of the original SAVE_EXPR is not necessarily the same at
12430 the time the new expression is evaluated. The only optimization of this
12431 sort that would be valid is changing
12433 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12435 divided by 8 to
12437 SAVE_EXPR (I) * SAVE_EXPR (J)
12439 (where the same SAVE_EXPR (J) is used in the original and the
12440 transformed version). */
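/* A hedged sketch, with `i' standing for any sizetype expression:

     multiple_of_p (sizetype, size_binop (MULT_EXPR, i, size_int (8)),
                    size_int (4))

   should return 1, since I * 8 is a multiple of 4 regardless of I.  */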
12442 static int
12443 multiple_of_p (tree type, tree top, tree bottom)
12445 if (operand_equal_p (top, bottom, 0))
12446 return 1;
12448 if (TREE_CODE (type) != INTEGER_TYPE)
12449 return 0;
12451 switch (TREE_CODE (top))
12453 case BIT_AND_EXPR:
12454 /* Bitwise and provides a power of two multiple. If the mask is
12455 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12456 if (!integer_pow2p (bottom))
12457 return 0;
12458 /* FALLTHRU */
12460 case MULT_EXPR:
12461 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12462 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12464 case PLUS_EXPR:
12465 case MINUS_EXPR:
12466 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12467 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12469 case LSHIFT_EXPR:
12470 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12472 tree op1, t1;
12474 op1 = TREE_OPERAND (top, 1);
12475 /* const_binop may not detect overflow correctly,
12476 so check for it explicitly here. */
12477 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12478 > TREE_INT_CST_LOW (op1)
12479 && TREE_INT_CST_HIGH (op1) == 0
12480 && 0 != (t1 = fold_convert (type,
12481 const_binop (LSHIFT_EXPR,
12482 size_one_node,
12483 op1, 0)))
12484 && ! TREE_OVERFLOW (t1))
12485 return multiple_of_p (type, t1, bottom);
12487 return 0;
12489 case NOP_EXPR:
12490 /* Can't handle conversions from non-integral or wider integral type. */
12491 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12492 || (TYPE_PRECISION (type)
12493 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12494 return 0;
12496 /* ... fall through ... */
12498 case SAVE_EXPR:
12499 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12501 case INTEGER_CST:
12502 if (TREE_CODE (bottom) != INTEGER_CST
12503 || (TYPE_UNSIGNED (type)
12504 && (tree_int_cst_sgn (top) < 0
12505 || tree_int_cst_sgn (bottom) < 0)))
12506 return 0;
12507 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12508 top, bottom, 0));
12510 default:
12511 return 0;
12515 /* Return true if `t' is known to be non-negative. */
12517 bool
12518 tree_expr_nonnegative_p (tree t)
12520 if (t == error_mark_node)
12521 return false;
12523 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12524 return true;
12526 switch (TREE_CODE (t))
12528 case SSA_NAME:
12529 /* Query VRP to see if it has recorded any information about
12530 the range of this object. */
12531 return ssa_name_nonnegative_p (t);
12533 case ABS_EXPR:
12534 /* We can't return 1 if flag_wrapv is set because
12535 ABS_EXPR<INT_MIN> = INT_MIN. */
12536 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12537 return true;
12538 break;
12540 case INTEGER_CST:
12541 return tree_int_cst_sgn (t) >= 0;
12543 case REAL_CST:
12544 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12546 case PLUS_EXPR:
12547 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12548 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12549 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12551 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12552 both unsigned and at least 2 bits shorter than the result. */
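/* Concretely: if x and y are zero-extended from 8-bit unsigned values
   and the sum is computed in 32-bit int, prec is 8 + 1 = 9 < 32, so
   the sum can never reach the sign bit and is known non-negative.  */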
12553 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12554 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12555 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12557 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12558 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12559 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12560 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12562 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12563 TYPE_PRECISION (inner2)) + 1;
12564 return prec < TYPE_PRECISION (TREE_TYPE (t));
12567 break;
12569 case MULT_EXPR:
12570 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12572 /* x * x for floating point x is always non-negative. */
12573 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12574 return true;
12575 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12576 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12579 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12580 both unsigned and their combined precision is less than that of the result. */
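/* Concretely: two values zero-extended from 16 bits and multiplied in
   64-bit arithmetic satisfy 16 + 16 = 32 < 64, so the product stays
   below 2^32 and cannot reach the sign bit.  */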
12581 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12582 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12583 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12585 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12586 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12587 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12588 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12589 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12590 < TYPE_PRECISION (TREE_TYPE (t));
12592 return false;
12594 case BIT_AND_EXPR:
12595 case MAX_EXPR:
12596 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12597 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12599 case BIT_IOR_EXPR:
12600 case BIT_XOR_EXPR:
12601 case MIN_EXPR:
12602 case RDIV_EXPR:
12603 case TRUNC_DIV_EXPR:
12604 case CEIL_DIV_EXPR:
12605 case FLOOR_DIV_EXPR:
12606 case ROUND_DIV_EXPR:
12607 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12608 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12610 case TRUNC_MOD_EXPR:
12611 case CEIL_MOD_EXPR:
12612 case FLOOR_MOD_EXPR:
12613 case ROUND_MOD_EXPR:
12614 case SAVE_EXPR:
12615 case NON_LVALUE_EXPR:
12616 case FLOAT_EXPR:
12617 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12619 case COMPOUND_EXPR:
12620 case MODIFY_EXPR:
12621 case GIMPLE_MODIFY_STMT:
12622 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12624 case BIND_EXPR:
12625 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12627 case COND_EXPR:
12628 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12629 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12631 case NOP_EXPR:
12633 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12634 tree outer_type = TREE_TYPE (t);
12636 if (TREE_CODE (outer_type) == REAL_TYPE)
12638 if (TREE_CODE (inner_type) == REAL_TYPE)
12639 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12640 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12642 if (TYPE_UNSIGNED (inner_type))
12643 return true;
12644 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12647 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12649 if (TREE_CODE (inner_type) == REAL_TYPE)
12650 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12651 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12652 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12653 && TYPE_UNSIGNED (inner_type);
12656 break;
12658 case TARGET_EXPR:
12660 tree temp = TARGET_EXPR_SLOT (t);
12661 t = TARGET_EXPR_INITIAL (t);
12663 /* If the initializer is non-void, then it's a normal expression
12664 that will be assigned to the slot. */
12665 if (!VOID_TYPE_P (t))
12666 return tree_expr_nonnegative_p (t);
12668 /* Otherwise, the initializer sets the slot in some way. One common
12669 way is an assignment statement at the end of the initializer. */
12670 while (1)
12672 if (TREE_CODE (t) == BIND_EXPR)
12673 t = expr_last (BIND_EXPR_BODY (t));
12674 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12675 || TREE_CODE (t) == TRY_CATCH_EXPR)
12676 t = expr_last (TREE_OPERAND (t, 0));
12677 else if (TREE_CODE (t) == STATEMENT_LIST)
12678 t = expr_last (t);
12679 else
12680 break;
12682 if ((TREE_CODE (t) == MODIFY_EXPR
12683 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12684 && GENERIC_TREE_OPERAND (t, 0) == temp)
12685 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12687 return false;
12690 case CALL_EXPR:
12692 tree fndecl = get_callee_fndecl (t);
12693 tree arglist = TREE_OPERAND (t, 1);
12694 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12695 switch (DECL_FUNCTION_CODE (fndecl))
12697 CASE_FLT_FN (BUILT_IN_ACOS):
12698 CASE_FLT_FN (BUILT_IN_ACOSH):
12699 CASE_FLT_FN (BUILT_IN_CABS):
12700 CASE_FLT_FN (BUILT_IN_COSH):
12701 CASE_FLT_FN (BUILT_IN_ERFC):
12702 CASE_FLT_FN (BUILT_IN_EXP):
12703 CASE_FLT_FN (BUILT_IN_EXP10):
12704 CASE_FLT_FN (BUILT_IN_EXP2):
12705 CASE_FLT_FN (BUILT_IN_FABS):
12706 CASE_FLT_FN (BUILT_IN_FDIM):
12707 CASE_FLT_FN (BUILT_IN_HYPOT):
12708 CASE_FLT_FN (BUILT_IN_POW10):
12709 CASE_INT_FN (BUILT_IN_FFS):
12710 CASE_INT_FN (BUILT_IN_PARITY):
12711 CASE_INT_FN (BUILT_IN_POPCOUNT):
12712 case BUILT_IN_BSWAP32:
12713 case BUILT_IN_BSWAP64:
12714 /* Always true. */
12715 return true;
12717 CASE_FLT_FN (BUILT_IN_SQRT):
12718 /* sqrt(-0.0) is -0.0. */
12719 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12720 return true;
12721 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12723 CASE_FLT_FN (BUILT_IN_ASINH):
12724 CASE_FLT_FN (BUILT_IN_ATAN):
12725 CASE_FLT_FN (BUILT_IN_ATANH):
12726 CASE_FLT_FN (BUILT_IN_CBRT):
12727 CASE_FLT_FN (BUILT_IN_CEIL):
12728 CASE_FLT_FN (BUILT_IN_ERF):
12729 CASE_FLT_FN (BUILT_IN_EXPM1):
12730 CASE_FLT_FN (BUILT_IN_FLOOR):
12731 CASE_FLT_FN (BUILT_IN_FMOD):
12732 CASE_FLT_FN (BUILT_IN_FREXP):
12733 CASE_FLT_FN (BUILT_IN_LCEIL):
12734 CASE_FLT_FN (BUILT_IN_LDEXP):
12735 CASE_FLT_FN (BUILT_IN_LFLOOR):
12736 CASE_FLT_FN (BUILT_IN_LLCEIL):
12737 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12738 CASE_FLT_FN (BUILT_IN_LLRINT):
12739 CASE_FLT_FN (BUILT_IN_LLROUND):
12740 CASE_FLT_FN (BUILT_IN_LRINT):
12741 CASE_FLT_FN (BUILT_IN_LROUND):
12742 CASE_FLT_FN (BUILT_IN_MODF):
12743 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12744 CASE_FLT_FN (BUILT_IN_RINT):
12745 CASE_FLT_FN (BUILT_IN_ROUND):
12746 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12747 CASE_FLT_FN (BUILT_IN_SINH):
12748 CASE_FLT_FN (BUILT_IN_TANH):
12749 CASE_FLT_FN (BUILT_IN_TRUNC):
12750 /* True if the 1st argument is nonnegative. */
12751 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12753 CASE_FLT_FN (BUILT_IN_FMAX):
12754 /* True if the 1st OR 2nd arguments are nonnegative. */
12755 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12756 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12758 CASE_FLT_FN (BUILT_IN_FMIN):
12759 /* True if the 1st AND 2nd arguments are nonnegative. */
12760 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12761 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12763 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12764 /* True if the 2nd argument is nonnegative. */
12765 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12767 CASE_FLT_FN (BUILT_IN_POWI):
12768 /* True if the 1st argument is nonnegative or the second
12769 argument is an even integer. */
12770 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12772 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12773 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12774 return true;
12776 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12778 CASE_FLT_FN (BUILT_IN_POW):
12779 /* True if the 1st argument is nonnegative or the second
12780 argument is an even integer valued real. */
12781 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12783 REAL_VALUE_TYPE c;
12784 HOST_WIDE_INT n;
12786 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12787 n = real_to_integer (&c);
12788 if ((n & 1) == 0)
12790 REAL_VALUE_TYPE cint;
12791 real_from_integer (&cint, VOIDmode, n,
12792 n < 0 ? -1 : 0, 0);
12793 if (real_identical (&c, &cint))
12794 return true;
12797 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12799 default:
12800 break;
12804 /* ... fall through ... */
12806 default:
12807 if (truth_value_p (TREE_CODE (t)))
12808 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12809 return true;
12812 /* We don't know the sign of `t', so be conservative and return false. */
12813 return false;
12816 /* Return true when T is an address and is known to be nonzero.
12817 For floating point we further ensure that T is not denormal.
12818 Similar logic is present in nonzero_address_p in rtlanal.c. */
12820 bool
12821 tree_expr_nonzero_p (tree t)
12823 tree type = TREE_TYPE (t);
12825 /* Doing something useful for floating point would need more work. */
12826 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12827 return false;
12829 switch (TREE_CODE (t))
12831 case SSA_NAME:
12832 /* Query VRP to see if it has recorded any information about
12833 the range of this object. */
12834 return ssa_name_nonzero_p (t);
12836 case ABS_EXPR:
12837 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12839 case INTEGER_CST:
12840 /* We used to test for !integer_zerop here. This does not work correctly
12841 if TREE_CONSTANT_OVERFLOW (t). */
12842 return (TREE_INT_CST_LOW (t) != 0
12843 || TREE_INT_CST_HIGH (t) != 0);
12845 case PLUS_EXPR:
12846 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12848 /* In the presence of negative values it is hard
12849 to say anything. */
12850 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12851 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12852 return false;
12853 /* One of the operands must be positive and the other non-negative. */
12854 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12855 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12857 break;
12859 case MULT_EXPR:
12860 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12862 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12863 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12865 break;
12867 case NOP_EXPR:
12869 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12870 tree outer_type = TREE_TYPE (t);
12872 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12873 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12875 break;
12877 case ADDR_EXPR:
12879 tree base = get_base_address (TREE_OPERAND (t, 0));
12881 if (!base)
12882 return false;
12884 /* Weak declarations may link to NULL. */
12885 if (VAR_OR_FUNCTION_DECL_P (base))
12886 return !DECL_WEAK (base);
12888 /* Constants are never weak. */
12889 if (CONSTANT_CLASS_P (base))
12890 return true;
12892 return false;
12895 case COND_EXPR:
12896 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12897 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12899 case MIN_EXPR:
12900 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12901 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12903 case MAX_EXPR:
12904 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12906 /* When both operands are nonzero, then MAX must be too. */
12907 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12908 return true;
12910 /* MAX where operand 0 is positive is positive. */
12911 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12913 /* MAX where operand 1 is positive is positive. */
12914 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12915 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12916 return true;
12917 break;
12919 case COMPOUND_EXPR:
12920 case MODIFY_EXPR:
12921 case GIMPLE_MODIFY_STMT:
12922 case BIND_EXPR:
12923 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12925 case SAVE_EXPR:
12926 case NON_LVALUE_EXPR:
12927 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12929 case BIT_IOR_EXPR:
12930 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12931 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12933 case CALL_EXPR:
12934 return alloca_call_p (t);
12936 default:
12937 break;
12939 return false;
12942 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12943 attempt to fold the expression to a constant without modifying TYPE,
12944 OP0 or OP1.
12946 If the expression can be simplified to a constant, then return
12947 the constant. If the expression cannot be simplified to a
12948 constant, then return NULL_TREE. */
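/* A usage sketch:

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 2),
                              build_int_cst (integer_type_node, 3))

   should yield the INTEGER_CST 5, whereas the same call with a
   non-constant operand yields NULL_TREE.  */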
12950 tree
12951 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12953 tree tem = fold_binary (code, type, op0, op1);
12954 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12957 /* Given the components of a unary expression CODE, TYPE and OP0,
12958 attempt to fold the expression to a constant without modifying
12959 TYPE or OP0.
12961 If the expression can be simplified to a constant, then return
12962 the constant. If the expression cannot be simplified to a
12963 constant, then return NULL_TREE. */
12965 tree
12966 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12968 tree tem = fold_unary (code, type, op0);
12969 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12972 /* If EXP represents referencing an element in a constant string
12973 (either via pointer arithmetic or array indexing), return the
12974 tree representing the value accessed, otherwise return NULL. */
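/* For example, given a tree for "abc"[1] -- an ARRAY_REF of a
   STRING_CST with a constant index -- this should return the
   character constant 'b'; with a non-constant string or index it
   returns NULL.  */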
12976 tree
12977 fold_read_from_constant_string (tree exp)
12979 if ((TREE_CODE (exp) == INDIRECT_REF
12980 || TREE_CODE (exp) == ARRAY_REF)
12981 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12983 tree exp1 = TREE_OPERAND (exp, 0);
12984 tree index;
12985 tree string;
12987 if (TREE_CODE (exp) == INDIRECT_REF)
12988 string = string_constant (exp1, &index);
12989 else
12991 tree low_bound = array_ref_low_bound (exp);
12992 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12994 /* Optimize the special case of a zero lower bound.
12996 We convert the low_bound to sizetype to avoid some problems
12997 with constant folding. (E.g. suppose the lower bound is 1,
12998 and its mode is QI. Without the conversion, (ARRAY
12999 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13000 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13001 if (! integer_zerop (low_bound))
13002 index = size_diffop (index, fold_convert (sizetype, low_bound));
13004 string = exp1;
13007 if (string
13008 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13009 && TREE_CODE (string) == STRING_CST
13010 && TREE_CODE (index) == INTEGER_CST
13011 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13012 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13013 == MODE_INT)
13014 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13015 return fold_convert (TREE_TYPE (exp),
13016 build_int_cst (NULL_TREE,
13017 (TREE_STRING_POINTER (string)
13018 [TREE_INT_CST_LOW (index)])));
13020 return NULL;
13023 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13024 an integer constant or real constant.
13026 TYPE is the type of the result. */
13028 static tree
13029 fold_negate_const (tree arg0, tree type)
13031 tree t = NULL_TREE;
13033 switch (TREE_CODE (arg0))
13035 case INTEGER_CST:
13037 unsigned HOST_WIDE_INT low;
13038 HOST_WIDE_INT high;
13039 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13040 TREE_INT_CST_HIGH (arg0),
13041 &low, &high);
13042 t = build_int_cst_wide (type, low, high);
13043 t = force_fit_type (t, 1,
13044 (overflow | TREE_OVERFLOW (arg0))
13045 && !TYPE_UNSIGNED (type),
13046 TREE_CONSTANT_OVERFLOW (arg0));
13047 break;
13050 case REAL_CST:
13051 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13052 break;
13054 default:
13055 gcc_unreachable ();
13058 return t;
13061 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13062 an integer constant or real constant.
13064 TYPE is the type of the result. */
13066 tree
13067 fold_abs_const (tree arg0, tree type)
13069 tree t = NULL_TREE;
13071 switch (TREE_CODE (arg0))
13073 case INTEGER_CST:
13074 /* If the value is unsigned, then the absolute value is
13075 the same as the ordinary value. */
13076 if (TYPE_UNSIGNED (type))
13077 t = arg0;
13078 /* Similarly, if the value is non-negative. */
13079 else if (INT_CST_LT (integer_minus_one_node, arg0))
13080 t = arg0;
13081 /* If the value is negative, then the absolute value is
13082 its negation. */
13083 else
13085 unsigned HOST_WIDE_INT low;
13086 HOST_WIDE_INT high;
13087 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13088 TREE_INT_CST_HIGH (arg0),
13089 &low, &high);
13090 t = build_int_cst_wide (type, low, high);
13091 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13092 TREE_CONSTANT_OVERFLOW (arg0));
13094 break;
13096 case REAL_CST:
13097 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13098 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13099 else
13100 t = arg0;
13101 break;
13103 default:
13104 gcc_unreachable ();
13107 return t;
13110 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13111 constant. TYPE is the type of the result. */
13113 static tree
13114 fold_not_const (tree arg0, tree type)
13116 tree t = NULL_TREE;
13118 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13120 t = build_int_cst_wide (type,
13121 ~ TREE_INT_CST_LOW (arg0),
13122 ~ TREE_INT_CST_HIGH (arg0));
13123 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13124 TREE_CONSTANT_OVERFLOW (arg0));
13126 return t;
13129 /* Given CODE, a relational operator, the target type, TYPE and two
13130 constant operands OP0 and OP1, return the result of the
13131 relational operation. If the result is not a compile time
13132 constant, then return NULL_TREE. */
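/* E.g. fold_relational_const (LT_EXPR, boolean_type_node,
   build_int_cst (integer_type_node, 1),
   build_int_cst (integer_type_node, 2)) should produce
   boolean_true_node, while a comparison involving a non-constant
   operand produces NULL_TREE.  */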
13134 static tree
13135 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13137 int result, invert;
13139 /* From here on, the only cases we handle are when the result is
13140 known to be a constant. */
13142 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13144 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13145 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13147 /* Handle the cases where either operand is a NaN. */
13148 if (real_isnan (c0) || real_isnan (c1))
13150 switch (code)
13152 case EQ_EXPR:
13153 case ORDERED_EXPR:
13154 result = 0;
13155 break;
13157 case NE_EXPR:
13158 case UNORDERED_EXPR:
13159 case UNLT_EXPR:
13160 case UNLE_EXPR:
13161 case UNGT_EXPR:
13162 case UNGE_EXPR:
13163 case UNEQ_EXPR:
13164 result = 1;
13165 break;
13167 case LT_EXPR:
13168 case LE_EXPR:
13169 case GT_EXPR:
13170 case GE_EXPR:
13171 case LTGT_EXPR:
13172 if (flag_trapping_math)
13173 return NULL_TREE;
13174 result = 0;
13175 break;
13177 default:
13178 gcc_unreachable ();
13181 return constant_boolean_node (result, type);
13184 return constant_boolean_node (real_compare (code, c0, c1), type);
13187 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13189 To compute GT, swap the arguments and do LT.
13190 To compute GE, do LT and invert the result.
13191 To compute LE, swap the arguments, do LT and invert the result.
13192 To compute NE, do EQ and invert the result.
13194 Therefore, the code below must handle only EQ and LT. */
13196 if (code == LE_EXPR || code == GT_EXPR)
13198 tree tem = op0;
13199 op0 = op1;
13200 op1 = tem;
13201 code = swap_tree_comparison (code);
13204 /* Note that it is safe to invert for real values here because we
13205 have already handled the one case where it matters. */
13207 invert = 0;
13208 if (code == NE_EXPR || code == GE_EXPR)
13210 invert = 1;
13211 code = invert_tree_comparison (code, false);
13214 /* Compute a result for LT or EQ if args permit;
13215 Otherwise return NULL_TREE. */
13216 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13218 if (code == EQ_EXPR)
13219 result = tree_int_cst_equal (op0, op1);
13220 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13221 result = INT_CST_LT_UNSIGNED (op0, op1);
13222 else
13223 result = INT_CST_LT (op0, op1);
13225 else
13226 return NULL_TREE;
13228 if (invert)
13229 result ^= 1;
13230 return constant_boolean_node (result, type);
13233 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
13234 Don't build a cleanup point expression if EXPR doesn't have side
13235 effects. */
13237 tree
13238 fold_build_cleanup_point_expr (tree type, tree expr)
13240 /* If the expression does not have side effects then we don't have to wrap
13241 it with a cleanup point expression. */
13242 if (!TREE_SIDE_EFFECTS (expr))
13243 return expr;
13245 /* If the expression is a return, check whether the expression inside the
13246 return, or the right-hand side of the modify expression inside the
13247 return, has side effects. If either doesn't, we don't need to
13248 wrap the expression in a cleanup point expression. Note we don't check the
13249 left-hand side of the modify because it should always be the return decl. */
13250 if (TREE_CODE (expr) == RETURN_EXPR)
13252 tree op = TREE_OPERAND (expr, 0);
13253 if (!op || !TREE_SIDE_EFFECTS (op))
13254 return expr;
13255 op = TREE_OPERAND (op, 1);
13256 if (!TREE_SIDE_EFFECTS (op))
13257 return expr;
13260 return build1 (CLEANUP_POINT_EXPR, type, expr);
13263 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13264 avoid confusing the gimplify process. */
13266 tree
13267 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13269 /* The size of the object is not relevant when talking about its address. */
13270 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13271 t = TREE_OPERAND (t, 0);
13273 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13274 if (TREE_CODE (t) == INDIRECT_REF
13275 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13277 t = TREE_OPERAND (t, 0);
13278 if (TREE_TYPE (t) != ptrtype)
13279 t = build1 (NOP_EXPR, ptrtype, t);
13281 else
13283 tree base = t;
13285 while (handled_component_p (base))
13286 base = TREE_OPERAND (base, 0);
13287 if (DECL_P (base))
13288 TREE_ADDRESSABLE (base) = 1;
13290 t = build1 (ADDR_EXPR, ptrtype, t);
13293 return t;
13296 tree
13297 build_fold_addr_expr (tree t)
13299 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13302 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13303 of an indirection through OP0, or NULL_TREE if no simplification is
13304 possible. */
13306 tree
13307 fold_indirect_ref_1 (tree type, tree op0)
13309 tree sub = op0;
13310 tree subtype;
13312 STRIP_NOPS (sub);
13313 subtype = TREE_TYPE (sub);
13314 if (!POINTER_TYPE_P (subtype))
13315 return NULL_TREE;
13317 if (TREE_CODE (sub) == ADDR_EXPR)
13319 tree op = TREE_OPERAND (sub, 0);
13320 tree optype = TREE_TYPE (op);
13321 /* *&CONST_DECL -> to the value of the const decl. */
13322 if (TREE_CODE (op) == CONST_DECL)
13323 return DECL_INITIAL (op);
13324 /* *&p => p; make sure to handle *&"str"[cst] here. */
13325 if (type == optype)
13327 tree fop = fold_read_from_constant_string (op);
13328 if (fop)
13329 return fop;
13330 else
13331 return op;
13333 /* *(foo *)&fooarray => fooarray[0] */
13334 else if (TREE_CODE (optype) == ARRAY_TYPE
13335 && type == TREE_TYPE (optype))
13337 tree type_domain = TYPE_DOMAIN (optype);
13338 tree min_val = size_zero_node;
13339 if (type_domain && TYPE_MIN_VALUE (type_domain))
13340 min_val = TYPE_MIN_VALUE (type_domain);
13341 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13343 /* *(foo *)&complexfoo => __real__ complexfoo */
13344 else if (TREE_CODE (optype) == COMPLEX_TYPE
13345 && type == TREE_TYPE (optype))
13346 return fold_build1 (REALPART_EXPR, type, op);
13347 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13348 else if (TREE_CODE (optype) == VECTOR_TYPE
13349 && type == TREE_TYPE (optype))
13351 tree part_width = TYPE_SIZE (type);
13352 tree index = bitsize_int (0);
13353 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13357 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13358 if (TREE_CODE (sub) == PLUS_EXPR
13359 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13361 tree op00 = TREE_OPERAND (sub, 0);
13362 tree op01 = TREE_OPERAND (sub, 1);
13363 tree op00type;
13365 STRIP_NOPS (op00);
13366 op00type = TREE_TYPE (op00);
13367 if (TREE_CODE (op00) == ADDR_EXPR
13368 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13369 && type == TREE_TYPE (TREE_TYPE (op00type)))
13371 tree size = TYPE_SIZE_UNIT (type);
13372 if (tree_int_cst_equal (size, op01))
13373 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13377 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13378 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13379 && type == TREE_TYPE (TREE_TYPE (subtype)))
13381 tree type_domain;
13382 tree min_val = size_zero_node;
13383 sub = build_fold_indirect_ref (sub);
13384 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13385 if (type_domain && TYPE_MIN_VALUE (type_domain))
13386 min_val = TYPE_MIN_VALUE (type_domain);
13387 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13390 return NULL_TREE;
13393 /* Builds an expression for an indirection through T, simplifying some
13394 cases. */
13396 tree
13397 build_fold_indirect_ref (tree t)
13399 tree type = TREE_TYPE (TREE_TYPE (t));
13400 tree sub = fold_indirect_ref_1 (type, t);
13402 if (sub)
13403 return sub;
13404 else
13405 return build1 (INDIRECT_REF, type, t);
13408 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13410 tree
13411 fold_indirect_ref (tree t)
13413 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13415 if (sub)
13416 return sub;
13417 else
13418 return t;
13421 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13422 whose result is ignored. The type of the returned tree need not be
13423 the same as the original expression. */
13425 tree
13426 fold_ignored_result (tree t)
13428 if (!TREE_SIDE_EFFECTS (t))
13429 return integer_zero_node;
13431 for (;;)
13432 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13434 case tcc_unary:
13435 t = TREE_OPERAND (t, 0);
13436 break;
13438 case tcc_binary:
13439 case tcc_comparison:
13440 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13441 t = TREE_OPERAND (t, 0);
13442 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13443 t = TREE_OPERAND (t, 1);
13444 else
13445 return t;
13446 break;
13448 case tcc_expression:
13449 switch (TREE_CODE (t))
13451 case COMPOUND_EXPR:
13452 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13453 return t;
13454 t = TREE_OPERAND (t, 0);
13455 break;
13457 case COND_EXPR:
13458 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13459 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13460 return t;
13461 t = TREE_OPERAND (t, 0);
13462 break;
13464 default:
13465 return t;
13467 break;
13469 default:
13470 return t;
13474 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13475 This can only be applied to objects of a sizetype. */
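/* A worked example: round_up (size_int (13), 8) takes the
   power-of-two path below and computes (13 + 7) & -8 = 16, while a
   non-power-of-two divisor such as 6 uses CEIL_DIV_EXPR followed by
   MULT_EXPR, giving 18.  */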
13477 tree
13478 round_up (tree value, int divisor)
13480 tree div = NULL_TREE;
13482 gcc_assert (divisor > 0);
13483 if (divisor == 1)
13484 return value;
13486 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13487 have to do anything. Only do this when we are not given a const,
13488 because in that case, this check is more expensive than just
13489 doing it. */
13490 if (TREE_CODE (value) != INTEGER_CST)
13492 div = build_int_cst (TREE_TYPE (value), divisor);
13494 if (multiple_of_p (TREE_TYPE (value), value, div))
13495 return value;
13498 /* If divisor is a power of two, simplify this to bit manipulation. */
13499 if (divisor == (divisor & -divisor))
13501 tree t;
13503 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13504 value = size_binop (PLUS_EXPR, value, t);
13505 t = build_int_cst (TREE_TYPE (value), -divisor);
13506 value = size_binop (BIT_AND_EXPR, value, t);
13508 else
13510 if (!div)
13511 div = build_int_cst (TREE_TYPE (value), divisor);
13512 value = size_binop (CEIL_DIV_EXPR, value, div);
13513 value = size_binop (MULT_EXPR, value, div);
13516 return value;
13519 /* Likewise, but round down. */
13521 tree
13522 round_down (tree value, int divisor)
13524 tree div = NULL_TREE;
13526 gcc_assert (divisor > 0);
13527 if (divisor == 1)
13528 return value;
13530 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13531 have to do anything. Only do this when we are not given a const,
13532 because in that case, this check is more expensive than just
13533 doing it. */
13534 if (TREE_CODE (value) != INTEGER_CST)
13536 div = build_int_cst (TREE_TYPE (value), divisor);
13538 if (multiple_of_p (TREE_TYPE (value), value, div))
13539 return value;
13542 /* If divisor is a power of two, simplify this to bit manipulation. */
13543 if (divisor == (divisor & -divisor))
13545 tree t;
13547 t = build_int_cst (TREE_TYPE (value), -divisor);
13548 value = size_binop (BIT_AND_EXPR, value, t);
13550 else
13552 if (!div)
13553 div = build_int_cst (TREE_TYPE (value), divisor);
13554 value = size_binop (FLOOR_DIV_EXPR, value, div);
13555 value = size_binop (MULT_EXPR, value, div);
13558 return value;
13561 /* Returns a pointer to the base of the object addressed by EXP and
13562 extracts the information about the offset of the access, storing it
13563 in PBITPOS and POFFSET. */
13565 static tree
13566 split_address_to_core_and_offset (tree exp,
13567 HOST_WIDE_INT *pbitpos, tree *poffset)
13569 tree core;
13570 enum machine_mode mode;
13571 int unsignedp, volatilep;
13572 HOST_WIDE_INT bitsize;
13574 if (TREE_CODE (exp) == ADDR_EXPR)
13576 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13577 poffset, &mode, &unsignedp, &volatilep,
13578 false);
13579 core = build_fold_addr_expr (core);
13581 else
13583 core = exp;
13584 *pbitpos = 0;
13585 *poffset = NULL_TREE;
13588 return core;
13591 /* Returns true if addresses of E1 and E2 differ by a constant, false
13592 otherwise. If they do, E1 - E2 is stored in *DIFF. */
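/* For instance, with E1 == &a[3] and E2 == &a[1] for an array of
   4-byte elements, the cores compare equal and *DIFF is set to 8;
   if either address has a variable offset part, this returns
   false.  */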
13594 bool
13595 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13597 tree core1, core2;
13598 HOST_WIDE_INT bitpos1, bitpos2;
13599 tree toffset1, toffset2, tdiff, type;
13601 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13602 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13604 if (bitpos1 % BITS_PER_UNIT != 0
13605 || bitpos2 % BITS_PER_UNIT != 0
13606 || !operand_equal_p (core1, core2, 0))
13607 return false;
13609 if (toffset1 && toffset2)
13611 type = TREE_TYPE (toffset1);
13612 if (type != TREE_TYPE (toffset2))
13613 toffset2 = fold_convert (type, toffset2);
13615 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13616 if (!cst_and_fits_in_hwi (tdiff))
13617 return false;
13619 *diff = int_cst_value (tdiff);
13621 else if (toffset1 || toffset2)
13623 /* If only one of the offsets is non-constant, the difference cannot
13624 be a constant. */
13625 return false;
13627 else
13628 *diff = 0;
13630 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13631 return true;
13634 /* Simplify the floating point expression EXP when the sign of the
13635 result is not significant. Return NULL_TREE if no simplification
13636 is possible. */
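/* For example, when the caller ignores the sign of the result, an
   expression such as -x * y can be rewritten to x * y here: the
   NEGATE_EXPR only affects the sign, so it is stripped (unless
   sign-dependent rounding must be honored).  */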
13638 tree
13639 fold_strip_sign_ops (tree exp)
13641 tree arg0, arg1;
13643 switch (TREE_CODE (exp))
13645 case ABS_EXPR:
13646 case NEGATE_EXPR:
13647 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13648 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13650 case MULT_EXPR:
13651 case RDIV_EXPR:
13652 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13653 return NULL_TREE;
13654 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13655 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13656 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13657 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13658 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13659 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13660 break;
13662 case COMPOUND_EXPR:
13663 arg0 = TREE_OPERAND (exp, 0);
13664 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13665 if (arg1)
13666 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13667 break;
13669 case COND_EXPR:
13670 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13671 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13672 if (arg0 || arg1)
13673 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13674 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13675 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13676 break;
13678 case CALL_EXPR:
13680 const enum built_in_function fcode = builtin_mathfn_code (exp);
13681 switch (fcode)
13683 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13684 /* Strip copysign function call, return the 1st argument. */
13685 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13686 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13687 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13689 default:
13690 /* Strip sign ops from the argument of "odd" math functions. */
13691 if (negate_mathfn_p (fcode))
13693 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13694 if (arg0)
13695 return build_function_call_expr (get_callee_fndecl (exp),
13696 build_tree_list (NULL_TREE,
13697 arg0));
13699 break;
13702 break;
13704 default:
13705 break;
13707 return NULL_TREE;