/* gcc/fold-const.c — text scraped from a git web viewer
   (official-gcc.git, blob 0908e2840f2f0dbe69201ee34a6b62a16eb73f1c);
   the viewer's navigation header is preserved here as a comment so the
   file remains readable as C.  */
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "rtl.h"
58 #include "expr.h"
59 #include "tm_p.h"
60 #include "toplev.h"
61 #include "ggc.h"
62 #include "hashtab.h"
63 #include "langhooks.h"
64 #include "md5.h"
66 /* Non-zero if we are folding constants inside an initializer; zero
67 otherwise. */
/* NOTE(review): presumably set/cleared by callers around initializer
   folding — the uses are outside this view; confirm against callers.  */
68 int folding_initializer = 0;
70 /* The following constants represent a bit based encoding of GCC's
71 comparison operators. This encoding simplifies transformations
72 on relational comparison operators, such as AND and OR. */
/* Encoding (derivable from the values below): bit 0 = "less than",
   bit 1 = "equal", bit 2 = "greater than", bit 3 = "unordered".
   Each code is the OR of the outcomes for which the comparison holds,
   e.g. COMPCODE_LE = LT|EQ = 3 and COMPCODE_NE = LT|GT|UNORD = 13.
   NOTE(review): the closing `};' of this enum was lost in extraction.  */
73 enum comparison_code {
74 COMPCODE_FALSE = 0,
75 COMPCODE_LT = 1,
76 COMPCODE_EQ = 2,
77 COMPCODE_LE = 3,
78 COMPCODE_GT = 4,
79 COMPCODE_LTGT = 5,
80 COMPCODE_GE = 6,
81 COMPCODE_ORD = 7,
82 COMPCODE_UNORD = 8,
83 COMPCODE_UNLT = 9,
84 COMPCODE_UNEQ = 10,
85 COMPCODE_UNLE = 11,
86 COMPCODE_UNGT = 12,
87 COMPCODE_NE = 13,
88 COMPCODE_UNGE = 14,
89 COMPCODE_TRUE = 15
92 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
93 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
94 static bool negate_mathfn_p (enum built_in_function);
95 static bool negate_expr_p (tree);
96 static tree negate_expr (tree);
97 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
98 static tree associate_trees (tree, tree, enum tree_code, tree);
99 static tree const_binop (enum tree_code, tree, tree, int);
100 static enum comparison_code comparison_to_compcode (enum tree_code);
101 static enum tree_code compcode_to_comparison (enum comparison_code);
102 static tree combine_comparisons (enum tree_code, enum tree_code,
103 enum tree_code, tree, tree, tree);
104 static int truth_value_p (enum tree_code);
105 static int operand_equal_for_comparison_p (tree, tree, tree);
106 static int twoval_comparison_p (tree, tree *, tree *, int *);
107 static tree eval_subst (tree, tree, tree, tree, tree);
108 static tree pedantic_omit_one_operand (tree, tree, tree);
109 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
110 static tree make_bit_field_ref (tree, tree, int, int, int);
111 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
112 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
113 enum machine_mode *, int *, int *,
114 tree *, tree *);
115 static int all_ones_mask_p (tree, int);
116 static tree sign_bit_p (tree, tree);
117 static int simple_operand_p (tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
124 tree);
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
132 static int multiple_of_p (tree, tree, tree);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static bool fold_real_zero_addition_p (tree, tree, int);
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
138 tree, tree, tree);
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (tree, tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 static int native_encode_expr (tree, unsigned char *, int);
146 static tree native_interpret_expr (tree, unsigned char *, int);
149 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
150 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
151 and SUM1. Then this yields nonzero if overflow occurred during the
152 addition.
154 Overflow occurs if A and B have the same sign, but A and SUM differ in
155 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
156 sign. */
/* The arguments must be signed so that the final `< 0' reads the sign
   bit; callers pass HOST_WIDE_INT high words.  */
157 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
159 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
160 We do that by representing the two-word integer in 4 words, with only
161 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
162 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* Mask a value down to its low half-word.  */
164 #define LOWPART(x) \
165 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* Extract the high half-word (the cast makes the shift logical).  */
166 #define HIGHPART(x) \
167 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the half-word representation: 2^(HOST_BITS_PER_WIDE_INT/2).  */
168 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
170 /* Unpack a two-word integer into 4 words.
171 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
172 WORDS points to the array of HOST_WIDE_INTs. */
174 static void
175 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
177 words[0] = LOWPART (low);
178 words[1] = HIGHPART (low);
179 words[2] = LOWPART (hi);
180 words[3] = HIGHPART (hi);
183 /* Pack an array of 4 words into a two-word integer.
184 WORDS points to the array of words.
185 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
187 static void
188 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 HOST_WIDE_INT *hi)
191 *low = words[0] + words[1] * BASE;
192 *hi = words[2] + words[3] * BASE;
195 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
196 in overflow of the value, when >0 we are only interested in signed
197 overflow, for <0 we are interested in any overflow. OVERFLOWED
198 indicates whether overflow has already occurred. CONST_OVERFLOWED
199 indicates whether constant overflow has already occurred. We force
200 T's value to be within range of T's type (by setting to 0 or 1 all
201 the bits outside the type's range). We set TREE_OVERFLOWED if,
202 OVERFLOWED is nonzero,
203 or OVERFLOWABLE is >0 and signed overflow occurs
204 or OVERFLOWABLE is <0 and any overflow occurs
205 We set TREE_CONSTANT_OVERFLOWED if,
206 CONST_OVERFLOWED is nonzero
207 or we set TREE_OVERFLOWED.
208 We return either the original T, or a copy. */
/* NOTE(review): blank and brace-only lines of this definition appear to
   have been lost in extraction; the statement grouping below follows
   the surviving comments and control keywords.  */
210 tree
211 force_fit_type (tree t, int overflowable,
212 bool overflowed, bool overflowed_const)
214 unsigned HOST_WIDE_INT low;
215 HOST_WIDE_INT high;
216 unsigned int prec;
217 int sign_extended_type;
219 gcc_assert (TREE_CODE (t) == INTEGER_CST);
221 low = TREE_INT_CST_LOW (t);
222 high = TREE_INT_CST_HIGH (t);
/* Pointer and offset types are handled with POINTER_SIZE bits of
   precision rather than their TYPE_PRECISION.  */
224 if (POINTER_TYPE_P (TREE_TYPE (t))
225 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
226 prec = POINTER_SIZE;
227 else
228 prec = TYPE_PRECISION (TREE_TYPE (t));
229 /* Size types *are* sign extended. */
230 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
231 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
232 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
234 /* First clear all bits that are beyond the type's precision. */
/* (The >= 2 words case needs no clearing; its empty `;' statement was
   lost in extraction.)  */
236 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
238 else if (prec > HOST_BITS_PER_WIDE_INT)
239 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
240 else
242 high = 0;
243 if (prec < HOST_BITS_PER_WIDE_INT)
244 low &= ~((HOST_WIDE_INT) (-1) << prec);
/* Then, for signed (and sizetype) values, sign-extend from bit
   PREC-1 into the bits just cleared.  */
247 if (!sign_extended_type)
248 /* No sign extension */;
249 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
250 /* Correct width already. */;
251 else if (prec > HOST_BITS_PER_WIDE_INT)
253 /* Sign extend top half? */
254 if (high & ((unsigned HOST_WIDE_INT)1
255 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
256 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
258 else if (prec == HOST_BITS_PER_WIDE_INT)
260 if ((HOST_WIDE_INT)low < 0)
261 high = -1;
263 else
265 /* Sign extend bottom half? */
266 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
268 high = -1;
269 low |= (HOST_WIDE_INT)(-1) << prec;
273 /* If the value changed, return a new node. */
274 if (overflowed || overflowed_const
275 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
277 t = build_int_cst_wide (TREE_TYPE (t), low, high);
/* Set the overflow flags per the rules in the header comment; the
   node is copied first so shared constants are not clobbered.  */
279 if (overflowed
280 || overflowable < 0
281 || (overflowable > 0 && sign_extended_type))
283 t = copy_node (t);
284 TREE_OVERFLOW (t) = 1;
285 TREE_CONSTANT_OVERFLOW (t) = 1;
287 else if (overflowed_const)
289 t = copy_node (t);
290 TREE_CONSTANT_OVERFLOW (t) = 1;
294 return t;
297 /* Add two doubleword integers with doubleword result.
298 Return nonzero if the operation overflows according to UNSIGNED_P.
299 Each argument is given as two `HOST_WIDE_INT' pieces.
300 One argument is L1 and H1; the other, L2 and H2.
301 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
304 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
305 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
306 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
307 bool unsigned_p)
309 unsigned HOST_WIDE_INT l;
310 HOST_WIDE_INT h;
312 l = l1 + l2;
313 h = h1 + h2 + (l < l1);
315 *lv = l;
316 *hv = h;
318 if (unsigned_p)
319 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
320 else
321 return OVERFLOW_SUM_SIGN (h1, h2, h);
324 /* Negate a doubleword integer with doubleword result.
325 Return nonzero if the operation overflows, assuming it's signed.
326 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
327 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
330 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
331 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
333 if (l1 == 0)
335 *lv = 0;
336 *hv = - h1;
337 return (*hv & h1) < 0;
339 else
341 *lv = -l1;
342 *hv = ~h1;
343 return 0;
347 /* Multiply two doubleword integers with doubleword result.
348 Return nonzero if the operation overflows according to UNSIGNED_P.
349 Each argument is given as two `HOST_WIDE_INT' pieces.
350 One argument is L1 and H1; the other, L2 and H2.
351 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the `int' return-type line and the brace-only lines of
   this definition appear to have been lost in extraction.  */
354 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
355 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
356 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
357 bool unsigned_p)
359 HOST_WIDE_INT arg1[4];
360 HOST_WIDE_INT arg2[4];
361 HOST_WIDE_INT prod[4 * 2];
362 unsigned HOST_WIDE_INT carry;
363 int i, j, k;
364 unsigned HOST_WIDE_INT toplow, neglow;
365 HOST_WIDE_INT tophigh, neghigh;
/* Split both operands into four half-word digits each.  */
367 encode (arg1, l1, h1);
368 encode (arg2, l2, h2);
370 memset (prod, 0, sizeof prod);
/* Schoolbook multiply of the half-word digits, accumulating the full
   8-digit (4-word) product into PROD.  */
372 for (i = 0; i < 4; i++)
374 carry = 0;
375 for (j = 0; j < 4; j++)
377 k = i + j;
378 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
379 carry += arg1[i] * arg2[j];
380 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
381 carry += prod[k];
382 prod[k] = LOWPART (carry);
383 carry = HIGHPART (carry);
385 prod[i + 4] = carry;
/* Low doubleword of the product is the result proper; the high
   doubleword (TOPLOW/TOPHIGH) is used only for overflow detection.  */
388 decode (prod, lv, hv);
389 decode (prod + 4, &toplow, &tophigh);
391 /* Unsigned overflow is immediate. */
392 if (unsigned_p)
393 return (toplow | tophigh) != 0;
395 /* Check for signed overflow by calculating the signed representation of the
396 top half of the result; it should agree with the low half's sign bit. */
/* For a negative operand, the signed top half differs from the
   unsigned one by the (negated) other operand; adjust accordingly.  */
397 if (h1 < 0)
399 neg_double (l2, h2, &neglow, &neghigh);
400 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
402 if (h2 < 0)
404 neg_double (l1, h1, &neglow, &neghigh);
405 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is the sign extension of the low half:
   all zeros for a nonnegative result, all ones for a negative one.  */
407 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
410 /* Shift the doubleword integer in L1, H1 left by COUNT places
411 keeping only PREC bits of result.
412 Shift right if COUNT is negative.
413 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
414 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): brace-only lines of this definition appear to have been
   lost in extraction.  */
416 void
417 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
418 HOST_WIDE_INT count, unsigned int prec,
419 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
421 unsigned HOST_WIDE_INT signmask;
/* A negative count is delegated to the right-shift routine.  */
423 if (count < 0)
425 rshift_double (l1, h1, -count, prec, lv, hv, arith);
426 return;
429 if (SHIFT_COUNT_TRUNCATED)
430 count %= prec;
432 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
434 /* Shifting by the host word size is undefined according to the
435 ANSI standard, so we must handle this as a special case. */
436 *hv = 0;
437 *lv = 0;
439 else if (count >= HOST_BITS_PER_WIDE_INT)
441 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
442 *lv = 0;
444 else
/* The double right shift (by COUNT-1 complement, then by 1) avoids an
   undefined full-word shift when COUNT is 0.  */
446 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
447 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
448 *lv = l1 << count;
451 /* Sign extend all bits that are beyond the precision. */
/* SIGNMASK is all ones if bit PREC-1 of the result is set, else 0.  */
453 signmask = -((prec > HOST_BITS_PER_WIDE_INT
454 ? ((unsigned HOST_WIDE_INT) *hv
455 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
456 : (*lv >> (prec - 1))) & 1);
/* (The full-width case needs no masking; its empty statement was lost
   in extraction.)  */
458 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
460 else if (prec >= HOST_BITS_PER_WIDE_INT)
462 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
463 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
465 else
467 *hv = signmask;
468 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
469 *lv |= signmask << prec;
473 /* Shift the doubleword integer in L1, H1 right by COUNT places
474 keeping only PREC bits of result. COUNT must be positive.
475 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
476 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): brace-only lines of this definition appear to have been
   lost in extraction.  */
478 void
479 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
480 HOST_WIDE_INT count, unsigned int prec,
481 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
482 int arith)
484 unsigned HOST_WIDE_INT signmask;
/* SIGNMASK is all ones for an arithmetic shift of a negative value,
   otherwise 0.  */
486 signmask = (arith
487 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
488 : 0);
490 if (SHIFT_COUNT_TRUNCATED)
491 count %= prec;
493 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
495 /* Shifting by the host word size is undefined according to the
496 ANSI standard, so we must handle this as a special case. */
497 *hv = 0;
498 *lv = 0;
500 else if (count >= HOST_BITS_PER_WIDE_INT)
502 *hv = 0;
503 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
505 else
/* The double left shift avoids an undefined full-word shift when
   COUNT is 0.  */
507 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
508 *lv = ((l1 >> count)
509 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
512 /* Zero / sign extend all bits that are beyond the precision. */
514 if (count >= (HOST_WIDE_INT)prec)
516 *hv = signmask;
517 *lv = signmask;
/* (The (prec - count) >= 2 words case needs no masking; its empty
   statement was lost in extraction.)  */
519 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
521 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
523 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
524 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
526 else
528 *hv = signmask;
529 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
530 *lv |= signmask << (prec - count);
534 /* Rotate the doubleword integer in L1, H1 left by COUNT places
535 keeping only PREC bits of result.
536 Rotate right if COUNT is negative.
537 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
539 void
540 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
541 HOST_WIDE_INT count, unsigned int prec,
542 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
544 unsigned HOST_WIDE_INT s1l, s2l;
545 HOST_WIDE_INT s1h, s2h;
547 count %= prec;
548 if (count < 0)
549 count += prec;
551 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
552 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
553 *lv = s1l | s2l;
554 *hv = s1h | s2h;
557 /* Rotate the doubleword integer in L1, H1 left by COUNT places
558 keeping only PREC bits of result. COUNT must be positive.
559 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
561 void
562 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
563 HOST_WIDE_INT count, unsigned int prec,
564 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
566 unsigned HOST_WIDE_INT s1l, s2l;
567 HOST_WIDE_INT s1h, s2h;
569 count %= prec;
570 if (count < 0)
571 count += prec;
573 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
574 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
575 *lv = s1l | s2l;
576 *hv = s1h | s2h;
579 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
580 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
581 CODE is a tree code for a kind of division, one of
582 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
583 or EXACT_DIV_EXPR
584 It controls how the quotient is rounded to an integer.
585 Return nonzero if the operation overflows.
586 UNS nonzero says do unsigned division. */
/* NOTE(review): the `int' return-type line and the brace-only lines of
   this definition appear to have been lost in extraction.  The full
   division is Knuth's Algorithm D on half-word digits.  */
589 div_and_round_double (enum tree_code code, int uns,
590 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
591 HOST_WIDE_INT hnum_orig,
592 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
593 HOST_WIDE_INT hden_orig,
594 unsigned HOST_WIDE_INT *lquo,
595 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
596 HOST_WIDE_INT *hrem)
598 int quo_neg = 0;
599 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
600 HOST_WIDE_INT den[4], quo[4];
601 int i, j;
602 unsigned HOST_WIDE_INT work;
603 unsigned HOST_WIDE_INT carry = 0;
604 unsigned HOST_WIDE_INT lnum = lnum_orig;
605 HOST_WIDE_INT hnum = hnum_orig;
606 unsigned HOST_WIDE_INT lden = lden_orig;
607 HOST_WIDE_INT hden = hden_orig;
608 int overflow = 0;
/* Division by zero is flagged as overflow and computed as if the
   divisor were 1, so the routine still produces a defined result.  */
610 if (hden == 0 && lden == 0)
611 overflow = 1, lden = 1;
613 /* Calculate quotient sign and convert operands to unsigned. */
614 if (!uns)
616 if (hnum < 0)
618 quo_neg = ~ quo_neg;
619 /* (minimum integer) / (-1) is the only overflow case. */
620 if (neg_double (lnum, hnum, &lnum, &hnum)
621 && ((HOST_WIDE_INT) lden & hden) == -1)
622 overflow = 1;
624 if (hden < 0)
626 quo_neg = ~ quo_neg;
627 neg_double (lden, hden, &lden, &hden);
631 if (hnum == 0 && hden == 0)
632 { /* single precision */
633 *hquo = *hrem = 0;
634 /* This unsigned division rounds toward zero. */
635 *lquo = lnum / lden;
636 goto finish_up;
639 if (hnum == 0)
640 { /* trivial case: dividend < divisor */
641 /* hden != 0 already checked. */
642 *hquo = *lquo = 0;
643 *hrem = hnum;
644 *lrem = lnum;
645 goto finish_up;
648 memset (quo, 0, sizeof quo);
650 memset (num, 0, sizeof num); /* to zero 9th element */
651 memset (den, 0, sizeof den);
653 encode (num, lnum, hnum);
654 encode (den, lden, hden);
656 /* Special code for when the divisor < BASE. */
657 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
659 /* hnum != 0 already checked. */
660 for (i = 4 - 1; i >= 0; i--)
662 work = num[i] + carry * BASE;
663 quo[i] = work / lden;
664 carry = work % lden;
667 else
669 /* Full double precision division,
670 with thanks to Don Knuth's "Seminumerical Algorithms". */
671 int num_hi_sig, den_hi_sig;
672 unsigned HOST_WIDE_INT quo_est, scale;
674 /* Find the highest nonzero divisor digit. */
675 for (i = 4 - 1;; i--)
676 if (den[i] != 0)
678 den_hi_sig = i;
679 break;
682 /* Insure that the first digit of the divisor is at least BASE/2.
683 This is required by the quotient digit estimation algorithm. */
685 scale = BASE / (den[den_hi_sig] + 1);
686 if (scale > 1)
687 { /* scale divisor and dividend */
688 carry = 0;
689 for (i = 0; i <= 4 - 1; i++)
691 work = (num[i] * scale) + carry;
692 num[i] = LOWPART (work);
693 carry = HIGHPART (work);
696 num[4] = carry;
697 carry = 0;
698 for (i = 0; i <= 4 - 1; i++)
700 work = (den[i] * scale) + carry;
701 den[i] = LOWPART (work);
702 carry = HIGHPART (work);
703 if (den[i] != 0) den_hi_sig = i;
707 num_hi_sig = 4;
709 /* Main loop */
710 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
712 /* Guess the next quotient digit, quo_est, by dividing the first
713 two remaining dividend digits by the high order quotient digit.
714 quo_est is never low and is at most 2 high. */
715 unsigned HOST_WIDE_INT tmp;
717 num_hi_sig = i + den_hi_sig + 1;
718 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
719 if (num[num_hi_sig] != den[den_hi_sig])
720 quo_est = work / den[den_hi_sig];
721 else
722 quo_est = BASE - 1;
724 /* Refine quo_est so it's usually correct, and at most one high. */
725 tmp = work - quo_est * den[den_hi_sig];
726 if (tmp < BASE
727 && (den[den_hi_sig - 1] * quo_est
728 > (tmp * BASE + num[num_hi_sig - 2])))
729 quo_est--;
731 /* Try QUO_EST as the quotient digit, by multiplying the
732 divisor by QUO_EST and subtracting from the remaining dividend.
733 Keep in mind that QUO_EST is the I - 1st digit. */
735 carry = 0;
736 for (j = 0; j <= den_hi_sig; j++)
738 work = quo_est * den[j] + carry;
739 carry = HIGHPART (work);
740 work = num[i + j] - LOWPART (work);
741 num[i + j] = LOWPART (work);
742 carry += HIGHPART (work) != 0;
745 /* If quo_est was high by one, then num[i] went negative and
746 we need to correct things. */
747 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
749 quo_est--;
750 carry = 0; /* add divisor back in */
751 for (j = 0; j <= den_hi_sig; j++)
753 work = num[i + j] + den[j] + carry;
754 carry = HIGHPART (work);
755 num[i + j] = LOWPART (work);
758 num [num_hi_sig] += carry;
761 /* Store the quotient digit. */
762 quo[i] = quo_est;
766 decode (quo, lquo, hquo);
768 finish_up:
769 /* If result is negative, make it so. */
770 if (quo_neg)
771 neg_double (*lquo, *hquo, lquo, hquo);
773 /* Compute trial remainder: rem = num - (quo * den) */
774 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
775 neg_double (*lrem, *hrem, lrem, hrem);
776 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust the truncating quotient per CODE's rounding rule; the cases
   that need no adjustment return immediately with the trial remainder.  */
778 switch (code)
780 case TRUNC_DIV_EXPR:
781 case TRUNC_MOD_EXPR: /* round toward zero */
782 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
783 return overflow;
785 case FLOOR_DIV_EXPR:
786 case FLOOR_MOD_EXPR: /* round toward negative infinity */
787 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
789 /* quo = quo - 1; */
790 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
791 lquo, hquo);
793 else
794 return overflow;
795 break;
797 case CEIL_DIV_EXPR:
798 case CEIL_MOD_EXPR: /* round toward positive infinity */
799 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
801 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
802 lquo, hquo);
804 else
805 return overflow;
806 break;
808 case ROUND_DIV_EXPR:
809 case ROUND_MOD_EXPR: /* round to closest integer */
811 unsigned HOST_WIDE_INT labs_rem = *lrem;
812 HOST_WIDE_INT habs_rem = *hrem;
813 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
814 HOST_WIDE_INT habs_den = hden, htwice;
816 /* Get absolute values. */
817 if (*hrem < 0)
818 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
819 if (hden < 0)
820 neg_double (lden, hden, &labs_den, &habs_den);
822 /* If (2 * abs (lrem) >= abs (lden)) */
823 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
824 labs_rem, habs_rem, &ltwice, &htwice);
826 if (((unsigned HOST_WIDE_INT) habs_den
827 < (unsigned HOST_WIDE_INT) htwice)
828 || (((unsigned HOST_WIDE_INT) habs_den
829 == (unsigned HOST_WIDE_INT) htwice)
830 && (labs_den < ltwice)))
832 if (*hquo < 0)
833 /* quo = quo - 1; */
834 add_double (*lquo, *hquo,
835 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
836 else
837 /* quo = quo + 1; */
838 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
839 lquo, hquo);
841 else
842 return overflow;
844 break;
846 default:
847 gcc_unreachable ();
850 /* Compute true remainder: rem = num - (quo * den) */
851 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
852 neg_double (*lrem, *hrem, lrem, hrem);
853 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
854 return overflow;
857 /* If ARG2 divides ARG1 with zero remainder, carries out the division
858 of type CODE and returns the quotient.
859 Otherwise returns NULL_TREE. */
861 static tree
862 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
864 unsigned HOST_WIDE_INT int1l, int2l;
865 HOST_WIDE_INT int1h, int2h;
866 unsigned HOST_WIDE_INT quol, reml;
867 HOST_WIDE_INT quoh, remh;
868 tree type = TREE_TYPE (arg1);
869 int uns = TYPE_UNSIGNED (type);
871 int1l = TREE_INT_CST_LOW (arg1);
872 int1h = TREE_INT_CST_HIGH (arg1);
873 int2l = TREE_INT_CST_LOW (arg2);
874 int2h = TREE_INT_CST_HIGH (arg2);
876 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
877 &quol, &quoh, &reml, &remh);
878 if (remh != 0 || reml != 0)
879 return NULL_TREE;
881 return build_int_cst_wide (type, quol, quoh);
884 /* Return true if the built-in mathematical function specified by CODE
885 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): CASE_FLT_FN presumably expands to the float, double and
   long double case labels for each builtin — confirm against its
   definition elsewhere in GCC.  */
887 static bool
888 negate_mathfn_p (enum built_in_function code)
890 switch (code)
892 CASE_FLT_FN (BUILT_IN_ASIN):
893 CASE_FLT_FN (BUILT_IN_ASINH):
894 CASE_FLT_FN (BUILT_IN_ATAN):
895 CASE_FLT_FN (BUILT_IN_ATANH):
896 CASE_FLT_FN (BUILT_IN_CBRT):
897 CASE_FLT_FN (BUILT_IN_ERF):
898 CASE_FLT_FN (BUILT_IN_LLROUND):
899 CASE_FLT_FN (BUILT_IN_LROUND):
900 CASE_FLT_FN (BUILT_IN_ROUND):
901 CASE_FLT_FN (BUILT_IN_SIN):
902 CASE_FLT_FN (BUILT_IN_SINH):
903 CASE_FLT_FN (BUILT_IN_TAN):
904 CASE_FLT_FN (BUILT_IN_TANH):
905 CASE_FLT_FN (BUILT_IN_TRUNC):
906 return true;
/* The rint-style functions are treated as odd only when
   flag_rounding_math is unset.  */
908 CASE_FLT_FN (BUILT_IN_LLRINT):
909 CASE_FLT_FN (BUILT_IN_LRINT):
910 CASE_FLT_FN (BUILT_IN_NEARBYINT):
911 CASE_FLT_FN (BUILT_IN_RINT):
912 return !flag_rounding_math;
914 default:
915 break;
917 return false;
920 /* Check whether we may negate an integer constant T without causing
921 overflow. */
923 bool
924 may_negate_without_overflow_p (tree t)
926 unsigned HOST_WIDE_INT val;
927 unsigned int prec;
928 tree type;
930 gcc_assert (TREE_CODE (t) == INTEGER_CST);
932 type = TREE_TYPE (t);
933 if (TYPE_UNSIGNED (type))
934 return false;
936 prec = TYPE_PRECISION (type);
937 if (prec > HOST_BITS_PER_WIDE_INT)
939 if (TREE_INT_CST_LOW (t) != 0)
940 return true;
941 prec -= HOST_BITS_PER_WIDE_INT;
942 val = TREE_INT_CST_HIGH (t);
944 else
945 val = TREE_INT_CST_LOW (t);
946 if (prec < HOST_BITS_PER_WIDE_INT)
947 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
948 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
951 /* Determine whether an expression T can be cheaply negated using
952 the function negate_expr without introducing undefined overflow. */
/* NOTE(review): brace-only lines of this definition appear to have been
   lost in extraction.  */
954 static bool
955 negate_expr_p (tree t)
957 tree type;
959 if (t == 0)
960 return false;
962 type = TREE_TYPE (t);
964 STRIP_SIGN_NOPS (t);
965 switch (TREE_CODE (t))
/* Unsigned or -fwrapv (without -ftrapv) arithmetic wraps, so any
   constant may be negated.  */
967 case INTEGER_CST:
968 if (TYPE_UNSIGNED (type)
969 || (flag_wrapv && ! flag_trapv))
970 return true;
972 /* Check that -CST will not overflow type. */
973 return may_negate_without_overflow_p (t);
974 case BIT_NOT_EXPR:
975 return INTEGRAL_TYPE_P (type)
976 && (TYPE_UNSIGNED (type)
977 || (flag_wrapv && !flag_trapv));
979 case REAL_CST:
980 case NEGATE_EXPR:
981 return true;
983 case COMPLEX_CST:
984 return negate_expr_p (TREE_REALPART (t))
985 && negate_expr_p (TREE_IMAGPART (t));
987 case PLUS_EXPR:
988 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
989 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
990 return false;
991 /* -(A + B) -> (-B) - A. */
992 if (negate_expr_p (TREE_OPERAND (t, 1))
993 && reorder_operands_p (TREE_OPERAND (t, 0),
994 TREE_OPERAND (t, 1)))
995 return true;
996 /* -(A + B) -> (-A) - B. */
997 return negate_expr_p (TREE_OPERAND (t, 0));
999 case MINUS_EXPR:
1000 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1001 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1002 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1003 && reorder_operands_p (TREE_OPERAND (t, 0),
1004 TREE_OPERAND (t, 1));
1006 case MULT_EXPR:
1007 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1008 break;
1010 /* Fall through. */
/* For products and quotients it suffices to negate one operand.  */
1012 case RDIV_EXPR:
1013 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1014 return negate_expr_p (TREE_OPERAND (t, 1))
1015 || negate_expr_p (TREE_OPERAND (t, 0));
1016 break;
1018 case TRUNC_DIV_EXPR:
1019 case ROUND_DIV_EXPR:
1020 case FLOOR_DIV_EXPR:
1021 case CEIL_DIV_EXPR:
1022 case EXACT_DIV_EXPR:
1023 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
1024 break;
1025 return negate_expr_p (TREE_OPERAND (t, 1))
1026 || negate_expr_p (TREE_OPERAND (t, 0));
1028 case NOP_EXPR:
1029 /* Negate -((double)float) as (double)(-float). */
1030 if (TREE_CODE (type) == REAL_TYPE)
1032 tree tem = strip_float_extensions (t);
1033 if (tem != t)
1034 return negate_expr_p (tem);
1036 break;
1038 case CALL_EXPR:
1039 /* Negate -f(x) as f(-x). */
1040 if (negate_mathfn_p (builtin_mathfn_code (t)))
1041 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1042 break;
1044 case RSHIFT_EXPR:
1045 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1046 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1048 tree op1 = TREE_OPERAND (t, 1);
1049 if (TREE_INT_CST_HIGH (op1) == 0
1050 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1051 == TREE_INT_CST_LOW (op1))
1052 return true;
1054 break;
1056 default:
1057 break;
1059 return false;
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Keep the folded constant unless negating it overflowed and
	 -ftrapv requires the runtime trap to be preserved.  */
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	/* Negate both parts; only fold if each part folded to a
	   constant, i.e. neither negation trapped or failed.  */
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      /* - (- A) -> A.  */
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  /* Push the negation into whichever operand is negatable,
	     preferring the second.  */
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Not valid for unsigned or -fwrapv arithmetic.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when shifting by precision - 1, i.e. extracting the
	     sign bit; then the shift in the other signedness yields
	     the negated value.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1234 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1235 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1236 return NULL_TREE. */
1238 static tree
1239 negate_expr (tree t)
1241 tree type, tem;
1243 if (t == NULL_TREE)
1244 return NULL_TREE;
1246 type = TREE_TYPE (t);
1247 STRIP_SIGN_NOPS (t);
1249 tem = fold_negate_expr (t);
1250 if (!tem)
1251 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1252 return fold_convert (type, tem);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* NEG1_P: the second operand of IN carries an implicit negation
	 (IN is a subtraction).  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated literal moves to
	 *MINUS_LITP instead of being rewritten.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole decomposition swaps the LITP/MINUS_LITP
	 roles and negates the other two parts.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1350 /* Re-associate trees split by the above function. T1 and T2 are either
1351 expressions to associate or null. Return the new expression, if any. If
1352 we build an operation, do it in TYPE and with CODE. */
1354 static tree
1355 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1357 if (t1 == 0)
1358 return t2;
1359 else if (t2 == 0)
1360 return t1;
1362 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1363 try to fold this since we will have infinite recursion. But do
1364 deal with any NEGATE_EXPRs. */
1365 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1366 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1368 if (code == PLUS_EXPR)
1370 if (TREE_CODE (t1) == NEGATE_EXPR)
1371 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1372 fold_convert (type, TREE_OPERAND (t1, 0)));
1373 else if (TREE_CODE (t2) == NEGATE_EXPR)
1374 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1375 fold_convert (type, TREE_OPERAND (t2, 0)));
1376 else if (integer_zerop (t2))
1377 return fold_convert (type, t1);
1379 else if (code == MINUS_EXPR)
1381 if (integer_zerop (t2))
1382 return fold_convert (type, t1);
1385 return build2 (code, type, fold_convert (type, t1),
1386 fold_convert (type, t2));
1389 return fold_build2 (code, type, fold_convert (type, t1),
1390 fold_convert (type, t2));
1393 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1394 for use in int_const_binop, size_binop and size_diffop. */
1396 static bool
1397 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1399 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1400 return false;
1401 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1402 return false;
1404 switch (code)
1406 case LSHIFT_EXPR:
1407 case RSHIFT_EXPR:
1408 case LROTATE_EXPR:
1409 case RROTATE_EXPR:
1410 return true;
1412 default:
1413 break;
1416 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1417 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1418 && TYPE_MODE (type1) == TYPE_MODE (type2);
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Values are kept as low/high HOST_WIDE_INT word pairs throughout.  */
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count; fall through.  */
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      /* Likewise, a right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* Division by zero is not foldable.  */
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      /* The quotient is discarded; only the remainder is kept.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW is temporarily used as "ARG1 < ARG2" here.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      /* Overflow flags are the union of the operands' flags.  */
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  /* Addition and subtraction act component-wise.  */
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying with the conjugate and dividing
	       each component by |arg2|^2.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Only fold if both parts folded (recursive calls may return
	 NULL_TREE).  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
1772 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1773 indicates which particular sizetype to create. */
1775 tree
1776 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1778 return build_int_cst (sizetype_tab[(int) kind], number);
1781 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1782 is a tree code. The type of the result is taken from the operands.
1783 Both must be equivalent integer types, ala int_binop_types_match_p.
1784 If the operands are constant, so is the result. */
1786 tree
1787 size_binop (enum tree_code code, tree arg0, tree arg1)
1789 tree type = TREE_TYPE (arg0);
1791 if (arg0 == error_mark_node || arg1 == error_mark_node)
1792 return error_mark_node;
1794 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1795 TREE_TYPE (arg1)));
1797 /* Handle the special case of two integer constants faster. */
1798 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1800 /* And some specific cases even faster than that. */
1801 if (code == PLUS_EXPR && integer_zerop (arg0))
1802 return arg1;
1803 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1804 && integer_zerop (arg1))
1805 return arg0;
1806 else if (code == MULT_EXPR && integer_onep (arg0))
1807 return arg1;
1809 /* Handle general case of two integer constants. */
1810 return int_const_binop (code, arg0, arg1, 0);
1813 return fold_build2 (code, type, arg0, arg1);
1816 /* Given two values, either both of sizetype or both of bitsizetype,
1817 compute the difference between the two values. Return the value
1818 in signed type corresponding to the type of the operands. */
1820 tree
1821 size_diffop (tree arg0, tree arg1)
1823 tree type = TREE_TYPE (arg0);
1824 tree ctype;
1826 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1827 TREE_TYPE (arg1)));
1829 /* If the type is already signed, just do the simple thing. */
1830 if (!TYPE_UNSIGNED (type))
1831 return size_binop (MINUS_EXPR, arg0, arg1);
1833 if (type == sizetype)
1834 ctype = ssizetype;
1835 else if (type == bitsizetype)
1836 ctype = sbitsizetype;
1837 else
1838 ctype = lang_hooks.types.signed_type (type);
1840 /* If either operand is not a constant, do the conversions to the signed
1841 type and subtract. The hardware will do the right thing with any
1842 overflow in the subtraction. */
1843 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1844 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1845 fold_convert (ctype, arg1));
1847 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1848 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1849 overflow) and negate (which can't either). Special-case a result
1850 of zero while we're here. */
1851 if (tree_int_cst_equal (arg0, arg1))
1852 return build_int_cst (ctype, 0);
1853 else if (tree_int_cst_lt (arg1, arg0))
1854 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1855 else
1856 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1857 fold_convert (ctype, size_binop (MINUS_EXPR,
1858 arg1, arg0)));
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      /* Overflow if the value looks negative (high word
			 set) and we convert from an unsigned source to a
			 signed target, or if ARG1 already overflowed.  */
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  /* Saturate to the type's minimum value.  */
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      /* Saturate to the type's maximum value.  */
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
1967 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1968 to another floating point type. */
1970 static tree
1971 fold_convert_const_real_from_real (tree type, tree arg1)
1973 REAL_VALUE_TYPE value;
1974 tree t;
1976 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1977 t = build_real (type, value);
1979 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1980 TREE_CONSTANT_OVERFLOW (t)
1981 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1982 return t;
1985 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1986 type TYPE. If no simplification can be done return NULL_TREE. */
1988 static tree
1989 fold_convert_const (enum tree_code code, tree type, tree arg1)
1991 if (TREE_TYPE (arg1) == type)
1992 return arg1;
1994 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1996 if (TREE_CODE (arg1) == INTEGER_CST)
1997 return fold_convert_const_int_from_int (type, arg1);
1998 else if (TREE_CODE (arg1) == REAL_CST)
1999 return fold_convert_const_int_from_real (code, type, arg1);
2001 else if (TREE_CODE (type) == REAL_TYPE)
2003 if (TREE_CODE (arg1) == INTEGER_CST)
2004 return build_real_from_int_cst (type, arg1);
2005 if (TREE_CODE (arg1) == REAL_CST)
2006 return fold_convert_const_real_from_real (type, arg1);
2008 return NULL_TREE;
2011 /* Construct a vector of zero elements of vector type TYPE. */
2013 static tree
2014 build_zero_vector (tree type)
2016 tree elem, list;
2017 int i, units;
2019 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2020 units = TYPE_VECTOR_SUBPARTS (type);
2022 list = NULL_TREE;
2023 for (i = 0; i < units; i++)
2024 list = tree_cons (NULL_TREE, elem, list);
2025 return build_vector (type, list);
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Same main variant or otherwise compatible types need only a NOP.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  /* Converting a complex to a scalar keeps only the real part.  */
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  /* Scalar -> complex: the imaginary part is zero.  */
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* ARG is used twice (real and imaginary part), so wrap it
	       in a SAVE_EXPR to evaluate it only once.  */
	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
2154 /* Return false if expr can be assumed not to be an lvalue, true
2155 otherwise. */
2157 static bool
2158 maybe_lvalue_p (tree x)
2160 /* We only need to wrap lvalue tree codes. */
2161 switch (TREE_CODE (x))
2163 case VAR_DECL:
2164 case PARM_DECL:
2165 case RESULT_DECL:
2166 case LABEL_DECL:
2167 case FUNCTION_DECL:
2168 case SSA_NAME:
2170 case COMPONENT_REF:
2171 case INDIRECT_REF:
2172 case ALIGN_INDIRECT_REF:
2173 case MISALIGNED_INDIRECT_REF:
2174 case ARRAY_REF:
2175 case ARRAY_RANGE_REF:
2176 case BIT_FIELD_REF:
2177 case OBJ_TYPE_REF:
2179 case REALPART_EXPR:
2180 case IMAGPART_EXPR:
2181 case PREINCREMENT_EXPR:
2182 case PREDECREMENT_EXPR:
2183 case SAVE_EXPR:
2184 case TRY_CATCH_EXPR:
2185 case WITH_CLEANUP_EXPR:
2186 case COMPOUND_EXPR:
2187 case MODIFY_EXPR:
2188 case GIMPLE_MODIFY_STMT:
2189 case TARGET_EXPR:
2190 case COND_EXPR:
2191 case BIND_EXPR:
2192 case MIN_EXPR:
2193 case MAX_EXPR:
2194 break;
2196 default:
2197 /* Assume the worst for front-end tree codes. */
2198 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2199 break;
2200 return false;
2203 return true;
2206 /* Return an expr equal to X but certainly not valid as an lvalue. */
2208 tree
2209 non_lvalue (tree x)
2211 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2212 us. */
2213 if (in_gimple_form)
2214 return x;
2216 if (! maybe_lvalue_p (x))
2217 return x;
2218 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Set by the front end.  */

int pedantic_lvalues;
2226 /* When pedantic, return an expr equal to X but certainly not valid as a
2227 pedantic lvalue. Otherwise, return X. */
2229 static tree
2230 pedantic_non_lvalue (tree x)
2232 if (pedantic_lvalues)
2233 return non_lvalue (x);
2234 else
2235 return x;
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With NaNs and -ftrapping-math, inverting an ordered comparison would
     change which operand pairs trap (e.g. !(a < b) is a >= b OR unordered),
     so refuse the inversion entirely.  */
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    /* For the ordered comparisons, the NaN-aware inverse is the
       corresponding unordered code; without NaNs the plain opposite
       code suffices.  */
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    /* Symmetric comparisons are unchanged by swapping their operands.  */
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    /* The remaining codes map to their mirror image.  */
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      /* Only comparison codes may be translated.  */
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      /* COMPCODE_TRUE/COMPCODE_FALSE and other combinations have no
	 tree_code equivalent; callers must handle them beforehand.  */
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* In the compcode encoding, AND/OR of two comparisons on the same
     operands is simply the bitwise AND/OR of their encodings.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on NaN operands
	 unless it is an unordered variant, EQ, or ORD.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* Fold the always-true/always-false combinations to constants;
     otherwise rebuild a single comparison from the combined encoding.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
2489 /* Return nonzero if CODE is a tree code that represents a truth value. */
2491 static int
2492 truth_value_p (enum tree_code code)
2494 return (TREE_CODE_CLASS (code) == tcc_comparison
2495 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2496 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2497 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;


	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  /* Vectors are equal if their element lists are the same length
	     and pairwise equal.  */
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (!OP_SAME (0))
	    return 0;

	  {
	    /* Only const (and, with OEP_PURE_SAME, pure) calls can be
	       assumed to return the same value for the same arguments.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* Shortening only ever happens to integral operands.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;	/* Already recorded; nothing to do.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;	/* Already recorded; nothing to do.  */
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;	/* Already recorded; nothing to do.  */
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;	/* Already recorded; nothing to do.  */
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for the substituted result.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ??? */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
3039 /* Return a tree for the case when the result of an expression is RESULT
3040 converted to TYPE and OMITTED was previously an operand of the expression
3041 but is now not needed (e.g., we folded OMITTED * 0).
3043 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3044 the conversion of RESULT to TYPE. */
3046 tree
3047 omit_one_operand (tree type, tree result, tree omitted)
3049 tree t = fold_convert (type, result);
3051 if (TREE_SIDE_EFFECTS (omitted))
3052 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3054 return non_lvalue (t);
3057 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3059 static tree
3060 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3062 tree t = fold_convert (type, result);
3064 if (TREE_SIDE_EFFECTS (omitted))
3065 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3067 return pedantic_non_lvalue (t);
3070 /* Return a tree for the case when the result of an expression is RESULT
3071 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3072 of the expression but are now not needed.
3074 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3075 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3076 evaluated before OMITTED2. Otherwise, if neither has side effects,
3077 just do the conversion of RESULT to TYPE. */
3079 tree
3080 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3082 tree t = fold_convert (type, result);
3084 if (TREE_SIDE_EFFECTS (omitted2))
3085 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3086 if (TREE_SIDE_EFFECTS (omitted1))
3087 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3089 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   Returns NULL_TREE if no simplification is possible.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  if (code == ERROR_MARK)
	    return NULL_TREE;
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      /* Only the value operand is negated; the side-effect operand
	 is kept as-is.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      /* FALLTHRU: non-boolean NOP_EXPRs are handled like conversions.  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      /* !(x & 1) is x == 0 for a truth value x.  */
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
3222 /* Return a simplified tree node for the truth-negation of ARG. This
3223 never alters ARG itself. We assume that ARG is an operation that
3224 returns a truth value (0 or 1).
3226 FIXME: one would think we would fold the result, but it causes
3227 problems with the dominator optimizer. */
3229 tree
3230 invert_truthvalue (tree arg)
3232 tree tem;
3234 if (TREE_CODE (arg) == ERROR_MARK)
3235 return arg;
3237 tem = fold_truth_not_expr (arg);
3238 if (!tem)
3239 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3241 return tem;
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  /* Both inner operations must be the same kind of AND/IOR, and must
     differ from CODE itself, for the distribution to apply.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  /* Find the operand the two inner operations share, trying all four
     pairings; LEFT and RIGHT are the two non-shared operands.  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
3296 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3297 with code CODE. This optimization is unsafe. */
3298 static tree
3299 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3301 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3302 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3304 /* (A / C) +- (B / C) -> (A +- B) / C. */
3305 if (mul0 == mul1
3306 && operand_equal_p (TREE_OPERAND (arg0, 1),
3307 TREE_OPERAND (arg1, 1), 0))
3308 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3309 fold_build2 (code, type,
3310 TREE_OPERAND (arg0, 0),
3311 TREE_OPERAND (arg1, 0)),
3312 TREE_OPERAND (arg0, 1));
3314 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3315 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3316 TREE_OPERAND (arg1, 0), 0)
3317 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3318 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3320 REAL_VALUE_TYPE r0, r1;
3321 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3322 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3323 if (!mul0)
3324 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3325 if (!mul1)
3326 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3327 real_arithmetic (&r0, code, &r0, &r1);
3328 return fold_build2 (MULT_EXPR, type,
3329 TREE_OPERAND (arg0, 0),
3330 build_real (type, r0));
3333 return NULL_TREE;
3336 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3337 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3339 static tree
3340 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3341 int unsignedp)
3343 tree result;
3345 if (bitpos == 0)
3347 tree size = TYPE_SIZE (TREE_TYPE (inner));
3348 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3349 || POINTER_TYPE_P (TREE_TYPE (inner)))
3350 && host_integerp (size, 0)
3351 && tree_low_cst (size, 0) == bitsize)
3352 return fold_convert (type, inner);
3355 result = build3 (BIT_FIELD_REF, type, inner,
3356 size_int (bitsize), bitsize_int (bitpos));
3358 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3360 return result;
3363 /* Optimize a bit-field compare.
3365 There are two cases: First is a compare against a constant and the
3366 second is a comparison of two items where the fields are at the same
3367 bit position relative to the start of a chunk (byte, halfword, word)
3368 large enough to contain it. In these cases we can avoid the shift
3369 implicit in bitfield extractions.
3371 For constants, we emit a compare of the shifted constant with the
3372 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3373 compared. For two fields at the same position, we do the ANDs with the
3374 similar mask and compare the result of the ANDs.
3376 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3377 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3378 are the left and right operands of the comparison, respectively.
3380 If the optimization described above can be done, we return the resulting
3381 tree. Otherwise we return zero. */
3383 static tree
3384 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3385 tree lhs, tree rhs)
3387 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3388 tree type = TREE_TYPE (lhs);
3389 tree signed_type, unsigned_type;
3390 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3391 enum machine_mode lmode, rmode, nmode;
3392 int lunsignedp, runsignedp;
3393 int lvolatilep = 0, rvolatilep = 0;
3394 tree linner, rinner = NULL_TREE;
3395 tree mask;
3396 tree offset;
3398 /* Get all the information about the extractions being done. If the bit size
3399 if the same as the size of the underlying object, we aren't doing an
3400 extraction at all and so can do nothing. We also don't want to
3401 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3402 then will no longer be able to replace it. */
3403 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3404 &lunsignedp, &lvolatilep, false);
/* Bail out on variable offsets too: a nonzero OFFSET means the field
   position is not a compile-time constant.  */
3405 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3406 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3407 return 0;
3409 if (!const_p)
3411 /* If this is not a constant, we can only do something if bit positions,
3412 sizes, and signedness are the same. */
3413 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3414 &runsignedp, &rvolatilep, false);
3416 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3417 || lunsignedp != runsignedp || offset != 0
3418 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3419 return 0;
3422 /* See if we can find a mode to refer to this field. We should be able to,
3423 but fail if we can't. */
3424 nmode = get_best_mode (lbitsize, lbitpos,
3425 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3426 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3427 TYPE_ALIGN (TREE_TYPE (rinner))),
3428 word_mode, lvolatilep || rvolatilep)
3429 if (nmode == VOIDmode)
3430 return 0;
3432 /* Set signed and unsigned types of the precision of this mode for the
3433 shifts below. */
3434 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3435 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3437 /* Compute the bit position and size for the new reference and our offset
3438 within it. If the new reference is the same size as the original, we
3439 won't optimize anything, so return zero. */
3440 nbitsize = GET_MODE_BITSIZE (nmode);
/* NBITPOS is LBITPOS rounded down to a multiple of the chosen mode's
   width; LBITPOS becomes the field's offset within that unit.  */
3441 nbitpos = lbitpos & ~ (nbitsize - 1);
3442 lbitpos -= nbitpos;
3443 if (nbitsize == lbitsize)
3444 return 0;
/* On big-endian targets bit positions count from the other end of the
   unit, so mirror LBITPOS within the NBITSIZE-bit word.  */
3446 if (BYTES_BIG_ENDIAN)
3447 lbitpos = nbitsize - lbitsize - lbitpos;
3449 /* Make the mask to be used against the extracted field. */
3450 mask = build_int_cst (unsigned_type, -1);
3451 mask = force_fit_type (mask, 0, false, false);
3452 mask = fold_convert (unsigned_type, mask);
/* Shift the all-ones value up and back down to leave LBITSIZE one bits
   positioned at LBITPOS within the unit.  */
3453 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3454 mask = const_binop (RSHIFT_EXPR, mask,
3455 size_int (nbitsize - lbitsize - lbitpos), 0);
3457 if (! const_p)
3458 /* If not comparing with constant, just rework the comparison
3459 and return. */
3460 return fold_build2 (code, compare_type,
3461 fold_build2 (BIT_AND_EXPR, unsigned_type,
3462 make_bit_field_ref (linner,
3463 unsigned_type,
3464 nbitsize, nbitpos,
3466 mask),
3467 fold_build2 (BIT_AND_EXPR, unsigned_type,
3468 make_bit_field_ref (rinner,
3469 unsigned_type,
3470 nbitsize, nbitpos,
3472 mask));
3474 /* Otherwise, we are handling the constant case. See if the constant is too
3475 big for the field. Warn and return a tree of for 0 (false) if so. We do
3476 this not only for its own sake, but to avoid having to test for this
3477 error case below. If we didn't, we might generate wrong code.
3479 For unsigned fields, the constant shifted right by the field length should
3480 be all zero. For signed fields, the high-order bits should agree with
3481 the sign bit. */
3483 if (lunsignedp)
3485 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3486 fold_convert (unsigned_type, rhs),
3487 size_int (lbitsize), 0)))
3489 warning (0, "comparison is always %d due to width of bit-field",
3490 code == NE_EXPR);
3491 return constant_boolean_node (code == NE_EXPR, compare_type);
3494 else
/* Signed field: after shifting out all but the sign-extension bits,
   the remainder must be all zeros or all ones.  */
3496 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3497 size_int (lbitsize - 1), 0);
3498 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3500 warning (0, "comparison is always %d due to width of bit-field",
3501 code == NE_EXPR);
3502 return constant_boolean_node (code == NE_EXPR, compare_type);
3506 /* Single-bit compares should always be against zero. */
3507 if (lbitsize == 1 && ! integer_zerop (rhs))
3509 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3510 rhs = build_int_cst (type, 0);
3513 /* Make a new bitfield reference, shift the constant over the
3514 appropriate number of bits and mask it with the computed mask
3515 (in case this was a signed field). If we changed it, make a new one. */
3516 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3517 if (lvolatilep)
3519 TREE_SIDE_EFFECTS (lhs) = 1;
3520 TREE_THIS_VOLATILE (lhs) = 1;
3523 rhs = const_binop (BIT_AND_EXPR,
3524 const_binop (LSHIFT_EXPR,
3525 fold_convert (unsigned_type, rhs),
3526 size_int (lbitpos), 0),
3527 mask, 0);
3529 return build2 (code, compare_type,
3530 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3531 rhs);
3534 /* Subroutine for fold_truthop: decode a field reference.
3536 If EXP is a comparison reference, we return the innermost reference.
3538 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3539 set to the starting bit number.
3541 If the innermost field can be completely contained in a mode-sized
3542 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3544 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3545 otherwise it is not changed.
3547 *PUNSIGNEDP is set to the signedness of the field.
3549 *PMASK is set to the mask used. This is either contained in a
3550 BIT_AND_EXPR or derived from the width of the field.
3552 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3554 Return 0 if this is not a component reference or is one that we can't
3555 do anything with. */
3557 static tree
3558 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3559 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3560 int *punsignedp, int *pvolatilep,
3561 tree *pmask, tree *pand_mask)
3563 tree outer_type = 0;
3564 tree and_mask = 0;
3565 tree mask, inner, offset;
3566 tree unsigned_type;
3567 unsigned int precision;
3569 /* All the optimizations using this function assume integer fields.
3570 There are problems with FP fields since the type_for_size call
3571 below can fail for, e.g., XFmode. */
3572 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3573 return 0;
3575 /* We are interested in the bare arrangement of bits, so strip everything
3576 that doesn't affect the machine mode. However, record the type of the
3577 outermost expression if it may matter below. */
3578 if (TREE_CODE (exp) == NOP_EXPR
3579 || TREE_CODE (exp) == CONVERT_EXPR
3580 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3581 outer_type = TREE_TYPE (exp);
3582 STRIP_NOPS (exp);
/* Peel off an explicit masking operation; its constant operand will be
   folded into the field mask at the end.  */
3584 if (TREE_CODE (exp) == BIT_AND_EXPR)
3586 and_mask = TREE_OPERAND (exp, 1);
3587 exp = TREE_OPERAND (exp, 0);
3588 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3589 if (TREE_CODE (and_mask) != INTEGER_CST)
3590 return 0;
3593 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3594 punsignedp, pvolatilep, false);
/* Give up if there was no actual reference (unless an AND mask makes it
   useful anyway), if the offset is variable, or on PLACEHOLDER_EXPR.  */
3595 if ((inner == exp && and_mask == 0)
3596 || *pbitsize < 0 || offset != 0
3597 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3598 return 0;
3600 /* If the number of bits in the reference is the same as the bitsize of
3601 the outer type, then the outer type gives the signedness. Otherwise
3602 (in case of a small bitfield) the signedness is unchanged. */
3603 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3604 *punsignedp = TYPE_UNSIGNED (outer_type);
3606 /* Compute the mask to access the bitfield. */
3607 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3608 precision = TYPE_PRECISION (unsigned_type);
3610 mask = build_int_cst (unsigned_type, -1);
3611 mask = force_fit_type (mask, 0, false, false);
/* Shift all-ones up and back down to keep exactly *PBITSIZE low-order
   one bits.  */
3613 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3614 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3616 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3617 if (and_mask != 0)
3618 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3619 fold_convert (unsigned_type, and_mask), mask);
3621 *pmask = mask;
3622 *pand_mask = and_mask;
3623 return inner;
3626 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3627 bit positions. */
3629 static int
3630 all_ones_mask_p (tree mask, int size)
3632 tree type = TREE_TYPE (mask);
3633 unsigned int precision = TYPE_PRECISION (type);
3634 tree tmask;
3636 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3637 tmask = force_fit_type (tmask, 0, false, false);
3639 return
3640 tree_int_cst_equal (mask,
3641 const_binop (RSHIFT_EXPR,
3642 const_binop (LSHIFT_EXPR, tmask,
3643 size_int (precision - size),
3645 size_int (precision - size), 0));
3648 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3649 represents the sign bit of EXP's type. If EXP represents a sign
3650 or zero extension, also test VAL against the unextended type.
3651 The return value is the (sub)expression whose sign bit is VAL,
3652 or NULL_TREE otherwise. */
3654 static tree
3655 sign_bit_p (tree exp, tree val)
3657 unsigned HOST_WIDE_INT mask_lo, lo;
3658 HOST_WIDE_INT mask_hi, hi;
3659 int width;
3660 tree t;
3662 /* Tree EXP must have an integral type. */
3663 t = TREE_TYPE (exp);
3664 if (! INTEGRAL_TYPE_P (t))
3665 return NULL_TREE;
3667 /* Tree VAL must be an integer constant. */
3668 if (TREE_CODE (val) != INTEGER_CST
3669 || TREE_CONSTANT_OVERFLOW (val))
3670 return NULL_TREE;
/* Compute the sign bit (HI:LO) and a mask covering the WIDTH value
   bits (MASK_HI:MASK_LO), split across the two HOST_WIDE_INT halves
   of an INTEGER_CST.  */
3672 width = TYPE_PRECISION (t);
3673 if (width > HOST_BITS_PER_WIDE_INT)
3675 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3676 lo = 0;
3678 mask_hi = ((unsigned HOST_WIDE_INT) -1
3679 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3680 mask_lo = -1;
3682 else
3684 hi = 0;
3685 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3687 mask_hi = 0;
3688 mask_lo = ((unsigned HOST_WIDE_INT) -1
3689 >> (HOST_BITS_PER_WIDE_INT - width));
3692 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3693 treat VAL as if it were unsigned. */
3694 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3695 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3696 return exp;
3698 /* Handle extension from a narrower type. */
3699 if (TREE_CODE (exp) == NOP_EXPR
3700 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3701 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3703 return NULL_TREE;
3706 /* Subroutine for fold_truthop: determine if an operand is simple enough
3707 to be evaluated unconditionally. */
3709 static int
3710 simple_operand_p (tree exp)
3712 /* Strip any conversions that don't change the machine mode. */
3713 STRIP_NOPS (exp);
3715 return (CONSTANT_CLASS_P (exp)
3716 || TREE_CODE (exp) == SSA_NAME
3717 || (DECL_P (exp)
3718 && ! TREE_ADDRESSABLE (exp)
3719 && ! TREE_THIS_VOLATILE (exp)
3720 && ! DECL_NONLOCAL (exp)
3721 /* Don't regard global variables as simple. They may be
3722 allocated in ways unknown to the compiler (shared memory,
3723 #pragma weak, etc). */
3724 && ! TREE_PUBLIC (exp)
3725 && ! DECL_EXTERNAL (exp)
3726 /* Loading a static variable is unduly expensive, but global
3727 registers aren't expensive. */
3728 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3731 /* The following functions are subroutines to fold_range_test and allow it to
3732 try to change a logical combination of comparisons into a range test.
3734 For example, both
3735 X == 2 || X == 3 || X == 4 || X == 5
3736 and
3737 X >= 2 && X <= 5
3738 are converted to
3739 (unsigned) (X - 2) <= 3
3741 We describe each set of comparisons as being either inside or outside
3742 a range, using a variable named like IN_P, and then describe the
3743 range with a lower and upper bound. If one of the bounds is omitted,
3744 it represents either the highest or lowest value of the type.
3746 In the comments below, we represent a range by two numbers in brackets
3747 preceded by a "+" to designate being inside that range, or a "-" to
3748 designate being outside that range, so the condition can be inverted by
3749 flipping the prefix. An omitted bound is represented by a "-". For
3750 example, "- [-, 10]" means being outside the range starting at the lowest
3751 possible value and ending at 10, in other words, being greater than 10.
3752 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3753 always false.
3755 We set up things so that the missing bounds are handled in a consistent
3756 manner so neither a missing bound nor "true" and "false" need to be
3757 handled using a special case. */
3759 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3760 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3761 and UPPER1_P are nonzero if the respective argument is an upper bound
3762 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3763 must be specified for a comparison. ARG1 will be converted to ARG0's
3764 type if both are specified. */
3766 static tree
3767 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3768 tree arg1, int upper1_p)
3770 tree tem;
3771 int result;
3772 int sgn0, sgn1;
3774 /* If neither arg represents infinity, do the normal operation.
3775 Else, if not a comparison, return infinity. Else handle the special
3776 comparison rules. Note that most of the cases below won't occur, but
3777 are handled for consistency. */
3779 if (arg0 != 0 && arg1 != 0)
3781 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3782 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3783 STRIP_NOPS (tem);
3784 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3787 if (TREE_CODE_CLASS (code) != tcc_comparison)
3788 return 0;
3790 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3791 for neither. In real maths, we cannot assume open ended ranges are
3792 the same. But, this is computer arithmetic, where numbers are finite.
3793 We can therefore make the transformation of any unbounded range with
3794 the value Z, Z being greater than any representable number. This permits
3795 us to treat unbounded ranges as equal. */
3796 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3797 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3798 switch (code)
3800 case EQ_EXPR:
3801 result = sgn0 == sgn1;
3802 break;
3803 case NE_EXPR:
3804 result = sgn0 != sgn1;
3805 break;
3806 case LT_EXPR:
3807 result = sgn0 < sgn1;
3808 break;
3809 case LE_EXPR:
3810 result = sgn0 <= sgn1;
3811 break;
3812 case GT_EXPR:
3813 result = sgn0 > sgn1;
3814 break;
3815 case GE_EXPR:
3816 result = sgn0 >= sgn1;
3817 break;
3818 default:
3819 gcc_unreachable ();
3822 return constant_boolean_node (result, type);
3825 /* Given EXP, a logical expression, set the range it is testing into
3826 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3827 actually being tested. *PLOW and *PHIGH will be made of the same type
3828 as the returned expression. If EXP is not a comparison, we will most
3829 likely not be returning a useful value and range. */
3831 static tree
3832 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3834 enum tree_code code;
3835 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3836 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3837 int in_p, n_in_p;
3838 tree low, high, n_low, n_high;
3840 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3841 and see if we can refine the range. Some of the cases below may not
3842 happen, but it doesn't seem worth worrying about this. We "continue"
3843 the outer loop when we've changed something; otherwise we "break"
3844 the switch, which will "break" the while. */
3846 in_p = 0;
3847 low = high = build_int_cst (TREE_TYPE (exp), 0);
3849 while (1)
3851 code = TREE_CODE (exp);
3852 exp_type = TREE_TYPE (exp);
/* Pick up the operands once per iteration so the cases below can use
   ARG0/ARG1 uniformly.  */
3854 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3856 if (TREE_CODE_LENGTH (code) > 0)
3857 arg0 = TREE_OPERAND (exp, 0);
3858 if (TREE_CODE_CLASS (code) == tcc_comparison
3859 || TREE_CODE_CLASS (code) == tcc_unary
3860 || TREE_CODE_CLASS (code) == tcc_binary)
3861 arg0_type = TREE_TYPE (arg0);
3862 if (TREE_CODE_CLASS (code) == tcc_binary
3863 || TREE_CODE_CLASS (code) == tcc_comparison
3864 || (TREE_CODE_CLASS (code) == tcc_expression
3865 && TREE_CODE_LENGTH (code) > 1))
3866 arg1 = TREE_OPERAND (exp, 1);
3869 switch (code)
3871 case TRUTH_NOT_EXPR:
3872 in_p = ! in_p, exp = arg0;
3873 continue;
3875 case EQ_EXPR: case NE_EXPR:
3876 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3877 /* We can only do something if the range is testing for zero
3878 and if the second operand is an integer constant. Note that
3879 saying something is "in" the range we make is done by
3880 complementing IN_P since it will set in the initial case of
3881 being not equal to zero; "out" is leaving it alone. */
3882 if (low == 0 || high == 0
3883 || ! integer_zerop (low) || ! integer_zerop (high)
3884 || TREE_CODE (arg1) != INTEGER_CST)
3885 break;
3887 switch (code)
3889 case NE_EXPR: /* - [c, c] */
3890 low = high = arg1;
3891 break;
3892 case EQ_EXPR: /* + [c, c] */
3893 in_p = ! in_p, low = high = arg1;
3894 break;
3895 case GT_EXPR: /* - [-, c] */
3896 low = 0, high = arg1;
3897 break;
3898 case GE_EXPR: /* + [c, -] */
3899 in_p = ! in_p, low = arg1, high = 0;
3900 break;
3901 case LT_EXPR: /* - [c, -] */
3902 low = arg1, high = 0;
3903 break;
3904 case LE_EXPR: /* + [-, c] */
3905 in_p = ! in_p, low = 0, high = arg1;
3906 break;
3907 default:
3908 gcc_unreachable ();
3911 /* If this is an unsigned comparison, we also know that EXP is
3912 greater than or equal to zero. We base the range tests we make
3913 on that fact, so we record it here so we can parse existing
3914 range tests. We test arg0_type since often the return type
3915 of, e.g. EQ_EXPR, is boolean. */
3916 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3918 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3919 in_p, low, high, 1,
3920 build_int_cst (arg0_type, 0),
3921 NULL_TREE))
3922 break;
3924 in_p = n_in_p, low = n_low, high = n_high;
3926 /* If the high bound is missing, but we have a nonzero low
3927 bound, reverse the range so it goes from zero to the low bound
3928 minus 1. */
3929 if (high == 0 && low && ! integer_zerop (low))
3931 in_p = ! in_p;
3932 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3933 integer_one_node, 0);
3934 low = build_int_cst (arg0_type, 0);
3938 exp = arg0;
3939 continue;
3941 case NEGATE_EXPR:
3942 /* (-x) IN [a,b] -> x in [-b, -a] */
3943 n_low = range_binop (MINUS_EXPR, exp_type,
3944 build_int_cst (exp_type, 0),
3945 0, high, 1);
3946 n_high = range_binop (MINUS_EXPR, exp_type,
3947 build_int_cst (exp_type, 0),
3948 0, low, 0);
3949 low = n_low, high = n_high;
3950 exp = arg0;
3951 continue;
3953 case BIT_NOT_EXPR:
3954 /* ~ X -> -X - 1 */
3955 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3956 build_int_cst (exp_type, 1));
3957 continue;
3959 case PLUS_EXPR: case MINUS_EXPR:
3960 if (TREE_CODE (arg1) != INTEGER_CST)
3961 break;
3963 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3964 move a constant to the other side. */
3965 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3966 break;
3968 /* If EXP is signed, any overflow in the computation is undefined,
3969 so we don't worry about it so long as our computations on
3970 the bounds don't overflow. For unsigned, overflow is defined
3971 and this is exactly the right thing. */
3972 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3973 arg0_type, low, 0, arg1, 0);
3974 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3975 arg0_type, high, 1, arg1, 0);
3976 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3977 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3978 break;
3980 /* Check for an unsigned range which has wrapped around the maximum
3981 value thus making n_high < n_low, and normalize it. */
3982 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3984 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3985 integer_one_node, 0);
3986 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3987 integer_one_node, 0);
3989 /* If the range is of the form +/- [ x+1, x ], we won't
3990 be able to normalize it. But then, it represents the
3991 whole range or the empty set, so make it
3992 +/- [ -, - ]. */
3993 if (tree_int_cst_equal (n_low, low)
3994 && tree_int_cst_equal (n_high, high))
3995 low = high = 0;
3996 else
3997 in_p = ! in_p;
3999 else
4000 low = n_low, high = n_high;
4002 exp = arg0;
4003 continue;
4005 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* A widening conversion: we can only continue into the narrower
   operand if the existing bounds fit its type.  */
4006 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4007 break;
4009 if (! INTEGRAL_TYPE_P (arg0_type)
4010 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4011 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4012 break;
4014 n_low = low, n_high = high;
4016 if (n_low != 0)
4017 n_low = fold_convert (arg0_type, n_low);
4019 if (n_high != 0)
4020 n_high = fold_convert (arg0_type, n_high);
4023 /* If we're converting arg0 from an unsigned type, to exp,
4024 a signed type, we will be doing the comparison as unsigned.
4025 The tests above have already verified that LOW and HIGH
4026 are both positive.
4028 So we have to ensure that we will handle large unsigned
4029 values the same way that the current signed bounds treat
4030 negative values. */
4032 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4034 tree high_positive;
4035 tree equiv_type = lang_hooks.types.type_for_mode
4036 (TYPE_MODE (arg0_type), 1);
4038 /* A range without an upper bound is, naturally, unbounded.
4039 Since convert would have cropped a very large value, use
4040 the max value for the destination type. */
4041 high_positive
4042 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4043 : TYPE_MAX_VALUE (arg0_type);
/* When the precisions match, halve the bound so it becomes the
   largest value that stays positive when seen as signed.  */
4045 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4046 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4047 fold_convert (arg0_type,
4048 high_positive),
4049 build_int_cst (arg0_type, 1));
4051 /* If the low bound is specified, "and" the range with the
4052 range for which the original unsigned value will be
4053 positive. */
4054 if (low != 0)
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 1, n_low, n_high, 1,
4058 fold_convert (arg0_type,
4059 integer_zero_node),
4060 high_positive))
4061 break;
4063 in_p = (n_in_p == in_p);
4065 else
4067 /* Otherwise, "or" the range with the range of the input
4068 that will be interpreted as negative. */
4069 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4070 0, n_low, n_high, 1,
4071 fold_convert (arg0_type,
4072 integer_zero_node),
4073 high_positive))
4074 break;
4076 in_p = (in_p != n_in_p);
4080 exp = arg0;
4081 low = n_low, high = n_high;
4082 continue;
4084 default:
4085 break;
4088 break;
4091 /* If EXP is a constant, we can evaluate whether this is true or false. */
4092 if (TREE_CODE (exp) == INTEGER_CST)
4094 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4095 exp, 0, low, 0))
4096 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4097 exp, 1, high, 1)));
4098 low = high = 0;
4099 exp = 0;
4102 *pin_p = in_p, *plow = low, *phigh = high;
4103 return exp;
4106 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4107 type, TYPE, return an expression to test if EXP is in (or out of, depending
4108 on IN_P) the range. Return 0 if the test couldn't be created. */
4110 static tree
4111 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4113 tree etype = TREE_TYPE (exp);
4114 tree value;
4116 #ifdef HAVE_canonicalize_funcptr_for_compare
4117 /* Disable this optimization for function pointer expressions
4118 on targets that require function pointer canonicalization. */
4119 if (HAVE_canonicalize_funcptr_for_compare
4120 && TREE_CODE (etype) == POINTER_TYPE
4121 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4122 return NULL_TREE;
4123 #endif
/* An "out of range" test is built as the inversion of the
   corresponding "in range" test.  */
4125 if (! in_p)
4127 value = build_range_check (type, exp, 1, low, high);
4128 if (value != 0)
4129 return invert_truthvalue (value);
4131 return 0;
/* Degenerate ranges: unbounded on both sides is always true; one
   missing bound reduces to a single comparison; a single-value range
   reduces to an equality test.  */
4134 if (low == 0 && high == 0)
4135 return build_int_cst (type, 1);
4137 if (low == 0)
4138 return fold_build2 (LE_EXPR, type, exp,
4139 fold_convert (etype, high));
4141 if (high == 0)
4142 return fold_build2 (GE_EXPR, type, exp,
4143 fold_convert (etype, low));
4145 if (operand_equal_p (low, high, 0))
4146 return fold_build2 (EQ_EXPR, type, exp,
4147 fold_convert (etype, low));
/* [0, high] on a signed EXP becomes an unsigned <= test.  */
4149 if (integer_zerop (low))
4151 if (! TYPE_UNSIGNED (etype))
4153 etype = lang_hooks.types.unsigned_type (etype);
4154 high = fold_convert (etype, high);
4155 exp = fold_convert (etype, exp);
4157 return build_range_check (type, exp, 1, 0, high);
4160 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4161 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4163 unsigned HOST_WIDE_INT lo;
4164 HOST_WIDE_INT hi;
4165 int prec;
/* HI:LO is the signed maximum of a PREC-bit type, split across the
   two halves of an INTEGER_CST.  */
4167 prec = TYPE_PRECISION (etype);
4168 if (prec <= HOST_BITS_PER_WIDE_INT)
4170 hi = 0;
4171 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4173 else
4175 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4176 lo = (unsigned HOST_WIDE_INT) -1;
4179 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4181 if (TYPE_UNSIGNED (etype))
4183 etype = lang_hooks.types.signed_type (etype);
4184 exp = fold_convert (etype, exp);
4186 return fold_build2 (GT_EXPR, type, exp,
4187 build_int_cst (etype, 0));
4191 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4192 This requires wrap-around arithmetics for the type of the expression. */
4193 switch (TREE_CODE (etype))
4195 case INTEGER_TYPE:
4196 /* There is no requirement that LOW be within the range of ETYPE
4197 if the latter is a subtype. It must, however, be within the base
4198 type of ETYPE. So be sure we do the subtraction in that type. */
4199 if (TREE_TYPE (etype))
4200 etype = TREE_TYPE (etype);
4201 break;
4203 case ENUMERAL_TYPE:
4204 case BOOLEAN_TYPE:
4205 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4206 TYPE_UNSIGNED (etype));
4207 break;
4209 default:
4210 break;
4213 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4214 if (TREE_CODE (etype) == INTEGER_TYPE
4215 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4217 tree utype, minv, maxv;
4219 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4220 for the type in question, as we rely on this here. */
4221 utype = lang_hooks.types.unsigned_type (etype);
4222 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4223 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4224 integer_one_node, 1);
4225 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4227 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4228 minv, 1, maxv, 1)))
4229 etype = utype;
4230 else
4231 return 0;
4234 high = fold_convert (etype, high);
4235 low = fold_convert (etype, low);
4236 exp = fold_convert (etype, exp);
4238 value = const_binop (MINUS_EXPR, high, low, 0);
/* Recurse with the shifted range [0, high-low]; the zero-low case
   above then produces the final unsigned comparison.  */
4240 if (value != 0 && !TREE_OVERFLOW (value))
4241 return build_range_check (type,
4242 fold_build2 (MINUS_EXPR, etype, exp, low),
4243 1, build_int_cst (etype, 0), value);
4245 return 0;
4248 /* Return the predecessor of VAL in its type, handling the infinite case. */
4250 static tree
4251 range_predecessor (tree val)
4253 tree type = TREE_TYPE (val);
4255 if (INTEGRAL_TYPE_P (type)
4256 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4257 return 0;
4258 else
4259 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4262 /* Return the successor of VAL in its type, handling the infinite case. */
4264 static tree
4265 range_successor (tree val)
4267 tree type = TREE_TYPE (val);
4269 if (INTEGRAL_TYPE_P (type)
4270 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4271 return 0;
4272 else
4273 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4276 /* Given two ranges, see if we can merge them into one. Return 1 if we
4277 can, 0 if we can't. Set the output range into the specified parameters. */
4279 static int
4280 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4281 tree high0, int in1_p, tree low1, tree high1)
4283 int no_overlap;
4284 int subset;
4285 int temp;
4286 tree tem;
4287 int in_p;
4288 tree low, high;
4289 int lowequal = ((low0 == 0 && low1 == 0)
4290 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4291 low0, 0, low1, 0)));
4292 int highequal = ((high0 == 0 && high1 == 0)
4293 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4294 high0, 1, high1, 1)));
4296 /* Make range 0 be the range that starts first, or ends last if they
4297 start at the same value. Swap them if it isn't. */
4298 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4299 low0, 0, low1, 0))
4300 || (lowequal
4301 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4302 high1, 1, high0, 1))))
4304 temp = in0_p, in0_p = in1_p, in1_p = temp;
4305 tem = low0, low0 = low1, low1 = tem;
4306 tem = high0, high0 = high1, high1 = tem;
4309 /* Now flag two cases, whether the ranges are disjoint or whether the
4310 second range is totally subsumed in the first. Note that the tests
4311 below are simplified by the ones above. */
4312 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4313 high0, 1, low1, 0));
4314 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4315 high1, 1, high0, 1));
4317 /* We now have four cases, depending on whether we are including or
4318 excluding the two ranges. */
4319 if (in0_p && in1_p)
4321 /* If they don't overlap, the result is false. If the second range
4322 is a subset it is the result. Otherwise, the range is from the start
4323 of the second to the end of the first. */
4324 if (no_overlap)
4325 in_p = 0, low = high = 0;
4326 else if (subset)
4327 in_p = 1, low = low1, high = high1;
4328 else
4329 in_p = 1, low = low1, high = high0;
4332 else if (in0_p && ! in1_p)
4334 /* If they don't overlap, the result is the first range. If they are
4335 equal, the result is false. If the second range is a subset of the
4336 first, and the ranges begin at the same place, we go from just after
4337 the end of the second range to the end of the first. If the second
4338 range is not a subset of the first, or if it is a subset and both
4339 ranges end at the same place, the range starts at the start of the
4340 first range and ends just before the second range.
4341 Otherwise, we can't describe this as a single range. */
4342 if (no_overlap)
4343 in_p = 1, low = low0, high = high0;
4344 else if (lowequal && highequal)
4345 in_p = 0, low = high = 0;
4346 else if (subset && lowequal)
4348 low = range_successor (high1);
4349 high = high0;
4350 in_p = (low != 0);
4352 else if (! subset || highequal)
4354 low = low0;
4355 high = range_predecessor (low1);
4356 in_p = (high != 0);
4358 else
4359 return 0;
4362 else if (! in0_p && in1_p)
4364 /* If they don't overlap, the result is the second range. If the second
4365 is a subset of the first, the result is false. Otherwise,
4366 the range starts just after the first range and ends at the
4367 end of the second. */
4368 if (no_overlap)
4369 in_p = 1, low = low1, high = high1;
4370 else if (subset || highequal)
4371 in_p = 0, low = high = 0;
4372 else
4374 low = range_successor (high0);
4375 high = high1;
4376 in_p = (low != 0);
4380 else
4382 /* The case where we are excluding both ranges. Here the complex case
4383 is if they don't overlap. In that case, the only time we have a
4384 range is if they are adjacent. If the second is a subset of the
4385 first, the result is the first. Otherwise, the range to exclude
4386 starts at the beginning of the first range and ends at the end of the
4387 second. */
4388 if (no_overlap)
4390 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4391 range_successor (high0),
4392 1, low1, 0)))
4393 in_p = 0, low = low0, high = high1;
4394 else
4396 /* Canonicalize - [min, x] into - [-, x]. */
4397 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4398 switch (TREE_CODE (TREE_TYPE (low0)))
4400 case ENUMERAL_TYPE:
4401 if (TYPE_PRECISION (TREE_TYPE (low0))
4402 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4403 break;
4404 /* FALLTHROUGH */
4405 case INTEGER_TYPE:
4406 if (tree_int_cst_equal (low0,
4407 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4408 low0 = 0;
4409 break;
4410 case POINTER_TYPE:
4411 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4412 && integer_zerop (low0))
4413 low0 = 0;
4414 break;
4415 default:
4416 break;
4419 /* Canonicalize - [x, max] into - [x, -]. */
4420 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4421 switch (TREE_CODE (TREE_TYPE (high1)))
4423 case ENUMERAL_TYPE:
4424 if (TYPE_PRECISION (TREE_TYPE (high1))
4425 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4426 break;
4427 /* FALLTHROUGH */
4428 case INTEGER_TYPE:
4429 if (tree_int_cst_equal (high1,
4430 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4431 high1 = 0;
4432 break;
4433 case POINTER_TYPE:
4434 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4435 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4436 high1, 1,
4437 integer_one_node, 1)))
4438 high1 = 0;
4439 break;
4440 default:
4441 break;
4444 /* The ranges might be also adjacent between the maximum and
4445 minimum values of the given type. For
4446 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4447 return + [x + 1, y - 1]. */
4448 if (low0 == 0 && high1 == 0)
4450 low = range_successor (high0);
4451 high = range_predecessor (low1);
4452 if (low == 0 || high == 0)
4453 return 0;
4455 in_p = 1;
4457 else
4458 return 0;
4461 else if (subset)
4462 in_p = 0, low = low0, high = high0;
4463 else
4464 in_p = 0, low = low0, high = high1;
4467 *pin_p = in_p, *plow = low, *phigh = high;
4468 return 1;
4472 /* Subroutine of fold, looking inside expressions of the form
4473 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4474 of the COND_EXPR. This function is being used also to optimize
4475 A op B ? C : A, by reversing the comparison first.
4477 Return a folded expression whose code is not a COND_EXPR
4478 anymore, or NULL_TREE if no folding opportunity is found. */
4480 static tree
4481 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4483 enum tree_code comp_code = TREE_CODE (arg0);
4484 tree arg00 = TREE_OPERAND (arg0, 0);
4485 tree arg01 = TREE_OPERAND (arg0, 1);
4486 tree arg1_type = TREE_TYPE (arg1);
4487 tree tem;
4489 STRIP_NOPS (arg1);
4490 STRIP_NOPS (arg2);
4492 /* If we have A op 0 ? A : -A, consider applying the following
4493 transformations:
4495 A == 0? A : -A same as -A
4496 A != 0? A : -A same as A
4497 A >= 0? A : -A same as abs (A)
4498 A > 0? A : -A same as abs (A)
4499 A <= 0? A : -A same as -abs (A)
4500 A < 0? A : -A same as -abs (A)
4502 None of these transformations work for modes with signed
4503 zeros. If A is +/-0, the first two transformations will
4504 change the sign of the result (from +0 to -0, or vice
4505 versa). The last four will fix the sign of the result,
4506 even though the original expressions could be positive or
4507 negative, depending on the sign of A.
4509 Note that all these transformations are correct if A is
4510 NaN, since the two alternatives (A and -A) are also NaNs. */
4511 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4512 ? real_zerop (arg01)
4513 : integer_zerop (arg01))
4514 && ((TREE_CODE (arg2) == NEGATE_EXPR
4515 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4516 /* In the case that A is of the form X-Y, '-A' (arg2) may
4517 have already been folded to Y-X, check for that. */
4518 || (TREE_CODE (arg1) == MINUS_EXPR
4519 && TREE_CODE (arg2) == MINUS_EXPR
4520 && operand_equal_p (TREE_OPERAND (arg1, 0),
4521 TREE_OPERAND (arg2, 1), 0)
4522 && operand_equal_p (TREE_OPERAND (arg1, 1),
4523 TREE_OPERAND (arg2, 0), 0))))
4524 switch (comp_code)
4526 case EQ_EXPR:
4527 case UNEQ_EXPR:
4528 tem = fold_convert (arg1_type, arg1);
4529 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4530 case NE_EXPR:
4531 case LTGT_EXPR:
4532 return pedantic_non_lvalue (fold_convert (type, arg1));
4533 case UNGE_EXPR:
4534 case UNGT_EXPR:
4535 if (flag_trapping_math)
4536 break;
4537 /* Fall through. */
4538 case GE_EXPR:
4539 case GT_EXPR:
4540 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4541 arg1 = fold_convert (lang_hooks.types.signed_type
4542 (TREE_TYPE (arg1)), arg1);
4543 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4544 return pedantic_non_lvalue (fold_convert (type, tem));
4545 case UNLE_EXPR:
4546 case UNLT_EXPR:
4547 if (flag_trapping_math)
4548 break;
4549 case LE_EXPR:
4550 case LT_EXPR:
4551 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4552 arg1 = fold_convert (lang_hooks.types.signed_type
4553 (TREE_TYPE (arg1)), arg1);
4554 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4555 return negate_expr (fold_convert (type, tem));
4556 default:
4557 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4558 break;
4561 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4562 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4563 both transformations are correct when A is NaN: A != 0
4564 is then true, and A == 0 is false. */
4566 if (integer_zerop (arg01) && integer_zerop (arg2))
4568 if (comp_code == NE_EXPR)
4569 return pedantic_non_lvalue (fold_convert (type, arg1));
4570 else if (comp_code == EQ_EXPR)
4571 return build_int_cst (type, 0);
4574 /* Try some transformations of A op B ? A : B.
4576 A == B? A : B same as B
4577 A != B? A : B same as A
4578 A >= B? A : B same as max (A, B)
4579 A > B? A : B same as max (B, A)
4580 A <= B? A : B same as min (A, B)
4581 A < B? A : B same as min (B, A)
4583 As above, these transformations don't work in the presence
4584 of signed zeros. For example, if A and B are zeros of
4585 opposite sign, the first two transformations will change
4586 the sign of the result. In the last four, the original
4587 expressions give different results for (A=+0, B=-0) and
4588 (A=-0, B=+0), but the transformed expressions do not.
4590 The first two transformations are correct if either A or B
4591 is a NaN. In the first transformation, the condition will
4592 be false, and B will indeed be chosen. In the case of the
4593 second transformation, the condition A != B will be true,
4594 and A will be chosen.
4596 The conversions to max() and min() are not correct if B is
4597 a number and A is not. The conditions in the original
4598 expressions will be false, so all four give B. The min()
4599 and max() versions would give a NaN instead. */
4600 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4601 /* Avoid these transformations if the COND_EXPR may be used
4602 as an lvalue in the C++ front-end. PR c++/19199. */
4603 && (in_gimple_form
4604 || (strcmp (lang_hooks.name, "GNU C++") != 0
4605 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4606 || ! maybe_lvalue_p (arg1)
4607 || ! maybe_lvalue_p (arg2)))
4609 tree comp_op0 = arg00;
4610 tree comp_op1 = arg01;
4611 tree comp_type = TREE_TYPE (comp_op0);
4613 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4614 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4616 comp_type = type;
4617 comp_op0 = arg1;
4618 comp_op1 = arg2;
4621 switch (comp_code)
4623 case EQ_EXPR:
4624 return pedantic_non_lvalue (fold_convert (type, arg2));
4625 case NE_EXPR:
4626 return pedantic_non_lvalue (fold_convert (type, arg1));
4627 case LE_EXPR:
4628 case LT_EXPR:
4629 case UNLE_EXPR:
4630 case UNLT_EXPR:
4631 /* In C++ a ?: expression can be an lvalue, so put the
4632 operand which will be used if they are equal first
4633 so that we can convert this back to the
4634 corresponding COND_EXPR. */
4635 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4637 comp_op0 = fold_convert (comp_type, comp_op0);
4638 comp_op1 = fold_convert (comp_type, comp_op1);
4639 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4640 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4641 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4642 return pedantic_non_lvalue (fold_convert (type, tem));
4644 break;
4645 case GE_EXPR:
4646 case GT_EXPR:
4647 case UNGE_EXPR:
4648 case UNGT_EXPR:
4649 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4651 comp_op0 = fold_convert (comp_type, comp_op0);
4652 comp_op1 = fold_convert (comp_type, comp_op1);
4653 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4654 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4655 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4656 return pedantic_non_lvalue (fold_convert (type, tem));
4658 break;
4659 case UNEQ_EXPR:
4660 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4661 return pedantic_non_lvalue (fold_convert (type, arg2));
4662 break;
4663 case LTGT_EXPR:
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4665 return pedantic_non_lvalue (fold_convert (type, arg1));
4666 break;
4667 default:
4668 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4669 break;
4673 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4674 we might still be able to simplify this. For example,
4675 if C1 is one less or one more than C2, this might have started
4676 out as a MIN or MAX and been transformed by this function.
4677 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4679 if (INTEGRAL_TYPE_P (type)
4680 && TREE_CODE (arg01) == INTEGER_CST
4681 && TREE_CODE (arg2) == INTEGER_CST)
4682 switch (comp_code)
4684 case EQ_EXPR:
4685 /* We can replace A with C1 in this case. */
4686 arg1 = fold_convert (type, arg01);
4687 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4689 case LT_EXPR:
4690 /* If C1 is C2 + 1, this is min(A, C2). */
4691 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4692 OEP_ONLY_CONST)
4693 && operand_equal_p (arg01,
4694 const_binop (PLUS_EXPR, arg2,
4695 build_int_cst (type, 1), 0),
4696 OEP_ONLY_CONST))
4697 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4698 type, arg1, arg2));
4699 break;
4701 case LE_EXPR:
4702 /* If C1 is C2 - 1, this is min(A, C2). */
4703 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4704 OEP_ONLY_CONST)
4705 && operand_equal_p (arg01,
4706 const_binop (MINUS_EXPR, arg2,
4707 build_int_cst (type, 1), 0),
4708 OEP_ONLY_CONST))
4709 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4710 type, arg1, arg2));
4711 break;
4713 case GT_EXPR:
4714 /* If C1 is C2 - 1, this is max(A, C2). */
4715 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4716 OEP_ONLY_CONST)
4717 && operand_equal_p (arg01,
4718 const_binop (MINUS_EXPR, arg2,
4719 build_int_cst (type, 1), 0),
4720 OEP_ONLY_CONST))
4721 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4722 type, arg1, arg2));
4723 break;
4725 case GE_EXPR:
4726 /* If C1 is C2 + 1, this is max(A, C2). */
4727 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4728 OEP_ONLY_CONST)
4729 && operand_equal_p (arg01,
4730 const_binop (PLUS_EXPR, arg2,
4731 build_int_cst (type, 1), 0),
4732 OEP_ONLY_CONST))
4733 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4734 type, arg1, arg2));
4735 break;
4736 case NE_EXPR:
4737 break;
4738 default:
4739 gcc_unreachable ();
4742 return NULL_TREE;
/* Default: when branches are relatively expensive (BRANCH_COST >= 2),
   allow fold_range_test/fold_truthop below to turn a short-circuit
   TRUTH_ANDIF/ORIF into the unconditional TRUTH_AND/OR form.
   Targets may predefine this to override the heuristic.  */
4747 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4748 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4749 #endif
4751 /* EXP is some logical combination of boolean tests. See if we can
4752 merge it into some range test. Return the new tree if so. */
4754 static tree
4755 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4757 int or_op = (code == TRUTH_ORIF_EXPR
4758 || code == TRUTH_OR_EXPR);
4759 int in0_p, in1_p, in_p;
4760 tree low0, low1, low, high0, high1, high;
4761 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4762 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4763 tree tem;
4765 /* If this is an OR operation, invert both sides; we will invert
4766 again at the end. */
4767 if (or_op)
4768 in0_p = ! in0_p, in1_p = ! in1_p;
4770 /* If both expressions are the same, if we can merge the ranges, and we
4771 can build the range test, return it or it inverted. If one of the
4772 ranges is always true or always false, consider it to be the same
4773 expression as the other. */
4774 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4775 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4776 in1_p, low1, high1)
4777 && 0 != (tem = (build_range_check (type,
4778 lhs != 0 ? lhs
4779 : rhs != 0 ? rhs : integer_zero_node,
4780 in_p, low, high))))
4781 return or_op ? invert_truthvalue (tem) : tem;
4783 /* On machines where the branch cost is expensive, if this is a
4784 short-circuited branch and the underlying object on both sides
4785 is the same, make a non-short-circuit operation. */
4786 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4787 && lhs != 0 && rhs != 0
4788 && (code == TRUTH_ANDIF_EXPR
4789 || code == TRUTH_ORIF_EXPR)
4790 && operand_equal_p (lhs, rhs, 0))
4792 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4793 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4794 which cases we can't do this. */
4795 if (simple_operand_p (lhs))
4796 return build2 (code == TRUTH_ANDIF_EXPR
4797 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4798 type, op0, op1);
4800 else if (lang_hooks.decls.global_bindings_p () == 0
4801 && ! CONTAINS_PLACEHOLDER_P (lhs))
4803 tree common = save_expr (lhs);
4805 if (0 != (lhs = build_range_check (type, common,
4806 or_op ? ! in0_p : in0_p,
4807 low0, high0))
4808 && (0 != (rhs = build_range_check (type, common,
4809 or_op ? ! in1_p : in1_p,
4810 low1, high1))))
4811 return build2 (code == TRUTH_ANDIF_EXPR
4812 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4813 type, lhs, rhs);
4817 return 0;
4820 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4821 bit value. Arrange things so the extra bits will be set to zero if and
4822 only if C is signed-extended to its full width. If MASK is nonzero,
4823 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4825 static tree
4826 unextend (tree c, int p, int unsignedp, tree mask)
4828 tree type = TREE_TYPE (c);
4829 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4830 tree temp;
4832 if (p == modesize || unsignedp)
4833 return c;
4835 /* We work by getting just the sign bit into the low-order bit, then
4836 into the high-order bit, then sign-extend. We then XOR that value
4837 with C. */
4838 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4839 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4841 /* We must use a signed type in order to get an arithmetic right shift.
4842 However, we must also avoid introducing accidental overflows, so that
4843 a subsequent call to integer_zerop will work. Hence we must
4844 do the type conversion here. At this point, the constant is either
4845 zero or one, and the conversion to a signed type can never overflow.
4846 We could get an overflow if this conversion is done anywhere else. */
4847 if (TYPE_UNSIGNED (type))
4848 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4850 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4851 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4852 if (mask != 0)
4853 temp = const_binop (BIT_AND_EXPR, temp,
4854 fold_convert (TREE_TYPE (c), mask), 0);
4855 /* If necessary, convert the type back to match the type of C. */
4856 if (TYPE_UNSIGNED (type))
4857 temp = fold_convert (type, temp);
4859 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4862 /* Find ways of folding logical expressions of LHS and RHS:
4863 Try to merge two comparisons to the same innermost item.
4864 Look for range tests like "ch >= '0' && ch <= '9'".
4865 Look for combinations of simple terms on machines with expensive branches
4866 and evaluate the RHS unconditionally.
4868 For example, if we have p->a == 2 && p->b == 4 and we can make an
4869 object large enough to span both A and B, we can do this with a comparison
4870 against the object ANDed with the a mask.
4872 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4873 operations to do this with one comparison.
4875 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4876 function and the one above.
4878 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4879 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4881 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4882 two operands.
4884 We return the simplified tree or 0 if no optimization is possible. */
4886 static tree
4887 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4889 /* If this is the "or" of two comparisons, we can do something if
4890 the comparisons are NE_EXPR. If this is the "and", we can do something
4891 if the comparisons are EQ_EXPR. I.e.,
4892 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4894 WANTED_CODE is this operation code. For single bit fields, we can
4895 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4896 comparison for one-bit fields. */
4898 enum tree_code wanted_code;
4899 enum tree_code lcode, rcode;
4900 tree ll_arg, lr_arg, rl_arg, rr_arg;
4901 tree ll_inner, lr_inner, rl_inner, rr_inner;
4902 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4903 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4904 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4905 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4906 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4907 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4908 enum machine_mode lnmode, rnmode;
4909 tree ll_mask, lr_mask, rl_mask, rr_mask;
4910 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4911 tree l_const, r_const;
4912 tree lntype, rntype, result;
4913 int first_bit, end_bit;
4914 int volatilep;
4915 tree orig_lhs = lhs, orig_rhs = rhs;
4916 enum tree_code orig_code = code;
4918 /* Start by getting the comparison codes. Fail if anything is volatile.
4919 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4920 it were surrounded with a NE_EXPR. */
4922 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4923 return 0;
4925 lcode = TREE_CODE (lhs);
4926 rcode = TREE_CODE (rhs);
4928 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4930 lhs = build2 (NE_EXPR, truth_type, lhs,
4931 build_int_cst (TREE_TYPE (lhs), 0));
4932 lcode = NE_EXPR;
4935 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4937 rhs = build2 (NE_EXPR, truth_type, rhs,
4938 build_int_cst (TREE_TYPE (rhs), 0));
4939 rcode = NE_EXPR;
4942 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4943 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4944 return 0;
4946 ll_arg = TREE_OPERAND (lhs, 0);
4947 lr_arg = TREE_OPERAND (lhs, 1);
4948 rl_arg = TREE_OPERAND (rhs, 0);
4949 rr_arg = TREE_OPERAND (rhs, 1);
4951 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4952 if (simple_operand_p (ll_arg)
4953 && simple_operand_p (lr_arg))
4955 tree result;
4956 if (operand_equal_p (ll_arg, rl_arg, 0)
4957 && operand_equal_p (lr_arg, rr_arg, 0))
4959 result = combine_comparisons (code, lcode, rcode,
4960 truth_type, ll_arg, lr_arg);
4961 if (result)
4962 return result;
4964 else if (operand_equal_p (ll_arg, rr_arg, 0)
4965 && operand_equal_p (lr_arg, rl_arg, 0))
4967 result = combine_comparisons (code, lcode,
4968 swap_tree_comparison (rcode),
4969 truth_type, ll_arg, lr_arg);
4970 if (result)
4971 return result;
4975 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4978 /* If the RHS can be evaluated unconditionally and its operands are
4979 simple, it wins to evaluate the RHS unconditionally on machines
4980 with expensive branches. In this case, this isn't a comparison
4981 that can be merged. Avoid doing this if the RHS is a floating-point
4982 comparison since those can trap. */
4984 if (BRANCH_COST >= 2
4985 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4986 && simple_operand_p (rl_arg)
4987 && simple_operand_p (rr_arg))
4989 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4990 if (code == TRUTH_OR_EXPR
4991 && lcode == NE_EXPR && integer_zerop (lr_arg)
4992 && rcode == NE_EXPR && integer_zerop (rr_arg)
4993 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4994 return build2 (NE_EXPR, truth_type,
4995 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4996 ll_arg, rl_arg),
4997 build_int_cst (TREE_TYPE (ll_arg), 0));
4999 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5000 if (code == TRUTH_AND_EXPR
5001 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5002 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5003 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5004 return build2 (EQ_EXPR, truth_type,
5005 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5006 ll_arg, rl_arg),
5007 build_int_cst (TREE_TYPE (ll_arg), 0));
5009 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5011 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5012 return build2 (code, truth_type, lhs, rhs);
5013 return NULL_TREE;
5017 /* See if the comparisons can be merged. Then get all the parameters for
5018 each side. */
5020 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5021 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5022 return 0;
5024 volatilep = 0;
5025 ll_inner = decode_field_reference (ll_arg,
5026 &ll_bitsize, &ll_bitpos, &ll_mode,
5027 &ll_unsignedp, &volatilep, &ll_mask,
5028 &ll_and_mask);
5029 lr_inner = decode_field_reference (lr_arg,
5030 &lr_bitsize, &lr_bitpos, &lr_mode,
5031 &lr_unsignedp, &volatilep, &lr_mask,
5032 &lr_and_mask);
5033 rl_inner = decode_field_reference (rl_arg,
5034 &rl_bitsize, &rl_bitpos, &rl_mode,
5035 &rl_unsignedp, &volatilep, &rl_mask,
5036 &rl_and_mask);
5037 rr_inner = decode_field_reference (rr_arg,
5038 &rr_bitsize, &rr_bitpos, &rr_mode,
5039 &rr_unsignedp, &volatilep, &rr_mask,
5040 &rr_and_mask);
5042 /* It must be true that the inner operation on the lhs of each
5043 comparison must be the same if we are to be able to do anything.
5044 Then see if we have constants. If not, the same must be true for
5045 the rhs's. */
5046 if (volatilep || ll_inner == 0 || rl_inner == 0
5047 || ! operand_equal_p (ll_inner, rl_inner, 0))
5048 return 0;
5050 if (TREE_CODE (lr_arg) == INTEGER_CST
5051 && TREE_CODE (rr_arg) == INTEGER_CST)
5052 l_const = lr_arg, r_const = rr_arg;
5053 else if (lr_inner == 0 || rr_inner == 0
5054 || ! operand_equal_p (lr_inner, rr_inner, 0))
5055 return 0;
5056 else
5057 l_const = r_const = 0;
5059 /* If either comparison code is not correct for our logical operation,
5060 fail. However, we can convert a one-bit comparison against zero into
5061 the opposite comparison against that bit being set in the field. */
5063 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5064 if (lcode != wanted_code)
5066 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5068 /* Make the left operand unsigned, since we are only interested
5069 in the value of one bit. Otherwise we are doing the wrong
5070 thing below. */
5071 ll_unsignedp = 1;
5072 l_const = ll_mask;
5074 else
5075 return 0;
5078 /* This is analogous to the code for l_const above. */
5079 if (rcode != wanted_code)
5081 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5083 rl_unsignedp = 1;
5084 r_const = rl_mask;
5086 else
5087 return 0;
5090 /* After this point all optimizations will generate bit-field
5091 references, which we might not want. */
5092 if (! lang_hooks.can_use_bit_fields_p ())
5093 return 0;
5095 /* See if we can find a mode that contains both fields being compared on
5096 the left. If we can't, fail. Otherwise, update all constants and masks
5097 to be relative to a field of that size. */
5098 first_bit = MIN (ll_bitpos, rl_bitpos);
5099 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5100 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5101 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5102 volatilep);
5103 if (lnmode == VOIDmode)
5104 return 0;
5106 lnbitsize = GET_MODE_BITSIZE (lnmode);
5107 lnbitpos = first_bit & ~ (lnbitsize - 1);
5108 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5109 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5111 if (BYTES_BIG_ENDIAN)
5113 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5114 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5117 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5118 size_int (xll_bitpos), 0);
5119 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5120 size_int (xrl_bitpos), 0);
5122 if (l_const)
5124 l_const = fold_convert (lntype, l_const);
5125 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5126 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5127 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5128 fold_build1 (BIT_NOT_EXPR,
5129 lntype, ll_mask),
5130 0)))
5132 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5134 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5137 if (r_const)
5139 r_const = fold_convert (lntype, r_const);
5140 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5141 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5142 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5143 fold_build1 (BIT_NOT_EXPR,
5144 lntype, rl_mask),
5145 0)))
5147 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5149 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5153 /* If the right sides are not constant, do the same for it. Also,
5154 disallow this optimization if a size or signedness mismatch occurs
5155 between the left and right sides. */
5156 if (l_const == 0)
5158 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5159 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5160 /* Make sure the two fields on the right
5161 correspond to the left without being swapped. */
5162 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5163 return 0;
5165 first_bit = MIN (lr_bitpos, rr_bitpos);
5166 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5167 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5168 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5169 volatilep);
5170 if (rnmode == VOIDmode)
5171 return 0;
5173 rnbitsize = GET_MODE_BITSIZE (rnmode);
5174 rnbitpos = first_bit & ~ (rnbitsize - 1);
5175 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5176 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5178 if (BYTES_BIG_ENDIAN)
5180 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5181 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5184 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5185 size_int (xlr_bitpos), 0);
5186 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5187 size_int (xrr_bitpos), 0);
5189 /* Make a mask that corresponds to both fields being compared.
5190 Do this for both items being compared. If the operands are the
5191 same size and the bits being compared are in the same position
5192 then we can do this by masking both and comparing the masked
5193 results. */
5194 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5195 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5196 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5198 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5199 ll_unsignedp || rl_unsignedp);
5200 if (! all_ones_mask_p (ll_mask, lnbitsize))
5201 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5203 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5204 lr_unsignedp || rr_unsignedp);
5205 if (! all_ones_mask_p (lr_mask, rnbitsize))
5206 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5208 return build2 (wanted_code, truth_type, lhs, rhs);
5211 /* There is still another way we can do something: If both pairs of
5212 fields being compared are adjacent, we may be able to make a wider
5213 field containing them both.
5215 Note that we still must mask the lhs/rhs expressions. Furthermore,
5216 the mask must be shifted to account for the shift done by
5217 make_bit_field_ref. */
5218 if ((ll_bitsize + ll_bitpos == rl_bitpos
5219 && lr_bitsize + lr_bitpos == rr_bitpos)
5220 || (ll_bitpos == rl_bitpos + rl_bitsize
5221 && lr_bitpos == rr_bitpos + rr_bitsize))
5223 tree type;
5225 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5226 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5227 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5228 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5230 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5231 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5232 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5233 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5235 /* Convert to the smaller type before masking out unwanted bits. */
5236 type = lntype;
5237 if (lntype != rntype)
5239 if (lnbitsize > rnbitsize)
5241 lhs = fold_convert (rntype, lhs);
5242 ll_mask = fold_convert (rntype, ll_mask);
5243 type = rntype;
5245 else if (lnbitsize < rnbitsize)
5247 rhs = fold_convert (lntype, rhs);
5248 lr_mask = fold_convert (lntype, lr_mask);
5249 type = lntype;
5253 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5254 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5256 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5257 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5259 return build2 (wanted_code, truth_type, lhs, rhs);
5262 return 0;
5265 /* Handle the case of comparisons with constants. If there is something in
5266 common between the masks, those bits of the constants must be the same.
5267 If not, the condition is always false. Test for this to avoid generating
5268 incorrect code below. */
5269 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5270 if (! integer_zerop (result)
5271 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5272 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5274 if (wanted_code == NE_EXPR)
5276 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5277 return constant_boolean_node (true, truth_type);
5279 else
5281 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5282 return constant_boolean_node (false, truth_type);
5286 /* Construct the expression we will return. First get the component
5287 reference we will make. Unless the mask is all ones the width of
5288 that field, perform the mask operation. Then compare with the
5289 merged constant. */
5290 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5291 ll_unsignedp || rl_unsignedp);
5293 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5294 if (! all_ones_mask_p (ll_mask, lnbitsize))
5295 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5297 return build2 (wanted_code, truth_type, result,
5298 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5301 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5302 constant. */
/* CODE is the comparison operator, TYPE the type of the comparison
   result, OP0 the MIN_EXPR/MAX_EXPR operand and OP1 the constant it is
   compared against.  Only EQ_EXPR and GT_EXPR are handled directly;
   NE/LT/LE are reduced to those by inverting, and GE by EQ || GT.
   Returns the simplified tree, or NULL_TREE if nothing can be done.  */
5304 static tree
5305 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5307 tree arg0 = op0;
5308 enum tree_code op_code;
5309 tree comp_const = op1;
5310 tree minmax_const;
5311 int consts_equal, consts_lt;
5312 tree inner;
/* Look through conversions that change neither value nor signedness.  */
5314 STRIP_SIGN_NOPS (arg0);
5316 op_code = TREE_CODE (arg0);
5317 minmax_const = TREE_OPERAND (arg0, 1);
5318 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5319 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5320 inner = TREE_OPERAND (arg0, 0);
5322 /* If something does not permit us to optimize, return the original tree. */
5323 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5324 || TREE_CODE (comp_const) != INTEGER_CST
5325 || TREE_CONSTANT_OVERFLOW (comp_const)
5326 || TREE_CODE (minmax_const) != INTEGER_CST
5327 || TREE_CONSTANT_OVERFLOW (minmax_const))
5328 return NULL_TREE;
5330 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5331 and GT_EXPR, doing the rest with recursive calls using logical
5332 simplifications. */
5333 switch (code)
5335 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Fold the inverted comparison, then invert the result.  */
5337 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5338 type, op0, op1);
5339 if (tem)
5340 return invert_truthvalue (tem);
5341 return NULL_TREE;
5344 case GE_EXPR:
/* a >= c is a == c || a > c.  */
5345 return
5346 fold_build2 (TRUTH_ORIF_EXPR, type,
5347 optimize_minmax_comparison
5348 (EQ_EXPR, type, arg0, comp_const),
5349 optimize_minmax_comparison
5350 (GT_EXPR, type, arg0, comp_const));
5352 case EQ_EXPR:
5353 if (op_code == MAX_EXPR && consts_equal)
5354 /* MAX (X, 0) == 0 -> X <= 0 */
5355 return fold_build2 (LE_EXPR, type, inner, comp_const);
5357 else if (op_code == MAX_EXPR && consts_lt)
5358 /* MAX (X, 0) == 5 -> X == 5 */
5359 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5361 else if (op_code == MAX_EXPR)
5362 /* MAX (X, 0) == -1 -> false */
5363 return omit_one_operand (type, integer_zero_node, inner);
5365 else if (consts_equal)
5366 /* MIN (X, 0) == 0 -> X >= 0 */
5367 return fold_build2 (GE_EXPR, type, inner, comp_const);
5369 else if (consts_lt)
5370 /* MIN (X, 0) == 5 -> false */
5371 return omit_one_operand (type, integer_zero_node, inner);
5373 else
5374 /* MIN (X, 0) == -1 -> X == -1 */
5375 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5377 case GT_EXPR:
5378 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5379 /* MAX (X, 0) > 0 -> X > 0
5380 MAX (X, 0) > 5 -> X > 5 */
5381 return fold_build2 (GT_EXPR, type, inner, comp_const);
5383 else if (op_code == MAX_EXPR)
5384 /* MAX (X, 0) > -1 -> true */
5385 return omit_one_operand (type, integer_one_node, inner);
5387 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5388 /* MIN (X, 0) > 0 -> false
5389 MIN (X, 0) > 5 -> false */
5390 return omit_one_operand (type, integer_zero_node, inner);
5392 else
5393 /* MIN (X, 0) > -1 -> X > -1 */
5394 return fold_build2 (GT_EXPR, type, inner, comp_const);
5396 default:
5397 return NULL_TREE;
5401 /* T is an integer expression that is being multiplied, divided, or taken a
5402 modulus (CODE says which and what kind of divide or modulus) by a
5403 constant C. See if we can eliminate that operation by folding it with
5404 other operations already in T. WIDE_TYPE, if non-null, is a type that
5405 should be used for the computation if wider than our type.
5407 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5408 (X * 2) + (Y * 4). We must, however, be assured that either the original
5409 expression would not overflow or that overflow is undefined for the type
5410 in the language in question.
5412 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5413 the machine has a multiply-accumulate insn or that this is part of an
5414 addressing calculation.
5416 If we return a non-null expression, it is an equivalent form of the
5417 original computation, but need not be in the original type. */
5419 static tree
5420 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5422 /* To avoid exponential search depth, refuse to allow recursion past
5423 three levels. Beyond that (1) it's highly unlikely that we'll find
5424 something interesting and (2) we've probably processed it before
5425 when we built the inner expression. */
5427 static int depth;
/* NOTE(review): a function-local static recursion counter is neither
   reentrant nor thread-safe; acceptable in a single-threaded compiler,
   but worth flagging if this code is ever run concurrently.  */
5428 tree ret;
/* Give up instead of recursing further; NULL means "no simplification".  */
5430 if (depth > 3)
5431 return NULL;
5433 depth++;
5434 ret = extract_muldiv_1 (t, c, code, wide_type);
5435 depth--;
5437 return ret;
/* Worker for extract_muldiv; see the contract comment above
   extract_muldiv.  Recursion goes back through extract_muldiv so the
   depth limit is enforced there.  */
5440 static tree
5441 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5443 tree type = TREE_TYPE (t);
5444 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is the computation type: WIDE_TYPE when it is strictly wider
   than T's type, otherwise T's own type.  */
5445 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5446 > GET_MODE_SIZE (TYPE_MODE (type)))
5447 ? wide_type : type);
5448 tree t1, t2;
/* Nonzero when T's top-level operation is CODE itself.  */
5449 int same_p = tcode == code;
5450 tree op0 = NULL_TREE, op1 = NULL_TREE;
5452 /* Don't deal with constants of zero here; they confuse the code below. */
5453 if (integer_zerop (c))
5454 return NULL_TREE;
5456 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5457 op0 = TREE_OPERAND (t, 0);
5459 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5460 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5462 /* Note that we need not handle conditional operations here since fold
5463 already handles those cases. So just do arithmetic here. */
5464 switch (tcode)
5466 case INTEGER_CST:
5467 /* For a constant, we can always simplify if we are a multiply
5468 or (for divide and modulus) if it is a multiple of our constant. */
5469 if (code == MULT_EXPR
5470 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5471 return const_binop (code, fold_convert (ctype, t),
5472 fold_convert (ctype, c), 0);
5473 break;
5475 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5476 /* If op0 is an expression ... */
5477 if ((COMPARISON_CLASS_P (op0)
5478 || UNARY_CLASS_P (op0)
5479 || BINARY_CLASS_P (op0)
5480 || EXPRESSION_CLASS_P (op0))
5481 /* ... and is unsigned, and its type is smaller than ctype,
5482 then we cannot pass through as widening. */
5483 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5484 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5485 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5486 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5487 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5488 /* ... or this is a truncation (t is narrower than op0),
5489 then we cannot pass through this narrowing. */
5490 || (GET_MODE_SIZE (TYPE_MODE (type))
5491 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5492 /* ... or signedness changes for division or modulus,
5493 then we cannot pass through this conversion. */
5494 || (code != MULT_EXPR
5495 && (TYPE_UNSIGNED (ctype)
5496 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5497 break;
5499 /* Pass the constant down and see if we can make a simplification. If
5500 we can, replace this expression with the inner simplification for
5501 possible later conversion to our or some other type. */
5502 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5503 && TREE_CODE (t2) == INTEGER_CST
5504 && ! TREE_CONSTANT_OVERFLOW (t2)
5505 && (0 != (t1 = extract_muldiv (op0, t2, code,
5506 code == MULT_EXPR
5507 ? ctype : NULL_TREE))))
5508 return t1;
5509 break;
5511 case ABS_EXPR:
5512 /* If widening the type changes it from signed to unsigned, then we
5513 must avoid building ABS_EXPR itself as unsigned. */
5514 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5516 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5517 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5519 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5520 return fold_convert (ctype, t1);
5522 break;
/* Otherwise ABS_EXPR is handled exactly like NEGATE_EXPR below.  */
5524 /* FALLTHROUGH */
5525 case NEGATE_EXPR:
5526 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5527 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5528 break;
5530 case MIN_EXPR: case MAX_EXPR:
5531 /* If widening the type changes the signedness, then we can't perform
5532 this optimization as that changes the result. */
5533 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5534 break;
5536 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5537 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5538 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying/dividing by a negative constant swaps MIN and MAX.  */
5540 if (tree_int_cst_sgn (c) < 0)
5541 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5543 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5544 fold_convert (ctype, t2));
5546 break;
5548 case LSHIFT_EXPR: case RSHIFT_EXPR:
5549 /* If the second operand is constant, this is a multiplication
5550 or floor division, by a power of two, so we can treat it that
5551 way unless the multiplier or divisor overflows. Signed
5552 left-shift overflow is implementation-defined rather than
5553 undefined in C90, so do not convert signed left shift into
5554 multiplication. */
5555 if (TREE_CODE (op1) == INTEGER_CST
5556 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5557 /* const_binop may not detect overflow correctly,
5558 so check for it explicitly here. */
5559 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5560 && TREE_INT_CST_HIGH (op1) == 0
5561 && 0 != (t1 = fold_convert (ctype,
5562 const_binop (LSHIFT_EXPR,
5563 size_one_node,
5564 op1, 0)))
5565 && ! TREE_OVERFLOW (t1))
/* Rewrite the shift as *(1<<n) or /(1<<n) and retry on that form.  */
5566 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5567 ? MULT_EXPR : FLOOR_DIV_EXPR,
5568 ctype, fold_convert (ctype, op0), t1),
5569 c, code, wide_type);
5570 break;
5572 case PLUS_EXPR: case MINUS_EXPR:
5573 /* See if we can eliminate the operation on both sides. If we can, we
5574 can return a new PLUS or MINUS. If we can't, the only remaining
5575 cases where we can do anything are if the second operand is a
5576 constant. */
5577 t1 = extract_muldiv (op0, c, code, wide_type);
5578 t2 = extract_muldiv (op1, c, code, wide_type);
5579 if (t1 != 0 && t2 != 0
5580 && (code == MULT_EXPR
5581 /* If not multiplication, we can only do this if both operands
5582 are divisible by c. */
5583 || (multiple_of_p (ctype, op0, c)
5584 && multiple_of_p (ctype, op1, c))))
5585 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5586 fold_convert (ctype, t2));
5588 /* If this was a subtraction, negate OP1 and set it to be an addition.
5589 This simplifies the logic below. */
5590 if (tcode == MINUS_EXPR)
5591 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5593 if (TREE_CODE (op1) != INTEGER_CST)
5594 break;
5596 /* If either OP1 or C are negative, this optimization is not safe for
5597 some of the division and remainder types while for others we need
5598 to change the code. */
5599 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5601 if (code == CEIL_DIV_EXPR)
5602 code = FLOOR_DIV_EXPR;
5603 else if (code == FLOOR_DIV_EXPR)
5604 code = CEIL_DIV_EXPR;
5605 else if (code != MULT_EXPR
5606 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5607 break;
5610 /* If it's a multiply or a division/modulus operation of a multiple
5611 of our constant, do the operation and verify it doesn't overflow. */
5612 if (code == MULT_EXPR
5613 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5615 op1 = const_binop (code, fold_convert (ctype, op1),
5616 fold_convert (ctype, c), 0);
5617 /* We allow the constant to overflow with wrapping semantics. */
5618 if (op1 == 0
5619 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5620 break;
5622 else
5623 break;
5625 /* If we have an unsigned type that is not a sizetype, we cannot widen
5626 the operation since it will change the result if the original
5627 computation overflowed. */
5628 if (TYPE_UNSIGNED (ctype)
5629 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5630 && ctype != type)
5631 break;
5633 /* If we were able to eliminate our operation from the first side,
5634 apply our operation to the second side and reform the PLUS. */
5635 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5636 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5638 /* The last case is if we are a multiply. In that case, we can
5639 apply the distributive law to commute the multiply and addition
5640 if the multiplication of the constants doesn't overflow. */
5641 if (code == MULT_EXPR)
5642 return fold_build2 (tcode, ctype,
5643 fold_build2 (code, ctype,
5644 fold_convert (ctype, op0),
5645 fold_convert (ctype, c)),
5646 op1);
5648 break;
5650 case MULT_EXPR:
5651 /* We have a special case here if we are doing something like
5652 (C * 8) % 4 since we know that's zero. */
5653 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5654 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5655 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5656 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5657 return omit_one_operand (type, integer_zero_node, op0);
5659 /* ... fall through ... */
5661 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5662 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5663 /* If we can extract our operation from the LHS, do so and return a
5664 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5665 do something only if the second operand is a constant. */
5666 if (same_p
5667 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5668 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5669 fold_convert (ctype, op1));
5670 else if (tcode == MULT_EXPR && code == MULT_EXPR
5671 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5672 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5673 fold_convert (ctype, t1));
5674 else if (TREE_CODE (op1) != INTEGER_CST)
5675 return 0;
5677 /* If these are the same operation types, we can associate them
5678 assuming no overflow. */
5679 if (tcode == code
5680 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5681 fold_convert (ctype, c), 0))
5682 && ! TREE_OVERFLOW (t1))
5683 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5685 /* If these operations "cancel" each other, we have the main
5686 optimizations of this pass, which occur when either constant is a
5687 multiple of the other, in which case we replace this with either an
5688 operation or CODE or TCODE.
5690 If we have an unsigned type that is not a sizetype, we cannot do
5691 this since it will change the result if the original computation
5692 overflowed. */
5693 if ((! TYPE_UNSIGNED (ctype)
5694 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5695 && ! flag_wrapv
5696 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5697 || (tcode == MULT_EXPR
5698 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5699 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5701 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5702 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5703 fold_convert (ctype,
5704 const_binop (TRUNC_DIV_EXPR,
5705 op1, c, 0)));
5706 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5707 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5708 fold_convert (ctype,
5709 const_binop (TRUNC_DIV_EXPR,
5710 c, op1, 0)));
5712 break;
5714 default:
5715 break;
/* No simplification found.  */
5718 return 0;
5721 /* Return a node which has the indicated constant VALUE (either 0 or
5722 1), and is of the indicated TYPE. */
5724 tree
5725 constant_boolean_node (int value, tree type)
5727 if (type == integer_type_node)
5728 return value ? integer_one_node : integer_zero_node;
5729 else if (type == boolean_type_node)
5730 return value ? boolean_true_node : boolean_false_node;
5731 else
5732 return build_int_cst (type, value);
5736 /* Return true if expr looks like an ARRAY_REF and set base and
5737 offset to the appropriate trees. If there is no offset,
5738 offset is set to NULL_TREE. Base will be canonicalized to
5739 something you can get the element type from using
5740 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5741 in bytes to the base. */
5743 static bool
5744 extract_array_ref (tree expr, tree *base, tree *offset)
5746 /* One canonical form is a PLUS_EXPR with the first
5747 argument being an ADDR_EXPR with a possible NOP_EXPR
5748 attached. */
5749 if (TREE_CODE (expr) == PLUS_EXPR)
5751 tree op0 = TREE_OPERAND (expr, 0);
5752 tree inner_base, dummy1;
5753 /* Strip NOP_EXPRs here because the C frontends and/or
5754 folders present us (int *)&x.a + 4B possibly. */
5755 STRIP_NOPS (op0);
/* Recurse on the pointer operand and fold the addend of the
   PLUS_EXPR into whatever offset the recursion produced.  */
5756 if (extract_array_ref (op0, &inner_base, &dummy1))
5758 *base = inner_base;
5759 if (dummy1 == NULL_TREE)
5760 *offset = TREE_OPERAND (expr, 1);
5761 else
5762 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5763 dummy1, TREE_OPERAND (expr, 1));
5764 return true;
5767 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5768 which we transform into an ADDR_EXPR with appropriate
5769 offset. For other arguments to the ADDR_EXPR we assume
5770 zero offset and as such do not care about the ADDR_EXPR
5771 type and strip possible nops from it. */
5772 else if (TREE_CODE (expr) == ADDR_EXPR)
5774 tree op0 = TREE_OPERAND (expr, 0);
5775 if (TREE_CODE (op0) == ARRAY_REF)
/* &a[i]: the byte offset is i * element_size.  */
5777 tree idx = TREE_OPERAND (op0, 1);
5778 *base = TREE_OPERAND (op0, 0);
5779 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5780 array_ref_element_size (op0));
5782 else
5784 /* Handle array-to-pointer decay as &a. */
5785 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5786 *base = TREE_OPERAND (expr, 0);
5787 else
5788 *base = expr;
5789 *offset = NULL_TREE;
5791 return true;
5793 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5794 else if (SSA_VAR_P (expr)
5795 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5797 *base = expr;
5798 *offset = NULL_TREE;
5799 return true;
/* Anything else is not recognized as an array reference.  */
5802 return false;
5806 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5807 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5808 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5809 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5810 COND is the first argument to CODE; otherwise (as in the example
5811 given here), it is the second argument. TYPE is the type of the
5812 original expression. Return NULL_TREE if no simplification is
5813 possible. */
5815 static tree
5816 fold_binary_op_with_conditional_arg (enum tree_code code,
5817 tree type, tree op0, tree op1,
5818 tree cond, tree arg, int cond_first_p)
5820 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5821 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5822 tree test, true_value, false_value;
/* LHS/RHS become the two arms of the result; a non-NULL value here
   means the arm is kept as-is rather than combined with ARG.  */
5823 tree lhs = NULL_TREE;
5824 tree rhs = NULL_TREE;
5826 /* This transformation is only worthwhile if we don't have to wrap
5827 arg in a SAVE_EXPR, and the operation can be simplified on at least
5828 one of the branches once it's pushed inside the COND_EXPR. */
5829 if (!TREE_CONSTANT (arg))
5830 return NULL_TREE;
5832 if (TREE_CODE (cond) == COND_EXPR)
5834 test = TREE_OPERAND (cond, 0);
5835 true_value = TREE_OPERAND (cond, 1);
5836 false_value = TREE_OPERAND (cond, 2);
5837 /* If an arm of the conditional has void type (e.g. it is a throw
5838 expression), it does not make sense to perform a logical or
5839 arithmetic operation involving it; keep that arm unchanged. */
5840 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5841 lhs = true_value;
5842 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5843 rhs = false_value;
5845 else
/* COND is a comparison: treat it as COND ? true : false.  */
5847 tree testtype = TREE_TYPE (cond);
5848 test = cond;
5849 true_value = constant_boolean_node (true, testtype);
5850 false_value = constant_boolean_node (false, testtype);
5853 arg = fold_convert (arg_type, arg);
5854 if (lhs == 0)
5856 true_value = fold_convert (cond_type, true_value);
5857 if (cond_first_p)
5858 lhs = fold_build2 (code, type, true_value, arg);
5859 else
5860 lhs = fold_build2 (code, type, arg, true_value);
5862 if (rhs == 0)
5864 false_value = fold_convert (cond_type, false_value);
5865 if (cond_first_p)
5866 rhs = fold_build2 (code, type, false_value, arg);
5867 else
5868 rhs = fold_build2 (code, type, arg, false_value);
5871 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5872 return fold_convert (type, test);
5876 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5878 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5879 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5880 ADDEND is the same as X.
5882 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5883 and finite. The problematic cases are when X is zero, and its mode
5884 has signed zeros. In the case of rounding towards -infinity,
5885 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5886 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5888 static bool
5889 fold_real_zero_addition_p (tree type, tree addend, int negate)
5891 if (!real_zerop (addend))
5892 return false;
5894 /* Don't allow the fold with -fsignaling-nans. */
5895 if (HONOR_SNANS (TYPE_MODE (type)))
5896 return false;
5898 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5899 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5900 return true;
5902 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5903 if (TREE_CODE (addend) == REAL_CST
5904 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5905 negate = !negate;
5907 /* The mode has signed zeros, and we have to honor their sign.
5908 In this situation, there is only one case we can return true for.
5909 X - 0 is the same as X unless rounding towards -infinity is
5910 supported. */
5911 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5914 /* Subroutine of fold() that checks comparisons of built-in math
5915 functions against real constants.
5917 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5918 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5919 is the type of the result and ARG0 and ARG1 are the operands of the
5920 comparison. ARG1 must be a TREE_REAL_CST.
5922 The function returns the constant folded tree if a simplification
5923 can be made, and NULL_TREE otherwise. */
5925 static tree
5926 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5927 tree type, tree arg0, tree arg1)
5929 REAL_VALUE_TYPE c;
/* Only sqrt-family builtins are handled; everything else falls through
   to the NULL_TREE return at the end.  */
5931 if (BUILTIN_SQRT_P (fcode))
5933 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5934 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5936 c = TREE_REAL_CST (arg1);
5937 if (REAL_VALUE_NEGATIVE (c))
5939 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always
false, if y is negative. */
5940 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5941 return omit_one_operand (type, integer_zero_node, arg)
5943 /* sqrt(x) > y is always true, if y is negative and we
5944 don't care about NaNs, i.e. negative values of x. */
5945 if (code == NE_EXPR || !HONOR_NANS (mode))
5946 return omit_one_operand (type, integer_one_node, arg);
5948 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5949 return fold_build2 (GE_EXPR, type, arg,
5950 build_real (TREE_TYPE (arg), dconst0));
5952 else if (code == GT_EXPR || code == GE_EXPR)
5954 REAL_VALUE_TYPE c2;
/* Compare against c*c, rounded to the argument's mode.  */
5956 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5957 real_convert (&c2, mode, &c2);
5959 if (REAL_VALUE_ISINF (c2))
5961 /* sqrt(x) > y is x == +Inf, when y is very large. */
5962 if (HONOR_INFINITIES (mode))
5963 return fold_build2 (EQ_EXPR, type, arg,
5964 build_real (TREE_TYPE (arg), c2));
5966 /* sqrt(x) > y is always false, when y is very large
5967 and we don't care about infinities. */
5968 return omit_one_operand (type, integer_zero_node, arg);
5971 /* sqrt(x) > c is the same as x > c*c. */
5972 return fold_build2 (code, type, arg,
5973 build_real (TREE_TYPE (arg), c2));
5975 else if (code == LT_EXPR || code == LE_EXPR)
5977 REAL_VALUE_TYPE c2;
5979 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5980 real_convert (&c2, mode, &c2);
5982 if (REAL_VALUE_ISINF (c2))
5984 /* sqrt(x) < y is always true, when y is a very large
5985 value and we don't care about NaNs or Infinities. */
5986 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5987 return omit_one_operand (type, integer_one_node, arg);
5989 /* sqrt(x) < y is x != +Inf when y is very large and we
5990 don't care about NaNs. */
5991 if (! HONOR_NANS (mode))
5992 return fold_build2 (NE_EXPR, type, arg,
5993 build_real (TREE_TYPE (arg), c2));
5995 /* sqrt(x) < y is x >= 0 when y is very large and we
5996 don't care about Infinities. */
5997 if (! HONOR_INFINITIES (mode))
5998 return fold_build2 (GE_EXPR, type, arg,
5999 build_real (TREE_TYPE (arg), dconst0));
6001 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so a SAVE_EXPR is needed; that is only
   valid when we are not at global scope and ARG contains no
   PLACEHOLDER_EXPR.  */
6002 if (lang_hooks.decls.global_bindings_p () != 0
6003 || CONTAINS_PLACEHOLDER_P (arg))
6004 return NULL_TREE;
6006 arg = save_expr (arg);
6007 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6008 fold_build2 (GE_EXPR, type, arg,
6009 build_real (TREE_TYPE (arg),
6010 dconst0)),
6011 fold_build2 (NE_EXPR, type, arg,
6012 build_real (TREE_TYPE (arg),
6013 c2)));
6016 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6017 if (! HONOR_NANS (mode))
6018 return fold_build2 (code, type, arg,
6019 build_real (TREE_TYPE (arg), c2));
6021 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6022 if (lang_hooks.decls.global_bindings_p () == 0
6023 && ! CONTAINS_PLACEHOLDER_P (arg))
6025 arg = save_expr (arg);
6026 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6027 fold_build2 (GE_EXPR, type, arg,
6028 build_real (TREE_TYPE (arg),
6029 dconst0)),
6030 fold_build2 (code, type, arg,
6031 build_real (TREE_TYPE (arg),
6032 c2)));
6037 return NULL_TREE;
6040 /* Subroutine of fold() that optimizes comparisons against Infinities,
6041 either +Inf or -Inf.
6043 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6044 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6045 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6047 The function returns the constant folded tree if a simplification
6048 can be made, and NULL_TREE otherwise. */
6050 static tree
6051 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6053 enum machine_mode mode;
6054 REAL_VALUE_TYPE max;
6055 tree temp;
6056 bool neg;
6058 mode = TYPE_MODE (TREE_TYPE (arg0));
6060 /* For negative infinity swap the sense of the comparison. */
6061 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6062 if (neg)
6063 code = swap_tree_comparison (code);
6065 switch (code)
6067 case GT_EXPR:
6068 /* x > +Inf is always false, if we ignore sNaNs. */
6069 if (HONOR_SNANS (mode))
6070 return NULL_TREE;
6071 return omit_one_operand (type, integer_zero_node, arg0);
6073 case LE_EXPR:
6074 /* x <= +Inf is always true, if we don't care about NaNs. */
6075 if (! HONOR_NANS (mode))
6076 return omit_one_operand (type, integer_one_node, arg0);
6078 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
/* ARG0 is used twice, so it needs a SAVE_EXPR; that is only valid
   outside global scope and when ARG0 has no PLACEHOLDER_EXPR.  */
6079 if (lang_hooks.decls.global_bindings_p () == 0
6080 && ! CONTAINS_PLACEHOLDER_P (arg0))
6082 arg0 = save_expr (arg0);
6083 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6085 break;
6087 case EQ_EXPR:
6088 case GE_EXPR:
6089 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6090 real_maxval (&max, neg, mode);
6091 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6092 arg0, build_real (TREE_TYPE (arg0), max));
6094 case LT_EXPR:
6095 /* x < +Inf is always equal to x <= DBL_MAX. */
6096 real_maxval (&max, neg, mode);
6097 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6098 arg0, build_real (TREE_TYPE (arg0), max));
6100 case NE_EXPR:
6101 /* x != +Inf is always equal to !(x > DBL_MAX). */
6102 real_maxval (&max, neg, mode);
6103 if (! HONOR_NANS (mode))
6104 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6105 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, keep the explicit negation so NaN operands still
   compare as "not greater".  */
6107 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6108 arg0, build_real (TREE_TYPE (arg0), max));
6109 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6111 default:
6112 break;
6115 return NULL_TREE;
6118 /* Subroutine of fold() that optimizes comparisons of a division by
6119 a nonzero integer constant against an integer constant, i.e.
6120 X/C1 op C2.
6122 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6123 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6124 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6126 The function returns the constant folded tree if a simplification
6127 can be made, and NULL_TREE otherwise. */
6129 static tree
6130 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6132 tree prod, tmp, hi, lo;
6133 tree arg00 = TREE_OPERAND (arg0, 0);
6134 tree arg01 = TREE_OPERAND (arg0, 1);
6135 unsigned HOST_WIDE_INT lpart;
6136 HOST_WIDE_INT hpart;
6137 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6138 bool neg_overflow;
6139 int overflow;
6141 /* We have to do this the hard way to detect unsigned overflow.
6142 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6143 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6144 TREE_INT_CST_HIGH (arg01),
6145 TREE_INT_CST_LOW (arg1),
6146 TREE_INT_CST_HIGH (arg1),
6147 &lpart, &hpart, unsigned_p);
6148 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6149 prod = force_fit_type (prod, -1, overflow, false);
6150 neg_overflow = false;
6152 if (unsigned_p)
6154 tmp = int_const_binop (MINUS_EXPR, arg01,
6155 build_int_cst (TREE_TYPE (arg01), 1), 0);
6156 lo = prod;
6158 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6159 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6160 TREE_INT_CST_HIGH (prod),
6161 TREE_INT_CST_LOW (tmp),
6162 TREE_INT_CST_HIGH (tmp),
6163 &lpart, &hpart, unsigned_p);
6164 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6165 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6166 TREE_CONSTANT_OVERFLOW (prod));
6168 else if (tree_int_cst_sgn (arg01) >= 0)
6170 tmp = int_const_binop (MINUS_EXPR, arg01,
6171 build_int_cst (TREE_TYPE (arg01), 1), 0);
6172 switch (tree_int_cst_sgn (arg1))
6174 case -1:
6175 neg_overflow = true;
6176 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6177 hi = prod;
6178 break;
6180 case 0:
6181 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6182 hi = tmp;
6183 break;
6185 case 1:
6186 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6187 lo = prod;
6188 break;
6190 default:
6191 gcc_unreachable ();
6194 else
6196 /* A negative divisor reverses the relational operators. */
6197 code = swap_tree_comparison (code);
6199 tmp = int_const_binop (PLUS_EXPR, arg01,
6200 build_int_cst (TREE_TYPE (arg01), 1), 0);
6201 switch (tree_int_cst_sgn (arg1))
6203 case -1:
6204 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6205 lo = prod;
6206 break;
6208 case 0:
6209 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6210 lo = tmp;
6211 break;
6213 case 1:
6214 neg_overflow = true;
6215 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6216 hi = prod;
6217 break;
6219 default:
6220 gcc_unreachable ();
6224 switch (code)
6226 case EQ_EXPR:
6227 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6228 return omit_one_operand (type, integer_zero_node, arg00);
6229 if (TREE_OVERFLOW (hi))
6230 return fold_build2 (GE_EXPR, type, arg00, lo);
6231 if (TREE_OVERFLOW (lo))
6232 return fold_build2 (LE_EXPR, type, arg00, hi);
6233 return build_range_check (type, arg00, 1, lo, hi);
6235 case NE_EXPR:
6236 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6237 return omit_one_operand (type, integer_one_node, arg00);
6238 if (TREE_OVERFLOW (hi))
6239 return fold_build2 (LT_EXPR, type, arg00, lo);
6240 if (TREE_OVERFLOW (lo))
6241 return fold_build2 (GT_EXPR, type, arg00, hi);
6242 return build_range_check (type, arg00, 0, lo, hi);
6244 case LT_EXPR:
6245 if (TREE_OVERFLOW (lo))
6247 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6248 return omit_one_operand (type, tmp, arg00);
6250 return fold_build2 (LT_EXPR, type, arg00, lo);
6252 case LE_EXPR:
6253 if (TREE_OVERFLOW (hi))
6255 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6256 return omit_one_operand (type, tmp, arg00);
6258 return fold_build2 (LE_EXPR, type, arg00, hi);
6260 case GT_EXPR:
6261 if (TREE_OVERFLOW (hi))
6263 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6264 return omit_one_operand (type, tmp, arg00);
6266 return fold_build2 (GT_EXPR, type, arg00, hi);
6268 case GE_EXPR:
6269 if (TREE_OVERFLOW (lo))
6271 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6272 return omit_one_operand (type, tmp, arg00);
6274 return fold_build2 (GE_EXPR, type, arg00, lo);
6276 default:
6277 break;
6280 return NULL_TREE;
6284 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6285 equality/inequality test, then return a simplified form of the test
6286 using a sign testing. Otherwise return NULL. TYPE is the desired
6287 result type. */
6289 static tree
6290 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6291 tree result_type)
6293 /* If this is testing a single bit, we can optimize the test. */
6294 if ((code == NE_EXPR || code == EQ_EXPR)
6295 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6296 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6298 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6299 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6300 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6302 if (arg00 != NULL_TREE
6303 /* This is only a win if casting to a signed type is cheap,
6304 i.e. when arg00's type is not a partial mode. */
6305 && TYPE_PRECISION (TREE_TYPE (arg00))
6306 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6308 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6309 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6310 result_type, fold_convert (stype, arg00),
6311 build_int_cst (stype, 0));
6315 return NULL_TREE;
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* BITNUM is log2 of the power-of-two mask, i.e. the tested bit.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  /* NOTE(review): since bitnum < TYPE_PRECISION (type), the second
	     argument below is negative; compare_tree_int's bound parameter
	     is unsigned, so this looks like it wraps to a huge value and
	     the test is effectively always true.  Presumably it was meant
	     to be TYPE_PRECISION (type) - bitnum -- verify against later
	     GCC revisions before changing.  */
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Shift the tested bit down into bit 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      /* For == 0 the result is the inverted bit, hence the XOR.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6399 /* Check whether we are allowed to reorder operands arg0 and arg1,
6400 such that the evaluation of arg1 occurs before arg0. */
6402 static bool
6403 reorder_operands_p (tree arg0, tree arg1)
6405 if (! flag_evaluation_order)
6406 return true;
6407 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6408 return true;
6409 return ! TREE_SIDE_EFFECTS (arg0)
6410 && ! TREE_SIDE_EFFECTS (arg1);
6413 /* Test whether it is preferable two swap two operands, ARG0 and
6414 ARG1, for example because ARG0 is an integer constant and ARG1
6415 isn't. If REORDER is true, only recommend swapping if we can
6416 evaluate the operands in reverse order. */
6418 bool
6419 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6421 STRIP_SIGN_NOPS (arg0);
6422 STRIP_SIGN_NOPS (arg1);
6424 if (TREE_CODE (arg1) == INTEGER_CST)
6425 return 0;
6426 if (TREE_CODE (arg0) == INTEGER_CST)
6427 return 1;
6429 if (TREE_CODE (arg1) == REAL_CST)
6430 return 0;
6431 if (TREE_CODE (arg0) == REAL_CST)
6432 return 1;
6434 if (TREE_CODE (arg1) == COMPLEX_CST)
6435 return 0;
6436 if (TREE_CODE (arg0) == COMPLEX_CST)
6437 return 1;
6439 if (TREE_CONSTANT (arg1))
6440 return 0;
6441 if (TREE_CONSTANT (arg0))
6442 return 1;
6444 if (optimize_size)
6445 return 0;
6447 if (reorder && flag_evaluation_order
6448 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6449 return 0;
6451 if (DECL_P (arg1))
6452 return 0;
6453 if (DECL_P (arg0))
6454 return 1;
6456 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6457 for commutative and comparison operators. Ensuring a canonical
6458 form allows the optimizers to find additional redundancies without
6459 having to explicitly check for both orderings. */
6460 if (TREE_CODE (arg0) == SSA_NAME
6461 && TREE_CODE (arg1) == SSA_NAME
6462 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6463 return 1;
6465 return 0;
6468 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6469 ARG0 is extended to a wider type. */
6471 static tree
6472 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6474 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6475 tree arg1_unw;
6476 tree shorter_type, outer_type;
6477 tree min, max;
6478 bool above, below;
6480 if (arg0_unw == arg0)
6481 return NULL_TREE;
6482 shorter_type = TREE_TYPE (arg0_unw);
6484 #ifdef HAVE_canonicalize_funcptr_for_compare
6485 /* Disable this optimization if we're casting a function pointer
6486 type on targets that require function pointer canonicalization. */
6487 if (HAVE_canonicalize_funcptr_for_compare
6488 && TREE_CODE (shorter_type) == POINTER_TYPE
6489 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6490 return NULL_TREE;
6491 #endif
6493 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6494 return NULL_TREE;
6496 arg1_unw = get_unwidened (arg1, shorter_type);
6498 /* If possible, express the comparison in the shorter mode. */
6499 if ((code == EQ_EXPR || code == NE_EXPR
6500 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6501 && (TREE_TYPE (arg1_unw) == shorter_type
6502 || (TREE_CODE (arg1_unw) == INTEGER_CST
6503 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6504 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6505 && int_fits_type_p (arg1_unw, shorter_type))))
6506 return fold_build2 (code, type, arg0_unw,
6507 fold_convert (shorter_type, arg1_unw));
6509 if (TREE_CODE (arg1_unw) != INTEGER_CST
6510 || TREE_CODE (shorter_type) != INTEGER_TYPE
6511 || !int_fits_type_p (arg1_unw, shorter_type))
6512 return NULL_TREE;
6514 /* If we are comparing with the integer that does not fit into the range
6515 of the shorter type, the result is known. */
6516 outer_type = TREE_TYPE (arg1_unw);
6517 min = lower_bound_in_type (outer_type, shorter_type);
6518 max = upper_bound_in_type (outer_type, shorter_type);
6520 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6521 max, arg1_unw));
6522 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6523 arg1_unw, min));
6525 switch (code)
6527 case EQ_EXPR:
6528 if (above || below)
6529 return omit_one_operand (type, integer_zero_node, arg0);
6530 break;
6532 case NE_EXPR:
6533 if (above || below)
6534 return omit_one_operand (type, integer_one_node, arg0);
6535 break;
6537 case LT_EXPR:
6538 case LE_EXPR:
6539 if (above)
6540 return omit_one_operand (type, integer_one_node, arg0);
6541 else if (below)
6542 return omit_one_operand (type, integer_zero_node, arg0);
6544 case GT_EXPR:
6545 case GE_EXPR:
6546 if (above)
6547 return omit_one_operand (type, integer_zero_node, arg0);
6548 else if (below)
6549 return omit_one_operand (type, integer_one_node, arg0);
6551 default:
6552 break;
6555 return NULL_TREE;
6558 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6559 ARG0 just the signedness is changed. */
6561 static tree
6562 fold_sign_changed_comparison (enum tree_code code, tree type,
6563 tree arg0, tree arg1)
6565 tree arg0_inner, tmp;
6566 tree inner_type, outer_type;
6568 if (TREE_CODE (arg0) != NOP_EXPR
6569 && TREE_CODE (arg0) != CONVERT_EXPR)
6570 return NULL_TREE;
6572 outer_type = TREE_TYPE (arg0);
6573 arg0_inner = TREE_OPERAND (arg0, 0);
6574 inner_type = TREE_TYPE (arg0_inner);
6576 #ifdef HAVE_canonicalize_funcptr_for_compare
6577 /* Disable this optimization if we're casting a function pointer
6578 type on targets that require function pointer canonicalization. */
6579 if (HAVE_canonicalize_funcptr_for_compare
6580 && TREE_CODE (inner_type) == POINTER_TYPE
6581 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6582 return NULL_TREE;
6583 #endif
6585 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6586 return NULL_TREE;
6588 if (TREE_CODE (arg1) != INTEGER_CST
6589 && !((TREE_CODE (arg1) == NOP_EXPR
6590 || TREE_CODE (arg1) == CONVERT_EXPR)
6591 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6592 return NULL_TREE;
6594 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6595 && code != NE_EXPR
6596 && code != EQ_EXPR)
6597 return NULL_TREE;
6599 if (TREE_CODE (arg1) == INTEGER_CST)
6601 tmp = build_int_cst_wide (inner_type,
6602 TREE_INT_CST_LOW (arg1),
6603 TREE_INT_CST_HIGH (arg1));
6604 arg1 = force_fit_type (tmp, 0,
6605 TREE_OVERFLOW (arg1),
6606 TREE_CONSTANT_OVERFLOW (arg1));
6608 else
6609 arg1 = fold_convert (inner_type, arg1);
6611 return fold_build2 (code, type, arg0_inner, arg1);
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  MULT is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* s * delta is already folded into a single constant; s is
	 unknown and will be recovered from the array's step below.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk down the chain of component references looking for an
     ARRAY_REF whose element size matches s (or divides delta when s
     is unknown).  A `continue' moves one level deeper.  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      /* From here on, delta is an element count, not bytes.  */
	      delta = tmp;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Duplicate the reference chain from ADDR down to REF so the
     original tree is left untouched; POS ends at the copied REF.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Fold the delta into the index of the copied ARRAY_REF.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6719 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6720 means A >= Y && A != MAX, but in this case we know that
6721 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6723 static tree
6724 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6726 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6728 if (TREE_CODE (bound) == LT_EXPR)
6729 a = TREE_OPERAND (bound, 0);
6730 else if (TREE_CODE (bound) == GT_EXPR)
6731 a = TREE_OPERAND (bound, 1);
6732 else
6733 return NULL_TREE;
6735 typea = TREE_TYPE (a);
6736 if (!INTEGRAL_TYPE_P (typea)
6737 && !POINTER_TYPE_P (typea))
6738 return NULL_TREE;
6740 if (TREE_CODE (ineq) == LT_EXPR)
6742 a1 = TREE_OPERAND (ineq, 1);
6743 y = TREE_OPERAND (ineq, 0);
6745 else if (TREE_CODE (ineq) == GT_EXPR)
6747 a1 = TREE_OPERAND (ineq, 0);
6748 y = TREE_OPERAND (ineq, 1);
6750 else
6751 return NULL_TREE;
6753 if (TREE_TYPE (a1) != typea)
6754 return NULL_TREE;
6756 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6757 if (!integer_onep (diff))
6758 return NULL_TREE;
6760 return fold_build2 (GE_EXPR, type, a, y);
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  /* Decompose each operand into factors; a non-MULT operand is
     treated as itself times one.  */
  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Look for a factor common to both products; SAME is the shared
     factor, ALT0/ALT1 the remaining ones.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      /* exact_log2 (...) > 0 excludes a factor of +-1, so the shared
	 factor must be a power of two of at least 2 that divides the
	 larger constant exactly.  */
      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
	{
	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Undo the earlier operand swap so ALT0/ALT1 line up with the
	     original arg0/arg1 order.  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
			fold_build2 (code, type,
				     fold_convert (type, alt0),
				     fold_convert (type, alt1)),
			fold_convert (type, same));

  return NULL_TREE;
}
6855 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6856 specified by EXPR into the buffer PTR of length LEN bytes.
6857 Return the number of bytes placed in the buffer, or zero
6858 upon failure. */
6860 static int
6861 native_encode_int (tree expr, unsigned char *ptr, int len)
6863 tree type = TREE_TYPE (expr);
6864 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6865 int byte, offset, word, words;
6866 unsigned char value;
6868 if (total_bytes > len)
6869 return 0;
6870 words = total_bytes / UNITS_PER_WORD;
6872 for (byte = 0; byte < total_bytes; byte++)
6874 int bitpos = byte * BITS_PER_UNIT;
6875 if (bitpos < HOST_BITS_PER_WIDE_INT)
6876 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6877 else
6878 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6879 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6881 if (total_bytes > UNITS_PER_WORD)
6883 word = byte / UNITS_PER_WORD;
6884 if (WORDS_BIG_ENDIAN)
6885 word = (words - 1) - word;
6886 offset = word * UNITS_PER_WORD;
6887 if (BYTES_BIG_ENDIAN)
6888 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6889 else
6890 offset += byte % UNITS_PER_WORD;
6892 else
6893 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6894 ptr[offset] = value;
6896 return total_bytes;
6900 /* Subroutine of native_encode_expr. Encode the REAL_CST
6901 specified by EXPR into the buffer PTR of length LEN bytes.
6902 Return the number of bytes placed in the buffer, or zero
6903 upon failure. */
6905 static int
6906 native_encode_real (tree expr, unsigned char *ptr, int len)
6908 tree type = TREE_TYPE (expr);
6909 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6910 int byte, offset, word, words;
6911 unsigned char value;
6913 /* There are always 32 bits in each long, no matter the size of
6914 the hosts long. We handle floating point representations with
6915 up to 192 bits. */
6916 long tmp[6];
6918 if (total_bytes > len)
6919 return 0;
6920 words = total_bytes / UNITS_PER_WORD;
6922 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6924 for (byte = 0; byte < total_bytes; byte++)
6926 int bitpos = byte * BITS_PER_UNIT;
6927 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6929 if (total_bytes > UNITS_PER_WORD)
6931 word = byte / UNITS_PER_WORD;
6932 if (FLOAT_WORDS_BIG_ENDIAN)
6933 word = (words - 1) - word;
6934 offset = word * UNITS_PER_WORD;
6935 if (BYTES_BIG_ENDIAN)
6936 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6937 else
6938 offset += byte % UNITS_PER_WORD;
6940 else
6941 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6942 ptr[offset] = value;
6944 return total_bytes;
6947 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6948 specified by EXPR into the buffer PTR of length LEN bytes.
6949 Return the number of bytes placed in the buffer, or zero
6950 upon failure. */
6952 static int
6953 native_encode_complex (tree expr, unsigned char *ptr, int len)
6955 int rsize, isize;
6956 tree part;
6958 part = TREE_REALPART (expr);
6959 rsize = native_encode_expr (part, ptr, len);
6960 if (rsize == 0)
6961 return 0;
6962 part = TREE_IMAGPART (expr);
6963 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6964 if (isize != rsize)
6965 return 0;
6966 return rsize + isize;
6970 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6971 specified by EXPR into the buffer PTR of length LEN bytes.
6972 Return the number of bytes placed in the buffer, or zero
6973 upon failure. */
6975 static int
6976 native_encode_vector (tree expr, unsigned char *ptr, int len)
6978 int i, size, offset, count;
6979 tree itype, elem, elements;
6981 offset = 0;
6982 elements = TREE_VECTOR_CST_ELTS (expr);
6983 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6984 itype = TREE_TYPE (TREE_TYPE (expr));
6985 size = GET_MODE_SIZE (TYPE_MODE (itype));
6986 for (i = 0; i < count; i++)
6988 if (elements)
6990 elem = TREE_VALUE (elements);
6991 elements = TREE_CHAIN (elements);
6993 else
6994 elem = NULL_TREE;
6996 if (elem)
6998 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6999 return 0;
7001 else
7003 if (offset + size > len)
7004 return 0;
7005 memset (ptr+offset, 0, size);
7007 offset += size;
7009 return offset;
7013 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7014 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7015 buffer PTR of length LEN bytes. Return the number of bytes
7016 placed in the buffer, or zero upon failure. */
7018 static int
7019 native_encode_expr (tree expr, unsigned char *ptr, int len)
7021 switch (TREE_CODE (expr))
7023 case INTEGER_CST:
7024 return native_encode_int (expr, ptr, len);
7026 case REAL_CST:
7027 return native_encode_real (expr, ptr, len);
7029 case COMPLEX_CST:
7030 return native_encode_complex (expr, ptr, len);
7032 case VECTOR_CST:
7033 return native_encode_vector (expr, ptr, len);
7035 default:
7036 return 0;
7041 /* Subroutine of native_interpret_expr. Interpret the contents of
7042 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7043 If the buffer cannot be interpreted, return NULL_TREE. */
7045 static tree
7046 native_interpret_int (tree type, unsigned char *ptr, int len)
7048 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7049 int byte, offset, word, words;
7050 unsigned char value;
7051 unsigned int HOST_WIDE_INT lo = 0;
7052 HOST_WIDE_INT hi = 0;
7054 if (total_bytes > len)
7055 return NULL_TREE;
7056 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7057 return NULL_TREE;
7058 words = total_bytes / UNITS_PER_WORD;
7060 for (byte = 0; byte < total_bytes; byte++)
7062 int bitpos = byte * BITS_PER_UNIT;
7063 if (total_bytes > UNITS_PER_WORD)
7065 word = byte / UNITS_PER_WORD;
7066 if (WORDS_BIG_ENDIAN)
7067 word = (words - 1) - word;
7068 offset = word * UNITS_PER_WORD;
7069 if (BYTES_BIG_ENDIAN)
7070 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7071 else
7072 offset += byte % UNITS_PER_WORD;
7074 else
7075 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7076 value = ptr[offset];
7078 if (bitpos < HOST_BITS_PER_WIDE_INT)
7079 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7080 else
7081 hi |= (unsigned HOST_WIDE_INT) value
7082 << (bitpos - HOST_BITS_PER_WIDE_INT);
7085 return force_fit_type (build_int_cst_wide (type, lo, hi),
7086 0, false, false);
7090 /* Subroutine of native_interpret_expr. Interpret the contents of
7091 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7092 If the buffer cannot be interpreted, return NULL_TREE. */
7094 static tree
7095 native_interpret_real (tree type, unsigned char *ptr, int len)
7097 enum machine_mode mode = TYPE_MODE (type);
7098 int total_bytes = GET_MODE_SIZE (mode);
7099 int byte, offset, word, words;
7100 unsigned char value;
7101 /* There are always 32 bits in each long, no matter the size of
7102 the hosts long. We handle floating point representations with
7103 up to 192 bits. */
7104 REAL_VALUE_TYPE r;
7105 long tmp[6];
7107 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7108 if (total_bytes > len || total_bytes > 24)
7109 return NULL_TREE;
7110 words = total_bytes / UNITS_PER_WORD;
7112 memset (tmp, 0, sizeof (tmp));
7113 for (byte = 0; byte < total_bytes; byte++)
7115 int bitpos = byte * BITS_PER_UNIT;
7116 if (total_bytes > UNITS_PER_WORD)
7118 word = byte / UNITS_PER_WORD;
7119 if (FLOAT_WORDS_BIG_ENDIAN)
7120 word = (words - 1) - word;
7121 offset = word * UNITS_PER_WORD;
7122 if (BYTES_BIG_ENDIAN)
7123 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7124 else
7125 offset += byte % UNITS_PER_WORD;
7127 else
7128 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7129 value = ptr[offset];
7131 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7134 real_from_target (&r, tmp, mode);
7135 return build_real (type, r);
7139 /* Subroutine of native_interpret_expr. Interpret the contents of
7140 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7141 If the buffer cannot be interpreted, return NULL_TREE. */
7143 static tree
7144 native_interpret_complex (tree type, unsigned char *ptr, int len)
7146 tree etype, rpart, ipart;
7147 int size;
7149 etype = TREE_TYPE (type);
7150 size = GET_MODE_SIZE (TYPE_MODE (etype));
7151 if (size * 2 > len)
7152 return NULL_TREE;
7153 rpart = native_interpret_expr (etype, ptr, size);
7154 if (!rpart)
7155 return NULL_TREE;
7156 ipart = native_interpret_expr (etype, ptr+size, size);
7157 if (!ipart)
7158 return NULL_TREE;
7159 return build_complex (type, rpart, ipart);
7163 /* Subroutine of native_interpret_expr. Interpret the contents of
7164 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7165 If the buffer cannot be interpreted, return NULL_TREE. */
7167 static tree
7168 native_interpret_vector (tree type, unsigned char *ptr, int len)
7170 tree etype, elem, elements;
7171 int i, size, count;
7173 etype = TREE_TYPE (type);
7174 size = GET_MODE_SIZE (TYPE_MODE (etype));
7175 count = TYPE_VECTOR_SUBPARTS (type);
7176 if (size * count > len)
7177 return NULL_TREE;
7179 elements = NULL_TREE;
7180 for (i = count - 1; i >= 0; i--)
7182 elem = native_interpret_expr (etype, ptr+(i*size), size);
7183 if (!elem)
7184 return NULL_TREE;
7185 elements = tree_cons (NULL_TREE, elem, elements);
7187 return build_vector (type, elements);
7191 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7192 the buffer PTR of length LEN as a constant of type TYPE. For
7193 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7194 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7195 return NULL_TREE. */
7197 static tree
7198 native_interpret_expr (tree type, unsigned char *ptr, int len)
7200 switch (TREE_CODE (type))
7202 case INTEGER_TYPE:
7203 case ENUMERAL_TYPE:
7204 case BOOLEAN_TYPE:
7205 return native_interpret_int (type, ptr, len);
7207 case REAL_TYPE:
7208 return native_interpret_real (type, ptr, len);
7210 case COMPLEX_TYPE:
7211 return native_interpret_complex (type, ptr, len);
7213 case VECTOR_TYPE:
7214 return native_interpret_vector (type, ptr, len);
7216 default:
7217 return NULL_TREE;
7222 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7223 TYPE at compile-time. If we're unable to perform the conversion
7224 return NULL_TREE. */
7226 static tree
7227 fold_view_convert_expr (tree type, tree expr)
7229 /* We support up to 512-bit values (for V8DFmode). */
7230 unsigned char buffer[64];
7231 int len;
7233 /* Check that the host and target are sane. */
7234 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7235 return NULL_TREE;
7237 len = native_encode_expr (expr, buffer, sizeof (buffer));
7238 if (len == 0)
7239 return NULL_TREE;
7241 return native_interpret_expr (type, buffer, len);
7245 /* Fold a unary expression of code CODE and type TYPE with operand
7246 OP0. Return the folded expression if folding is successful.
7247 Otherwise, return NULL_TREE. */
7249 tree
7250 fold_unary (enum tree_code code, tree type, tree op0)
7252 tree tem;
7253 tree arg0;
7254 enum tree_code_class kind = TREE_CODE_CLASS (code);
7256 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7257 && TREE_CODE_LENGTH (code) == 1);
7259 arg0 = op0;
7260 if (arg0)
7262 if (code == NOP_EXPR || code == CONVERT_EXPR
7263 || code == FLOAT_EXPR || code == ABS_EXPR)
7265 /* Don't use STRIP_NOPS, because signedness of argument type
7266 matters. */
7267 STRIP_SIGN_NOPS (arg0);
7269 else
7271 /* Strip any conversions that don't change the mode. This
7272 is safe for every expression, except for a comparison
7273 expression because its signedness is derived from its
7274 operands.
7276 Note that this is done as an internal manipulation within
7277 the constant folder, in order to find the simplest
7278 representation of the arguments so that their form can be
7279 studied. In any cases, the appropriate type conversions
7280 should be put back in the tree that will get out of the
7281 constant folder. */
7282 STRIP_NOPS (arg0);
7286 if (TREE_CODE_CLASS (code) == tcc_unary)
7288 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7289 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7290 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7291 else if (TREE_CODE (arg0) == COND_EXPR)
7293 tree arg01 = TREE_OPERAND (arg0, 1);
7294 tree arg02 = TREE_OPERAND (arg0, 2);
7295 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7296 arg01 = fold_build1 (code, type, arg01);
7297 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7298 arg02 = fold_build1 (code, type, arg02);
7299 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7300 arg01, arg02);
7302 /* If this was a conversion, and all we did was to move into
7303 inside the COND_EXPR, bring it back out. But leave it if
7304 it is a conversion from integer to integer and the
7305 result precision is no wider than a word since such a
7306 conversion is cheap and may be optimized away by combine,
7307 while it couldn't if it were outside the COND_EXPR. Then return
7308 so we don't get into an infinite recursion loop taking the
7309 conversion out and then back in. */
7311 if ((code == NOP_EXPR || code == CONVERT_EXPR
7312 || code == NON_LVALUE_EXPR)
7313 && TREE_CODE (tem) == COND_EXPR
7314 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7315 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7316 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7317 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7318 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7319 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7320 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7321 && (INTEGRAL_TYPE_P
7322 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7323 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7324 || flag_syntax_only))
7325 tem = build1 (code, type,
7326 build3 (COND_EXPR,
7327 TREE_TYPE (TREE_OPERAND
7328 (TREE_OPERAND (tem, 1), 0)),
7329 TREE_OPERAND (tem, 0),
7330 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7331 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7332 return tem;
7334 else if (COMPARISON_CLASS_P (arg0))
7336 if (TREE_CODE (type) == BOOLEAN_TYPE)
7338 arg0 = copy_node (arg0);
7339 TREE_TYPE (arg0) = type;
7340 return arg0;
7342 else if (TREE_CODE (type) != INTEGER_TYPE)
7343 return fold_build3 (COND_EXPR, type, arg0,
7344 fold_build1 (code, type,
7345 integer_one_node),
7346 fold_build1 (code, type,
7347 integer_zero_node));
7351 switch (code)
7353 case NOP_EXPR:
7354 case FLOAT_EXPR:
7355 case CONVERT_EXPR:
7356 case FIX_TRUNC_EXPR:
7357 if (TREE_TYPE (op0) == type)
7358 return op0;
7360 /* If we have (type) (a CMP b) and type is an integral type, return
7361 new expression involving the new type. */
7362 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7363 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7364 TREE_OPERAND (op0, 1));
7366 /* Handle cases of two conversions in a row. */
7367 if (TREE_CODE (op0) == NOP_EXPR
7368 || TREE_CODE (op0) == CONVERT_EXPR)
7370 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7371 tree inter_type = TREE_TYPE (op0);
7372 int inside_int = INTEGRAL_TYPE_P (inside_type);
7373 int inside_ptr = POINTER_TYPE_P (inside_type);
7374 int inside_float = FLOAT_TYPE_P (inside_type);
7375 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7376 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7377 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7378 int inter_int = INTEGRAL_TYPE_P (inter_type);
7379 int inter_ptr = POINTER_TYPE_P (inter_type);
7380 int inter_float = FLOAT_TYPE_P (inter_type);
7381 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7382 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7383 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7384 int final_int = INTEGRAL_TYPE_P (type);
7385 int final_ptr = POINTER_TYPE_P (type);
7386 int final_float = FLOAT_TYPE_P (type);
7387 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7388 unsigned int final_prec = TYPE_PRECISION (type);
7389 int final_unsignedp = TYPE_UNSIGNED (type);
7391 /* In addition to the cases of two conversions in a row
7392 handled below, if we are converting something to its own
7393 type via an object of identical or wider precision, neither
7394 conversion is needed. */
7395 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7396 && (((inter_int || inter_ptr) && final_int)
7397 || (inter_float && final_float))
7398 && inter_prec >= final_prec)
7399 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7401 /* Likewise, if the intermediate and final types are either both
7402 float or both integer, we don't need the middle conversion if
7403 it is wider than the final type and doesn't change the signedness
7404 (for integers). Avoid this if the final type is a pointer
7405 since then we sometimes need the inner conversion. Likewise if
7406 the outer has a precision not equal to the size of its mode. */
7407 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7408 || (inter_float && inside_float)
7409 || (inter_vec && inside_vec))
7410 && inter_prec >= inside_prec
7411 && (inter_float || inter_vec
7412 || inter_unsignedp == inside_unsignedp)
7413 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7414 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7415 && ! final_ptr
7416 && (! final_vec || inter_prec == inside_prec))
7417 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7419 /* If we have a sign-extension of a zero-extended value, we can
7420 replace that by a single zero-extension. */
7421 if (inside_int && inter_int && final_int
7422 && inside_prec < inter_prec && inter_prec < final_prec
7423 && inside_unsignedp && !inter_unsignedp)
7424 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7426 /* Two conversions in a row are not needed unless:
7427 - some conversion is floating-point (overstrict for now), or
7428 - some conversion is a vector (overstrict for now), or
7429 - the intermediate type is narrower than both initial and
7430 final, or
7431 - the intermediate type and innermost type differ in signedness,
7432 and the outermost type is wider than the intermediate, or
7433 - the initial type is a pointer type and the precisions of the
7434 intermediate and final types differ, or
7435 - the final type is a pointer type and the precisions of the
7436 initial and intermediate types differ.
7437 - the final type is a pointer type and the initial type not
7438 - the initial type is a pointer to an array and the final type
7439 not. */
7440 if (! inside_float && ! inter_float && ! final_float
7441 && ! inside_vec && ! inter_vec && ! final_vec
7442 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7443 && ! (inside_int && inter_int
7444 && inter_unsignedp != inside_unsignedp
7445 && inter_prec < final_prec)
7446 && ((inter_unsignedp && inter_prec > inside_prec)
7447 == (final_unsignedp && final_prec > inter_prec))
7448 && ! (inside_ptr && inter_prec != final_prec)
7449 && ! (final_ptr && inside_prec != inter_prec)
7450 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7451 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7452 && final_ptr == inside_ptr
7453 && ! (inside_ptr
7454 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7455 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7456 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7459 /* Handle (T *)&A.B.C for A being of type T and B and C
7460 living at offset zero. This occurs frequently in
7461 C++ upcasting and then accessing the base. */
7462 if (TREE_CODE (op0) == ADDR_EXPR
7463 && POINTER_TYPE_P (type)
7464 && handled_component_p (TREE_OPERAND (op0, 0)))
7466 HOST_WIDE_INT bitsize, bitpos;
7467 tree offset;
7468 enum machine_mode mode;
7469 int unsignedp, volatilep;
7470 tree base = TREE_OPERAND (op0, 0);
7471 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7472 &mode, &unsignedp, &volatilep, false);
7473 /* If the reference was to a (constant) zero offset, we can use
7474 the address of the base if it has the same base type
7475 as the result type. */
7476 if (! offset && bitpos == 0
7477 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7478 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7479 return fold_convert (type, build_fold_addr_expr (base));
7482 if ((TREE_CODE (op0) == MODIFY_EXPR
7483 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7484 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7485 /* Detect assigning a bitfield. */
7486 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7487 && DECL_BIT_FIELD
7488 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7490 /* Don't leave an assignment inside a conversion
7491 unless assigning a bitfield. */
7492 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7493 /* First do the assignment, then return converted constant. */
7494 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7495 TREE_NO_WARNING (tem) = 1;
7496 TREE_USED (tem) = 1;
7497 return tem;
7500 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7501 constants (if x has signed type, the sign bit cannot be set
7502 in c). This folds extension into the BIT_AND_EXPR. */
7503 if (INTEGRAL_TYPE_P (type)
7504 && TREE_CODE (type) != BOOLEAN_TYPE
7505 && TREE_CODE (op0) == BIT_AND_EXPR
7506 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7508 tree and = op0;
7509 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7510 int change = 0;
7512 if (TYPE_UNSIGNED (TREE_TYPE (and))
7513 || (TYPE_PRECISION (type)
7514 <= TYPE_PRECISION (TREE_TYPE (and))))
7515 change = 1;
7516 else if (TYPE_PRECISION (TREE_TYPE (and1))
7517 <= HOST_BITS_PER_WIDE_INT
7518 && host_integerp (and1, 1))
7520 unsigned HOST_WIDE_INT cst;
7522 cst = tree_low_cst (and1, 1);
7523 cst &= (HOST_WIDE_INT) -1
7524 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7525 change = (cst == 0);
7526 #ifdef LOAD_EXTEND_OP
7527 if (change
7528 && !flag_syntax_only
7529 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7530 == ZERO_EXTEND))
7532 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7533 and0 = fold_convert (uns, and0);
7534 and1 = fold_convert (uns, and1);
7536 #endif
7538 if (change)
7540 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7541 TREE_INT_CST_HIGH (and1));
7542 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7543 TREE_CONSTANT_OVERFLOW (and1));
7544 return fold_build2 (BIT_AND_EXPR, type,
7545 fold_convert (type, and0), tem);
7549 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7550 T2 being pointers to types of the same size. */
7551 if (POINTER_TYPE_P (type)
7552 && BINARY_CLASS_P (arg0)
7553 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7554 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7556 tree arg00 = TREE_OPERAND (arg0, 0);
7557 tree t0 = type;
7558 tree t1 = TREE_TYPE (arg00);
7559 tree tt0 = TREE_TYPE (t0);
7560 tree tt1 = TREE_TYPE (t1);
7561 tree s0 = TYPE_SIZE (tt0);
7562 tree s1 = TYPE_SIZE (tt1);
7564 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7565 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7566 TREE_OPERAND (arg0, 1));
7569 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7570 of the same precision, and X is a integer type not narrower than
7571 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7572 if (INTEGRAL_TYPE_P (type)
7573 && TREE_CODE (op0) == BIT_NOT_EXPR
7574 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7575 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7576 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7577 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7579 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7580 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7581 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7582 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7585 tem = fold_convert_const (code, type, arg0);
7586 return tem ? tem : NULL_TREE;
7588 case VIEW_CONVERT_EXPR:
7589 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7590 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7591 return fold_view_convert_expr (type, op0);
7593 case NEGATE_EXPR:
7594 tem = fold_negate_expr (arg0);
7595 if (tem)
7596 return fold_convert (type, tem);
7597 return NULL_TREE;
7599 case ABS_EXPR:
7600 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7601 return fold_abs_const (arg0, type);
7602 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7603 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7604 /* Convert fabs((double)float) into (double)fabsf(float). */
7605 else if (TREE_CODE (arg0) == NOP_EXPR
7606 && TREE_CODE (type) == REAL_TYPE)
7608 tree targ0 = strip_float_extensions (arg0);
7609 if (targ0 != arg0)
7610 return fold_convert (type, fold_build1 (ABS_EXPR,
7611 TREE_TYPE (targ0),
7612 targ0));
7614 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7615 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7616 return arg0;
7618 /* Strip sign ops from argument. */
7619 if (TREE_CODE (type) == REAL_TYPE)
7621 tem = fold_strip_sign_ops (arg0);
7622 if (tem)
7623 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7625 return NULL_TREE;
7627 case CONJ_EXPR:
7628 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7629 return fold_convert (type, arg0);
7630 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7632 tree itype = TREE_TYPE (type);
7633 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7634 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7635 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7637 if (TREE_CODE (arg0) == COMPLEX_CST)
7639 tree itype = TREE_TYPE (type);
7640 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7641 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7642 return build_complex (type, rpart, negate_expr (ipart));
7644 if (TREE_CODE (arg0) == CONJ_EXPR)
7645 return fold_convert (type, TREE_OPERAND (arg0, 0));
7646 return NULL_TREE;
7648 case BIT_NOT_EXPR:
7649 if (TREE_CODE (arg0) == INTEGER_CST)
7650 return fold_not_const (arg0, type);
7651 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7652 return TREE_OPERAND (arg0, 0);
7653 /* Convert ~ (-A) to A - 1. */
7654 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7655 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7656 build_int_cst (type, 1));
7657 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7658 else if (INTEGRAL_TYPE_P (type)
7659 && ((TREE_CODE (arg0) == MINUS_EXPR
7660 && integer_onep (TREE_OPERAND (arg0, 1)))
7661 || (TREE_CODE (arg0) == PLUS_EXPR
7662 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7663 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7664 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7665 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7666 && (tem = fold_unary (BIT_NOT_EXPR, type,
7667 fold_convert (type,
7668 TREE_OPERAND (arg0, 0)))))
7669 return fold_build2 (BIT_XOR_EXPR, type, tem,
7670 fold_convert (type, TREE_OPERAND (arg0, 1)));
7671 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7672 && (tem = fold_unary (BIT_NOT_EXPR, type,
7673 fold_convert (type,
7674 TREE_OPERAND (arg0, 1)))))
7675 return fold_build2 (BIT_XOR_EXPR, type,
7676 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7678 return NULL_TREE;
7680 case TRUTH_NOT_EXPR:
7681 /* The argument to invert_truthvalue must have Boolean type. */
7682 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7683 arg0 = fold_convert (boolean_type_node, arg0);
7685 /* Note that the operand of this must be an int
7686 and its values must be 0 or 1.
7687 ("true" is a fixed value perhaps depending on the language,
7688 but we don't handle values other than 1 correctly yet.) */
7689 tem = fold_truth_not_expr (arg0);
7690 if (!tem)
7691 return NULL_TREE;
7692 return fold_convert (type, tem);
7694 case REALPART_EXPR:
7695 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7696 return fold_convert (type, arg0);
7697 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7698 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7699 TREE_OPERAND (arg0, 1));
7700 if (TREE_CODE (arg0) == COMPLEX_CST)
7701 return fold_convert (type, TREE_REALPART (arg0));
7702 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7704 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7705 tem = fold_build2 (TREE_CODE (arg0), itype,
7706 fold_build1 (REALPART_EXPR, itype,
7707 TREE_OPERAND (arg0, 0)),
7708 fold_build1 (REALPART_EXPR, itype,
7709 TREE_OPERAND (arg0, 1)));
7710 return fold_convert (type, tem);
7712 if (TREE_CODE (arg0) == CONJ_EXPR)
7714 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7715 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7716 return fold_convert (type, tem);
7718 return NULL_TREE;
7720 case IMAGPART_EXPR:
7721 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7722 return fold_convert (type, integer_zero_node);
7723 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7724 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7725 TREE_OPERAND (arg0, 0));
7726 if (TREE_CODE (arg0) == COMPLEX_CST)
7727 return fold_convert (type, TREE_IMAGPART (arg0));
7728 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7730 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7731 tem = fold_build2 (TREE_CODE (arg0), itype,
7732 fold_build1 (IMAGPART_EXPR, itype,
7733 TREE_OPERAND (arg0, 0)),
7734 fold_build1 (IMAGPART_EXPR, itype,
7735 TREE_OPERAND (arg0, 1)));
7736 return fold_convert (type, tem);
7738 if (TREE_CODE (arg0) == CONJ_EXPR)
7740 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7741 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7742 return fold_convert (type, negate_expr (tem));
7744 return NULL_TREE;
7746 default:
7747 return NULL_TREE;
7748 } /* switch (code) */
7751 /* Fold a binary expression of code CODE and type TYPE with operands
7752 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7753 Return the folded expression if folding is successful. Otherwise,
7754 return NULL_TREE. */
7756 static tree
7757 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7759 enum tree_code compl_code;
7761 if (code == MIN_EXPR)
7762 compl_code = MAX_EXPR;
7763 else if (code == MAX_EXPR)
7764 compl_code = MIN_EXPR;
7765 else
7766 gcc_unreachable ();
7768 /* MIN (MAX (a, b), b) == b. */
7769 if (TREE_CODE (op0) == compl_code
7770 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7771 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7773 /* MIN (MAX (b, a), b) == b. */
7774 if (TREE_CODE (op0) == compl_code
7775 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7776 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7777 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7779 /* MIN (a, MAX (a, b)) == a. */
7780 if (TREE_CODE (op1) == compl_code
7781 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7782 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7783 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7785 /* MIN (a, MAX (b, a)) == a. */
7786 if (TREE_CODE (op1) == compl_code
7787 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7788 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7789 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7791 return NULL_TREE;
7794 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7795 by changing CODE to reduce the magnitude of constants involved in
7796 ARG0 of the comparison.
7797 Returns a canonicalized comparison tree if a simplification was
7798 possible, otherwise returns NULL_TREE. */
7800 static tree
7801 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7802 tree arg0, tree arg1)
7804 enum tree_code code0 = TREE_CODE (arg0);
7805 tree t, cst0 = NULL_TREE;
7806 int sgn0;
7807 bool swap = false;
7809 /* Match A +- CST code arg1 and CST code arg1. */
7810 if (!(((code0 == MINUS_EXPR
7811 || code0 == PLUS_EXPR)
7812 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7813 || code0 == INTEGER_CST))
7814 return NULL_TREE;
7816 /* Identify the constant in arg0 and its sign. */
7817 if (code0 == INTEGER_CST)
7818 cst0 = arg0;
7819 else
7820 cst0 = TREE_OPERAND (arg0, 1);
7821 sgn0 = tree_int_cst_sgn (cst0);
7823 /* Overflowed constants and zero will cause problems. */
7824 if (integer_zerop (cst0)
7825 || TREE_OVERFLOW (cst0))
7826 return NULL_TREE;
7828 /* See if we can reduce the magnitude of the constant in
7829 arg0 by changing the comparison code. */
7830 if (code0 == INTEGER_CST)
7832 /* CST <= arg1 -> CST-1 < arg1. */
7833 if (code == LE_EXPR && sgn0 == 1)
7834 code = LT_EXPR;
7835 /* -CST < arg1 -> -CST-1 <= arg1. */
7836 else if (code == LT_EXPR && sgn0 == -1)
7837 code = LE_EXPR;
7838 /* CST > arg1 -> CST-1 >= arg1. */
7839 else if (code == GT_EXPR && sgn0 == 1)
7840 code = GE_EXPR;
7841 /* -CST >= arg1 -> -CST-1 > arg1. */
7842 else if (code == GE_EXPR && sgn0 == -1)
7843 code = GT_EXPR;
7844 else
7845 return NULL_TREE;
7846 /* arg1 code' CST' might be more canonical. */
7847 swap = true;
7849 else
7851 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7852 if (code == LT_EXPR
7853 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7854 code = LE_EXPR;
7855 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7856 else if (code == GT_EXPR
7857 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7858 code = GE_EXPR;
7859 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7860 else if (code == LE_EXPR
7861 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7862 code = LT_EXPR;
7863 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7864 else if (code == GE_EXPR
7865 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7866 code = GT_EXPR;
7867 else
7868 return NULL_TREE;
7871 /* Now build the constant reduced in magnitude. */
7872 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7873 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7874 if (code0 != INTEGER_CST)
7875 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7877 /* If swapping might yield to a more canonical form, do so. */
7878 if (swap)
7879 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7880 else
7881 return fold_build2 (code, type, t, arg1);
7884 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7885 overflow further. Try to decrease the magnitude of constants involved
7886 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7887 and put sole constants at the second argument position.
7888 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7890 static tree
7891 maybe_canonicalize_comparison (enum tree_code code, tree type,
7892 tree arg0, tree arg1)
7894 tree t;
7896 /* In principle pointers also have undefined overflow behavior,
7897 but that causes problems elsewhere. */
7898 if ((flag_wrapv || flag_trapv)
7899 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7900 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7901 return NULL_TREE;
7903 /* Try canonicalization by simplifying arg0. */
7904 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7905 if (t)
7906 return t;
7908 /* Try canonicalization by simplifying arg1 using the swapped
7909 comparison. */
7910 code = swap_tree_comparison (code);
7911 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7914 /* Subroutine of fold_binary. This routine performs all of the
7915 transformations that are common to the equality/inequality
7916 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7917 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7918 fold_binary should call fold_binary. Fold a comparison with
7919 tree code CODE and type TYPE with operands OP0 and OP1. Return
7920 the folded comparison or NULL_TREE. */
7922 static tree
7923 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7925 tree arg0, arg1, tem;
7927 arg0 = op0;
7928 arg1 = op1;
7930 STRIP_SIGN_NOPS (arg0);
7931 STRIP_SIGN_NOPS (arg1);
7933 tem = fold_relational_const (code, type, arg0, arg1);
7934 if (tem != NULL_TREE)
7935 return tem;
7937 /* If one arg is a real or integer constant, put it last. */
7938 if (tree_swap_operands_p (arg0, arg1, true))
7939 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7941 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7942 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7943 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7944 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7945 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7946 && !(flag_wrapv || flag_trapv))
7947 && (TREE_CODE (arg1) == INTEGER_CST
7948 && !TREE_OVERFLOW (arg1)))
7950 tree const1 = TREE_OPERAND (arg0, 1);
7951 tree const2 = arg1;
7952 tree variable = TREE_OPERAND (arg0, 0);
7953 tree lhs;
7954 int lhs_add;
7955 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7957 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7958 TREE_TYPE (arg1), const2, const1);
7959 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7960 && (TREE_CODE (lhs) != INTEGER_CST
7961 || !TREE_OVERFLOW (lhs)))
7962 return fold_build2 (code, type, variable, lhs);
7965 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7966 same object, then we can fold this to a comparison of the two offsets in
7967 signed size type. This is possible because pointer arithmetic is
7968 restricted to retain within an object and overflow on pointer differences
7969 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7970 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7971 && !flag_wrapv && !flag_trapv)
7973 tree base0, offset0, base1, offset1;
7975 if (extract_array_ref (arg0, &base0, &offset0)
7976 && extract_array_ref (arg1, &base1, &offset1)
7977 && operand_equal_p (base0, base1, 0))
7979 tree signed_size_type_node;
7980 signed_size_type_node = signed_type_for (size_type_node);
7982 /* By converting to signed size type we cover middle-end pointer
7983 arithmetic which operates on unsigned pointer types of size
7984 type size and ARRAY_REF offsets which are properly sign or
7985 zero extended from their type in case it is narrower than
7986 size type. */
7987 if (offset0 == NULL_TREE)
7988 offset0 = build_int_cst (signed_size_type_node, 0);
7989 else
7990 offset0 = fold_convert (signed_size_type_node, offset0);
7991 if (offset1 == NULL_TREE)
7992 offset1 = build_int_cst (signed_size_type_node, 0);
7993 else
7994 offset1 = fold_convert (signed_size_type_node, offset1);
7996 return fold_build2 (code, type, offset0, offset1);
8000 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8001 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8002 the resulting offset is smaller in absolute value than the
8003 original one. */
8004 if (!(flag_wrapv || flag_trapv)
8005 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8006 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8007 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8008 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8009 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8010 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8011 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8013 tree const1 = TREE_OPERAND (arg0, 1);
8014 tree const2 = TREE_OPERAND (arg1, 1);
8015 tree variable1 = TREE_OPERAND (arg0, 0);
8016 tree variable2 = TREE_OPERAND (arg1, 0);
8017 tree cst;
8019 /* Put the constant on the side where it doesn't overflow and is
8020 of lower absolute value than before. */
8021 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8022 ? MINUS_EXPR : PLUS_EXPR,
8023 const2, const1, 0);
8024 if (!TREE_OVERFLOW (cst)
8025 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8026 return fold_build2 (code, type,
8027 variable1,
8028 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8029 variable2, cst));
8031 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8032 ? MINUS_EXPR : PLUS_EXPR,
8033 const1, const2, 0);
8034 if (!TREE_OVERFLOW (cst)
8035 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8036 return fold_build2 (code, type,
8037 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8038 variable1, cst),
8039 variable2);
8042 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8043 if (tem)
8044 return tem;
8046 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8048 tree targ0 = strip_float_extensions (arg0);
8049 tree targ1 = strip_float_extensions (arg1);
8050 tree newtype = TREE_TYPE (targ0);
8052 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8053 newtype = TREE_TYPE (targ1);
8055 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8056 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8057 return fold_build2 (code, type, fold_convert (newtype, targ0),
8058 fold_convert (newtype, targ1));
8060 /* (-a) CMP (-b) -> b CMP a */
8061 if (TREE_CODE (arg0) == NEGATE_EXPR
8062 && TREE_CODE (arg1) == NEGATE_EXPR)
8063 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8064 TREE_OPERAND (arg0, 0));
8066 if (TREE_CODE (arg1) == REAL_CST)
8068 REAL_VALUE_TYPE cst;
8069 cst = TREE_REAL_CST (arg1);
8071 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8072 if (TREE_CODE (arg0) == NEGATE_EXPR)
8073 return fold_build2 (swap_tree_comparison (code), type,
8074 TREE_OPERAND (arg0, 0),
8075 build_real (TREE_TYPE (arg1),
8076 REAL_VALUE_NEGATE (cst)));
8078 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8079 /* a CMP (-0) -> a CMP 0 */
8080 if (REAL_VALUE_MINUS_ZERO (cst))
8081 return fold_build2 (code, type, arg0,
8082 build_real (TREE_TYPE (arg1), dconst0));
8084 /* x != NaN is always true, other ops are always false. */
8085 if (REAL_VALUE_ISNAN (cst)
8086 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8088 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8089 return omit_one_operand (type, tem, arg0);
8092 /* Fold comparisons against infinity. */
8093 if (REAL_VALUE_ISINF (cst))
8095 tem = fold_inf_compare (code, type, arg0, arg1);
8096 if (tem != NULL_TREE)
8097 return tem;
8101 /* If this is a comparison of a real constant with a PLUS_EXPR
8102 or a MINUS_EXPR of a real constant, we can convert it into a
8103 comparison with a revised real constant as long as no overflow
8104 occurs when unsafe_math_optimizations are enabled. */
8105 if (flag_unsafe_math_optimizations
8106 && TREE_CODE (arg1) == REAL_CST
8107 && (TREE_CODE (arg0) == PLUS_EXPR
8108 || TREE_CODE (arg0) == MINUS_EXPR)
8109 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8110 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8111 ? MINUS_EXPR : PLUS_EXPR,
8112 arg1, TREE_OPERAND (arg0, 1), 0))
8113 && ! TREE_CONSTANT_OVERFLOW (tem))
8114 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8116 /* Likewise, we can simplify a comparison of a real constant with
8117 a MINUS_EXPR whose first operand is also a real constant, i.e.
8118 (c1 - x) < c2 becomes x > c1-c2. */
8119 if (flag_unsafe_math_optimizations
8120 && TREE_CODE (arg1) == REAL_CST
8121 && TREE_CODE (arg0) == MINUS_EXPR
8122 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8123 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8124 arg1, 0))
8125 && ! TREE_CONSTANT_OVERFLOW (tem))
8126 return fold_build2 (swap_tree_comparison (code), type,
8127 TREE_OPERAND (arg0, 1), tem);
8129 /* Fold comparisons against built-in math functions. */
8130 if (TREE_CODE (arg1) == REAL_CST
8131 && flag_unsafe_math_optimizations
8132 && ! flag_errno_math)
8134 enum built_in_function fcode = builtin_mathfn_code (arg0);
8136 if (fcode != END_BUILTINS)
8138 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8139 if (tem != NULL_TREE)
8140 return tem;
8145 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8146 if (TREE_CONSTANT (arg1)
8147 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8148 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8149 /* This optimization is invalid for ordered comparisons
8150 if CONST+INCR overflows or if foo+incr might overflow.
8151 This optimization is invalid for floating point due to rounding.
8152 For pointer types we assume overflow doesn't happen. */
8153 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8154 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8155 && (code == EQ_EXPR || code == NE_EXPR))))
8157 tree varop, newconst;
8159 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8161 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8162 arg1, TREE_OPERAND (arg0, 1));
8163 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8164 TREE_OPERAND (arg0, 0),
8165 TREE_OPERAND (arg0, 1));
8167 else
8169 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8170 arg1, TREE_OPERAND (arg0, 1));
8171 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8172 TREE_OPERAND (arg0, 0),
8173 TREE_OPERAND (arg0, 1));
8177 /* If VAROP is a reference to a bitfield, we must mask
8178 the constant by the width of the field. */
8179 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8180 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8181 && host_integerp (DECL_SIZE (TREE_OPERAND
8182 (TREE_OPERAND (varop, 0), 1)), 1))
8184 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8185 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8186 tree folded_compare, shift;
8188 /* First check whether the comparison would come out
8189 always the same. If we don't do that we would
8190 change the meaning with the masking. */
8191 folded_compare = fold_build2 (code, type,
8192 TREE_OPERAND (varop, 0), arg1);
8193 if (TREE_CODE (folded_compare) == INTEGER_CST)
8194 return omit_one_operand (type, folded_compare, varop);
8196 shift = build_int_cst (NULL_TREE,
8197 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8198 shift = fold_convert (TREE_TYPE (varop), shift);
8199 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8200 newconst, shift);
8201 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8202 newconst, shift);
8205 return fold_build2 (code, type, varop, newconst);
8208 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8209 && (TREE_CODE (arg0) == NOP_EXPR
8210 || TREE_CODE (arg0) == CONVERT_EXPR))
8212 /* If we are widening one operand of an integer comparison,
8213 see if the other operand is similarly being widened. Perhaps we
8214 can do the comparison in the narrower type. */
8215 tem = fold_widened_comparison (code, type, arg0, arg1);
8216 if (tem)
8217 return tem;
8219 /* Or if we are changing signedness. */
8220 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8221 if (tem)
8222 return tem;
8225 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8226 constant, we can simplify it. */
8227 if (TREE_CODE (arg1) == INTEGER_CST
8228 && (TREE_CODE (arg0) == MIN_EXPR
8229 || TREE_CODE (arg0) == MAX_EXPR)
8230 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8232 tem = optimize_minmax_comparison (code, type, op0, op1);
8233 if (tem)
8234 return tem;
8237 /* Simplify comparison of something with itself. (For IEEE
8238 floating-point, we can only do some of these simplifications.) */
8239 if (operand_equal_p (arg0, arg1, 0))
8241 switch (code)
8243 case EQ_EXPR:
8244 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8245 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8246 return constant_boolean_node (1, type);
8247 break;
8249 case GE_EXPR:
8250 case LE_EXPR:
8251 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8252 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8253 return constant_boolean_node (1, type);
8254 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8256 case NE_EXPR:
8257 /* For NE, we can only do this simplification if integer
8258 or we don't honor IEEE floating point NaNs. */
8259 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8260 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8261 break;
8262 /* ... fall through ... */
8263 case GT_EXPR:
8264 case LT_EXPR:
8265 return constant_boolean_node (0, type);
8266 default:
8267 gcc_unreachable ();
8271 /* If we are comparing an expression that just has comparisons
8272 of two integer values, arithmetic expressions of those comparisons,
8273 and constants, we can simplify it. There are only three cases
8274 to check: the two values can either be equal, the first can be
8275 greater, or the second can be greater. Fold the expression for
8276 those three values. Since each value must be 0 or 1, we have
8277 eight possibilities, each of which corresponds to the constant 0
8278 or 1 or one of the six possible comparisons.
8280 This handles common cases like (a > b) == 0 but also handles
8281 expressions like ((x > y) - (y > x)) > 0, which supposedly
8282 occur in macroized code. */
8284 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8286 tree cval1 = 0, cval2 = 0;
8287 int save_p = 0;
8289 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8290 /* Don't handle degenerate cases here; they should already
8291 have been handled anyway. */
8292 && cval1 != 0 && cval2 != 0
8293 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8294 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8295 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8296 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8297 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8298 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8299 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8301 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8302 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8304 /* We can't just pass T to eval_subst in case cval1 or cval2
8305 was the same as ARG1. */
8307 tree high_result
8308 = fold_build2 (code, type,
8309 eval_subst (arg0, cval1, maxval,
8310 cval2, minval),
8311 arg1);
8312 tree equal_result
8313 = fold_build2 (code, type,
8314 eval_subst (arg0, cval1, maxval,
8315 cval2, maxval),
8316 arg1);
8317 tree low_result
8318 = fold_build2 (code, type,
8319 eval_subst (arg0, cval1, minval,
8320 cval2, maxval),
8321 arg1);
8323 /* All three of these results should be 0 or 1. Confirm they are.
8324 Then use those values to select the proper code to use. */
8326 if (TREE_CODE (high_result) == INTEGER_CST
8327 && TREE_CODE (equal_result) == INTEGER_CST
8328 && TREE_CODE (low_result) == INTEGER_CST)
8330 /* Make a 3-bit mask with the high-order bit being the
8331 value for `>', the next for '=', and the low for '<'. */
8332 switch ((integer_onep (high_result) * 4)
8333 + (integer_onep (equal_result) * 2)
8334 + integer_onep (low_result))
8336 case 0:
8337 /* Always false. */
8338 return omit_one_operand (type, integer_zero_node, arg0);
8339 case 1:
8340 code = LT_EXPR;
8341 break;
8342 case 2:
8343 code = EQ_EXPR;
8344 break;
8345 case 3:
8346 code = LE_EXPR;
8347 break;
8348 case 4:
8349 code = GT_EXPR;
8350 break;
8351 case 5:
8352 code = NE_EXPR;
8353 break;
8354 case 6:
8355 code = GE_EXPR;
8356 break;
8357 case 7:
8358 /* Always true. */
8359 return omit_one_operand (type, integer_one_node, arg0);
8362 if (save_p)
8363 return save_expr (build2 (code, type, cval1, cval2));
8364 return fold_build2 (code, type, cval1, cval2);
8369 /* Fold a comparison of the address of COMPONENT_REFs with the same
8370 type and component to a comparison of the address of the base
8371 object. In short, &x->a OP &y->a to x OP y and
8372 &x->a OP &y.a to x OP &y */
8373 if (TREE_CODE (arg0) == ADDR_EXPR
8374 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8375 && TREE_CODE (arg1) == ADDR_EXPR
8376 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8378 tree cref0 = TREE_OPERAND (arg0, 0);
8379 tree cref1 = TREE_OPERAND (arg1, 0);
8380 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8382 tree op0 = TREE_OPERAND (cref0, 0);
8383 tree op1 = TREE_OPERAND (cref1, 0);
8384 return fold_build2 (code, type,
8385 build_fold_addr_expr (op0),
8386 build_fold_addr_expr (op1));
8390 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8391 into a single range test. */
8392 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8393 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8394 && TREE_CODE (arg1) == INTEGER_CST
8395 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8396 && !integer_zerop (TREE_OPERAND (arg0, 1))
8397 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8398 && !TREE_OVERFLOW (arg1))
8400 tem = fold_div_compare (code, type, arg0, arg1);
8401 if (tem != NULL_TREE)
8402 return tem;
8405 /* Fold ~X op ~Y as Y op X. */
8406 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8407 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8408 return fold_build2 (code, type,
8409 TREE_OPERAND (arg1, 0),
8410 TREE_OPERAND (arg0, 0));
8412 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8413 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8414 && TREE_CODE (arg1) == INTEGER_CST)
8415 return fold_build2 (swap_tree_comparison (code), type,
8416 TREE_OPERAND (arg0, 0),
8417 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8419 return NULL_TREE;
8423 /* Subroutine of fold_binary. Optimize complex multiplications of the
8424 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8425 argument EXPR represents the expression "z" of type TYPE. */
8427 static tree
8428 fold_mult_zconjz (tree type, tree expr)
8430 tree itype = TREE_TYPE (type);
8431 tree rpart, ipart, tem;
8433 if (TREE_CODE (expr) == COMPLEX_EXPR)
8435 rpart = TREE_OPERAND (expr, 0);
8436 ipart = TREE_OPERAND (expr, 1);
8438 else if (TREE_CODE (expr) == COMPLEX_CST)
8440 rpart = TREE_REALPART (expr);
8441 ipart = TREE_IMAGPART (expr);
8443 else
8445 expr = save_expr (expr);
8446 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8447 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8450 rpart = save_expr (rpart);
8451 ipart = save_expr (ipart);
8452 tem = fold_build2 (PLUS_EXPR, itype,
8453 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8454 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8455 return fold_build2 (COMPLEX_EXPR, type, tem,
8456 fold_convert (itype, integer_zero_node));
8460 /* Fold a binary expression of code CODE and type TYPE with operands
8461 OP0 and OP1. Return the folded expression if folding is
8462 successful. Otherwise, return NULL_TREE. */
8464 tree
8465 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8467 enum tree_code_class kind = TREE_CODE_CLASS (code);
8468 tree arg0, arg1, tem;
8469 tree t1 = NULL_TREE;
8471 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8472 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8473 && TREE_CODE_LENGTH (code) == 2
8474 && op0 != NULL_TREE
8475 && op1 != NULL_TREE);
8477 arg0 = op0;
8478 arg1 = op1;
8480 /* Strip any conversions that don't change the mode. This is
8481 safe for every expression, except for a comparison expression
8482 because its signedness is derived from its operands. So, in
8483 the latter case, only strip conversions that don't change the
8484 signedness.
8486 Note that this is done as an internal manipulation within the
8487 constant folder, in order to find the simplest representation
8488 of the arguments so that their form can be studied. In any
8489 cases, the appropriate type conversions should be put back in
8490 the tree that will get out of the constant folder. */
8492 if (kind == tcc_comparison)
8494 STRIP_SIGN_NOPS (arg0);
8495 STRIP_SIGN_NOPS (arg1);
8497 else
8499 STRIP_NOPS (arg0);
8500 STRIP_NOPS (arg1);
8503 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8504 constant but we can't do arithmetic on them. */
8505 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8506 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8507 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8508 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8510 if (kind == tcc_binary)
8511 tem = const_binop (code, arg0, arg1, 0);
8512 else if (kind == tcc_comparison)
8513 tem = fold_relational_const (code, type, arg0, arg1);
8514 else
8515 tem = NULL_TREE;
8517 if (tem != NULL_TREE)
8519 if (TREE_TYPE (tem) != type)
8520 tem = fold_convert (type, tem);
8521 return tem;
8525 /* If this is a commutative operation, and ARG0 is a constant, move it
8526 to ARG1 to reduce the number of tests below. */
8527 if (commutative_tree_code (code)
8528 && tree_swap_operands_p (arg0, arg1, true))
8529 return fold_build2 (code, type, op1, op0);
8531 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8533 First check for cases where an arithmetic operation is applied to a
8534 compound, conditional, or comparison operation. Push the arithmetic
8535 operation inside the compound or conditional to see if any folding
8536 can then be done. Convert comparison to conditional for this purpose.
8537 The also optimizes non-constant cases that used to be done in
8538 expand_expr.
8540 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8541 one of the operands is a comparison and the other is a comparison, a
8542 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8543 code below would make the expression more complex. Change it to a
8544 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8545 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8547 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8548 || code == EQ_EXPR || code == NE_EXPR)
8549 && ((truth_value_p (TREE_CODE (arg0))
8550 && (truth_value_p (TREE_CODE (arg1))
8551 || (TREE_CODE (arg1) == BIT_AND_EXPR
8552 && integer_onep (TREE_OPERAND (arg1, 1)))))
8553 || (truth_value_p (TREE_CODE (arg1))
8554 && (truth_value_p (TREE_CODE (arg0))
8555 || (TREE_CODE (arg0) == BIT_AND_EXPR
8556 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8558 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8559 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8560 : TRUTH_XOR_EXPR,
8561 boolean_type_node,
8562 fold_convert (boolean_type_node, arg0),
8563 fold_convert (boolean_type_node, arg1));
8565 if (code == EQ_EXPR)
8566 tem = invert_truthvalue (tem);
8568 return fold_convert (type, tem);
8571 if (TREE_CODE_CLASS (code) == tcc_binary
8572 || TREE_CODE_CLASS (code) == tcc_comparison)
8574 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8575 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8576 fold_build2 (code, type,
8577 TREE_OPERAND (arg0, 1), op1));
8578 if (TREE_CODE (arg1) == COMPOUND_EXPR
8579 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8580 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8581 fold_build2 (code, type,
8582 op0, TREE_OPERAND (arg1, 1)));
8584 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8586 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8587 arg0, arg1,
8588 /*cond_first_p=*/1);
8589 if (tem != NULL_TREE)
8590 return tem;
8593 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8595 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8596 arg1, arg0,
8597 /*cond_first_p=*/0);
8598 if (tem != NULL_TREE)
8599 return tem;
8603 switch (code)
8605 case PLUS_EXPR:
8606 /* A + (-B) -> A - B */
8607 if (TREE_CODE (arg1) == NEGATE_EXPR)
8608 return fold_build2 (MINUS_EXPR, type,
8609 fold_convert (type, arg0),
8610 fold_convert (type, TREE_OPERAND (arg1, 0)));
8611 /* (-A) + B -> B - A */
8612 if (TREE_CODE (arg0) == NEGATE_EXPR
8613 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8614 return fold_build2 (MINUS_EXPR, type,
8615 fold_convert (type, arg1),
8616 fold_convert (type, TREE_OPERAND (arg0, 0)));
8617 /* Convert ~A + 1 to -A. */
8618 if (INTEGRAL_TYPE_P (type)
8619 && TREE_CODE (arg0) == BIT_NOT_EXPR
8620 && integer_onep (arg1))
8621 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8623 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8624 same or one. */
8625 if ((TREE_CODE (arg0) == MULT_EXPR
8626 || TREE_CODE (arg1) == MULT_EXPR)
8627 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8629 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8630 if (tem)
8631 return tem;
8634 if (! FLOAT_TYPE_P (type))
8636 if (integer_zerop (arg1))
8637 return non_lvalue (fold_convert (type, arg0));
8639 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8640 with a constant, and the two constants have no bits in common,
8641 we should treat this as a BIT_IOR_EXPR since this may produce more
8642 simplifications. */
8643 if (TREE_CODE (arg0) == BIT_AND_EXPR
8644 && TREE_CODE (arg1) == BIT_AND_EXPR
8645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8646 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8647 && integer_zerop (const_binop (BIT_AND_EXPR,
8648 TREE_OPERAND (arg0, 1),
8649 TREE_OPERAND (arg1, 1), 0)))
8651 code = BIT_IOR_EXPR;
8652 goto bit_ior;
8655 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8656 (plus (plus (mult) (mult)) (foo)) so that we can
8657 take advantage of the factoring cases below. */
8658 if (((TREE_CODE (arg0) == PLUS_EXPR
8659 || TREE_CODE (arg0) == MINUS_EXPR)
8660 && TREE_CODE (arg1) == MULT_EXPR)
8661 || ((TREE_CODE (arg1) == PLUS_EXPR
8662 || TREE_CODE (arg1) == MINUS_EXPR)
8663 && TREE_CODE (arg0) == MULT_EXPR))
8665 tree parg0, parg1, parg, marg;
8666 enum tree_code pcode;
8668 if (TREE_CODE (arg1) == MULT_EXPR)
8669 parg = arg0, marg = arg1;
8670 else
8671 parg = arg1, marg = arg0;
8672 pcode = TREE_CODE (parg);
8673 parg0 = TREE_OPERAND (parg, 0);
8674 parg1 = TREE_OPERAND (parg, 1);
8675 STRIP_NOPS (parg0);
8676 STRIP_NOPS (parg1);
8678 if (TREE_CODE (parg0) == MULT_EXPR
8679 && TREE_CODE (parg1) != MULT_EXPR)
8680 return fold_build2 (pcode, type,
8681 fold_build2 (PLUS_EXPR, type,
8682 fold_convert (type, parg0),
8683 fold_convert (type, marg)),
8684 fold_convert (type, parg1));
8685 if (TREE_CODE (parg0) != MULT_EXPR
8686 && TREE_CODE (parg1) == MULT_EXPR)
8687 return fold_build2 (PLUS_EXPR, type,
8688 fold_convert (type, parg0),
8689 fold_build2 (pcode, type,
8690 fold_convert (type, marg),
8691 fold_convert (type,
8692 parg1)));
8695 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8696 of the array. Loop optimizer sometimes produce this type of
8697 expressions. */
8698 if (TREE_CODE (arg0) == ADDR_EXPR)
8700 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8701 if (tem)
8702 return fold_convert (type, tem);
8704 else if (TREE_CODE (arg1) == ADDR_EXPR)
8706 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8707 if (tem)
8708 return fold_convert (type, tem);
8711 else
8713 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8714 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8715 return non_lvalue (fold_convert (type, arg0));
8717 /* Likewise if the operands are reversed. */
8718 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8719 return non_lvalue (fold_convert (type, arg1));
8721 /* Convert X + -C into X - C. */
8722 if (TREE_CODE (arg1) == REAL_CST
8723 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8725 tem = fold_negate_const (arg1, type);
8726 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8727 return fold_build2 (MINUS_EXPR, type,
8728 fold_convert (type, arg0),
8729 fold_convert (type, tem));
8732 if (flag_unsafe_math_optimizations
8733 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8734 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8735 && (tem = distribute_real_division (code, type, arg0, arg1)))
8736 return tem;
8738 /* Convert x+x into x*2.0. */
8739 if (operand_equal_p (arg0, arg1, 0)
8740 && SCALAR_FLOAT_TYPE_P (type))
8741 return fold_build2 (MULT_EXPR, type, arg0,
8742 build_real (type, dconst2));
8744 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8745 if (flag_unsafe_math_optimizations
8746 && TREE_CODE (arg1) == PLUS_EXPR
8747 && TREE_CODE (arg0) != MULT_EXPR)
8749 tree tree10 = TREE_OPERAND (arg1, 0);
8750 tree tree11 = TREE_OPERAND (arg1, 1);
8751 if (TREE_CODE (tree11) == MULT_EXPR
8752 && TREE_CODE (tree10) == MULT_EXPR)
8754 tree tree0;
8755 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8756 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8759 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8760 if (flag_unsafe_math_optimizations
8761 && TREE_CODE (arg0) == PLUS_EXPR
8762 && TREE_CODE (arg1) != MULT_EXPR)
8764 tree tree00 = TREE_OPERAND (arg0, 0);
8765 tree tree01 = TREE_OPERAND (arg0, 1);
8766 if (TREE_CODE (tree01) == MULT_EXPR
8767 && TREE_CODE (tree00) == MULT_EXPR)
8769 tree tree0;
8770 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8771 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8776 bit_rotate:
8777 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8778 is a rotate of A by C1 bits. */
8779 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8780 is a rotate of A by B bits. */
8782 enum tree_code code0, code1;
8783 code0 = TREE_CODE (arg0);
8784 code1 = TREE_CODE (arg1);
8785 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8786 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8787 && operand_equal_p (TREE_OPERAND (arg0, 0),
8788 TREE_OPERAND (arg1, 0), 0)
8789 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8791 tree tree01, tree11;
8792 enum tree_code code01, code11;
8794 tree01 = TREE_OPERAND (arg0, 1);
8795 tree11 = TREE_OPERAND (arg1, 1);
8796 STRIP_NOPS (tree01);
8797 STRIP_NOPS (tree11);
8798 code01 = TREE_CODE (tree01);
8799 code11 = TREE_CODE (tree11);
8800 if (code01 == INTEGER_CST
8801 && code11 == INTEGER_CST
8802 && TREE_INT_CST_HIGH (tree01) == 0
8803 && TREE_INT_CST_HIGH (tree11) == 0
8804 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8805 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8806 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8807 code0 == LSHIFT_EXPR ? tree01 : tree11);
8808 else if (code11 == MINUS_EXPR)
8810 tree tree110, tree111;
8811 tree110 = TREE_OPERAND (tree11, 0);
8812 tree111 = TREE_OPERAND (tree11, 1);
8813 STRIP_NOPS (tree110);
8814 STRIP_NOPS (tree111);
8815 if (TREE_CODE (tree110) == INTEGER_CST
8816 && 0 == compare_tree_int (tree110,
8817 TYPE_PRECISION
8818 (TREE_TYPE (TREE_OPERAND
8819 (arg0, 0))))
8820 && operand_equal_p (tree01, tree111, 0))
8821 return build2 ((code0 == LSHIFT_EXPR
8822 ? LROTATE_EXPR
8823 : RROTATE_EXPR),
8824 type, TREE_OPERAND (arg0, 0), tree01);
8826 else if (code01 == MINUS_EXPR)
8828 tree tree010, tree011;
8829 tree010 = TREE_OPERAND (tree01, 0);
8830 tree011 = TREE_OPERAND (tree01, 1);
8831 STRIP_NOPS (tree010);
8832 STRIP_NOPS (tree011);
8833 if (TREE_CODE (tree010) == INTEGER_CST
8834 && 0 == compare_tree_int (tree010,
8835 TYPE_PRECISION
8836 (TREE_TYPE (TREE_OPERAND
8837 (arg0, 0))))
8838 && operand_equal_p (tree11, tree011, 0))
8839 return build2 ((code0 != LSHIFT_EXPR
8840 ? LROTATE_EXPR
8841 : RROTATE_EXPR),
8842 type, TREE_OPERAND (arg0, 0), tree11);
8847 associate:
8848 /* In most languages, can't associate operations on floats through
8849 parentheses. Rather than remember where the parentheses were, we
8850 don't associate floats at all, unless the user has specified
8851 -funsafe-math-optimizations. */
8853 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8855 tree var0, con0, lit0, minus_lit0;
8856 tree var1, con1, lit1, minus_lit1;
8858 /* Split both trees into variables, constants, and literals. Then
8859 associate each group together, the constants with literals,
8860 then the result with variables. This increases the chances of
8861 literals being recombined later and of generating relocatable
8862 expressions for the sum of a constant and literal. */
8863 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8864 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8865 code == MINUS_EXPR);
8867 /* Only do something if we found more than two objects. Otherwise,
8868 nothing has changed and we risk infinite recursion. */
8869 if (2 < ((var0 != 0) + (var1 != 0)
8870 + (con0 != 0) + (con1 != 0)
8871 + (lit0 != 0) + (lit1 != 0)
8872 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8874 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8875 if (code == MINUS_EXPR)
8876 code = PLUS_EXPR;
8878 var0 = associate_trees (var0, var1, code, type);
8879 con0 = associate_trees (con0, con1, code, type);
8880 lit0 = associate_trees (lit0, lit1, code, type);
8881 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8883 /* Preserve the MINUS_EXPR if the negative part of the literal is
8884 greater than the positive part. Otherwise, the multiplicative
8885 folding code (i.e extract_muldiv) may be fooled in case
8886 unsigned constants are subtracted, like in the following
8887 example: ((X*2 + 4) - 8U)/2. */
8888 if (minus_lit0 && lit0)
8890 if (TREE_CODE (lit0) == INTEGER_CST
8891 && TREE_CODE (minus_lit0) == INTEGER_CST
8892 && tree_int_cst_lt (lit0, minus_lit0))
8894 minus_lit0 = associate_trees (minus_lit0, lit0,
8895 MINUS_EXPR, type);
8896 lit0 = 0;
8898 else
8900 lit0 = associate_trees (lit0, minus_lit0,
8901 MINUS_EXPR, type);
8902 minus_lit0 = 0;
8905 if (minus_lit0)
8907 if (con0 == 0)
8908 return fold_convert (type,
8909 associate_trees (var0, minus_lit0,
8910 MINUS_EXPR, type));
8911 else
8913 con0 = associate_trees (con0, minus_lit0,
8914 MINUS_EXPR, type);
8915 return fold_convert (type,
8916 associate_trees (var0, con0,
8917 PLUS_EXPR, type));
8921 con0 = associate_trees (con0, lit0, code, type);
8922 return fold_convert (type, associate_trees (var0, con0,
8923 code, type));
8927 return NULL_TREE;
8929 case MINUS_EXPR:
8930 /* A - (-B) -> A + B */
8931 if (TREE_CODE (arg1) == NEGATE_EXPR)
8932 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8933 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8934 if (TREE_CODE (arg0) == NEGATE_EXPR
8935 && (FLOAT_TYPE_P (type)
8936 || INTEGRAL_TYPE_P (type))
8937 && negate_expr_p (arg1)
8938 && reorder_operands_p (arg0, arg1))
8939 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8940 TREE_OPERAND (arg0, 0));
8941 /* Convert -A - 1 to ~A. */
8942 if (INTEGRAL_TYPE_P (type)
8943 && TREE_CODE (arg0) == NEGATE_EXPR
8944 && integer_onep (arg1))
8945 return fold_build1 (BIT_NOT_EXPR, type,
8946 fold_convert (type, TREE_OPERAND (arg0, 0)));
8948 /* Convert -1 - A to ~A. */
8949 if (INTEGRAL_TYPE_P (type)
8950 && integer_all_onesp (arg0))
8951 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8953 if (! FLOAT_TYPE_P (type))
8955 if (integer_zerop (arg0))
8956 return negate_expr (fold_convert (type, arg1));
8957 if (integer_zerop (arg1))
8958 return non_lvalue (fold_convert (type, arg0));
8960 /* Fold A - (A & B) into ~B & A. */
8961 if (!TREE_SIDE_EFFECTS (arg0)
8962 && TREE_CODE (arg1) == BIT_AND_EXPR)
8964 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8965 return fold_build2 (BIT_AND_EXPR, type,
8966 fold_build1 (BIT_NOT_EXPR, type,
8967 TREE_OPERAND (arg1, 0)),
8968 arg0);
8969 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8970 return fold_build2 (BIT_AND_EXPR, type,
8971 fold_build1 (BIT_NOT_EXPR, type,
8972 TREE_OPERAND (arg1, 1)),
8973 arg0);
8976 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8977 any power of 2 minus 1. */
8978 if (TREE_CODE (arg0) == BIT_AND_EXPR
8979 && TREE_CODE (arg1) == BIT_AND_EXPR
8980 && operand_equal_p (TREE_OPERAND (arg0, 0),
8981 TREE_OPERAND (arg1, 0), 0))
8983 tree mask0 = TREE_OPERAND (arg0, 1);
8984 tree mask1 = TREE_OPERAND (arg1, 1);
8985 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8987 if (operand_equal_p (tem, mask1, 0))
8989 tem = fold_build2 (BIT_XOR_EXPR, type,
8990 TREE_OPERAND (arg0, 0), mask1);
8991 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8996 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8997 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8998 return non_lvalue (fold_convert (type, arg0));
9000 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9001 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9002 (-ARG1 + ARG0) reduces to -ARG1. */
9003 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9004 return negate_expr (fold_convert (type, arg1));
9006 /* Fold &x - &x. This can happen from &x.foo - &x.
9007 This is unsafe for certain floats even in non-IEEE formats.
9008 In IEEE, it is unsafe because it does wrong for NaNs.
9009 Also note that operand_equal_p is always false if an operand
9010 is volatile. */
9012 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9013 && operand_equal_p (arg0, arg1, 0))
9014 return fold_convert (type, integer_zero_node);
9016 /* A - B -> A + (-B) if B is easily negatable. */
9017 if (negate_expr_p (arg1)
9018 && ((FLOAT_TYPE_P (type)
9019 /* Avoid this transformation if B is a positive REAL_CST. */
9020 && (TREE_CODE (arg1) != REAL_CST
9021 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9022 || INTEGRAL_TYPE_P (type)))
9023 return fold_build2 (PLUS_EXPR, type,
9024 fold_convert (type, arg0),
9025 fold_convert (type, negate_expr (arg1)));
9027 /* Try folding difference of addresses. */
9029 HOST_WIDE_INT diff;
9031 if ((TREE_CODE (arg0) == ADDR_EXPR
9032 || TREE_CODE (arg1) == ADDR_EXPR)
9033 && ptr_difference_const (arg0, arg1, &diff))
9034 return build_int_cst_type (type, diff);
9037 /* Fold &a[i] - &a[j] to i-j. */
9038 if (TREE_CODE (arg0) == ADDR_EXPR
9039 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9040 && TREE_CODE (arg1) == ADDR_EXPR
9041 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9043 tree aref0 = TREE_OPERAND (arg0, 0);
9044 tree aref1 = TREE_OPERAND (arg1, 0);
9045 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9046 TREE_OPERAND (aref1, 0), 0))
9048 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9049 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9050 tree esz = array_ref_element_size (aref0);
9051 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9052 return fold_build2 (MULT_EXPR, type, diff,
9053 fold_convert (type, esz));
9058 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9059 of the array. Loop optimizer sometimes produce this type of
9060 expressions. */
9061 if (TREE_CODE (arg0) == ADDR_EXPR)
9063 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9064 if (tem)
9065 return fold_convert (type, tem);
9068 if (flag_unsafe_math_optimizations
9069 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9070 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9071 && (tem = distribute_real_division (code, type, arg0, arg1)))
9072 return tem;
9074 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9075 same or one. */
9076 if ((TREE_CODE (arg0) == MULT_EXPR
9077 || TREE_CODE (arg1) == MULT_EXPR)
9078 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9080 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9081 if (tem)
9082 return tem;
9085 goto associate;
9087 case MULT_EXPR:
9088 /* (-A) * (-B) -> A * B */
9089 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9090 return fold_build2 (MULT_EXPR, type,
9091 fold_convert (type, TREE_OPERAND (arg0, 0)),
9092 fold_convert (type, negate_expr (arg1)));
9093 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9094 return fold_build2 (MULT_EXPR, type,
9095 fold_convert (type, negate_expr (arg0)),
9096 fold_convert (type, TREE_OPERAND (arg1, 0)));
9098 if (! FLOAT_TYPE_P (type))
9100 if (integer_zerop (arg1))
9101 return omit_one_operand (type, arg1, arg0);
9102 if (integer_onep (arg1))
9103 return non_lvalue (fold_convert (type, arg0));
9104 /* Transform x * -1 into -x. */
9105 if (integer_all_onesp (arg1))
9106 return fold_convert (type, negate_expr (arg0));
9107 /* Transform x * -C into -x * C if x is easily negatable. */
9108 if (TREE_CODE (arg1) == INTEGER_CST
9109 && tree_int_cst_sgn (arg1) == -1
9110 && negate_expr_p (arg0)
9111 && (tem = negate_expr (arg1)) != arg1
9112 && !TREE_OVERFLOW (tem))
9113 return fold_build2 (MULT_EXPR, type,
9114 negate_expr (arg0), tem);
9116 /* (a * (1 << b)) is (a << b) */
9117 if (TREE_CODE (arg1) == LSHIFT_EXPR
9118 && integer_onep (TREE_OPERAND (arg1, 0)))
9119 return fold_build2 (LSHIFT_EXPR, type, arg0,
9120 TREE_OPERAND (arg1, 1));
9121 if (TREE_CODE (arg0) == LSHIFT_EXPR
9122 && integer_onep (TREE_OPERAND (arg0, 0)))
9123 return fold_build2 (LSHIFT_EXPR, type, arg1,
9124 TREE_OPERAND (arg0, 1));
9126 if (TREE_CODE (arg1) == INTEGER_CST
9127 && 0 != (tem = extract_muldiv (op0,
9128 fold_convert (type, arg1),
9129 code, NULL_TREE)))
9130 return fold_convert (type, tem);
9132 /* Optimize z * conj(z) for integer complex numbers. */
9133 if (TREE_CODE (arg0) == CONJ_EXPR
9134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9135 return fold_mult_zconjz (type, arg1);
9136 if (TREE_CODE (arg1) == CONJ_EXPR
9137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9138 return fold_mult_zconjz (type, arg0);
9140 else
9142 /* Maybe fold x * 0 to 0. The expressions aren't the same
9143 when x is NaN, since x * 0 is also NaN. Nor are they the
9144 same in modes with signed zeros, since multiplying a
9145 negative value by 0 gives -0, not +0. */
9146 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9147 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9148 && real_zerop (arg1))
9149 return omit_one_operand (type, arg1, arg0);
9150 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9151 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9152 && real_onep (arg1))
9153 return non_lvalue (fold_convert (type, arg0));
9155 /* Transform x * -1.0 into -x. */
9156 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9157 && real_minus_onep (arg1))
9158 return fold_convert (type, negate_expr (arg0));
9160 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9161 if (flag_unsafe_math_optimizations
9162 && TREE_CODE (arg0) == RDIV_EXPR
9163 && TREE_CODE (arg1) == REAL_CST
9164 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9166 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9167 arg1, 0);
9168 if (tem)
9169 return fold_build2 (RDIV_EXPR, type, tem,
9170 TREE_OPERAND (arg0, 1));
9173 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9174 if (operand_equal_p (arg0, arg1, 0))
9176 tree tem = fold_strip_sign_ops (arg0);
9177 if (tem != NULL_TREE)
9179 tem = fold_convert (type, tem);
9180 return fold_build2 (MULT_EXPR, type, tem, tem);
9184 /* Optimize z * conj(z) for floating point complex numbers.
9185 Guarded by flag_unsafe_math_optimizations as non-finite
9186 imaginary components don't produce scalar results. */
9187 if (flag_unsafe_math_optimizations
9188 && TREE_CODE (arg0) == CONJ_EXPR
9189 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9190 return fold_mult_zconjz (type, arg1);
9191 if (flag_unsafe_math_optimizations
9192 && TREE_CODE (arg1) == CONJ_EXPR
9193 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9194 return fold_mult_zconjz (type, arg0);
9196 if (flag_unsafe_math_optimizations)
9198 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9199 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9201 /* Optimizations of root(...)*root(...). */
9202 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9204 tree rootfn, arg, arglist;
9205 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9206 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9208 /* Optimize sqrt(x)*sqrt(x) as x. */
9209 if (BUILTIN_SQRT_P (fcode0)
9210 && operand_equal_p (arg00, arg10, 0)
9211 && ! HONOR_SNANS (TYPE_MODE (type)))
9212 return arg00;
9214 /* Optimize root(x)*root(y) as root(x*y). */
9215 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9216 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9217 arglist = build_tree_list (NULL_TREE, arg);
9218 return build_function_call_expr (rootfn, arglist);
9221 /* Optimize expN(x)*expN(y) as expN(x+y). */
9222 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9224 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9225 tree arg = fold_build2 (PLUS_EXPR, type,
9226 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9227 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9228 tree arglist = build_tree_list (NULL_TREE, arg);
9229 return build_function_call_expr (expfn, arglist);
9232 /* Optimizations of pow(...)*pow(...). */
9233 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9234 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9235 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9237 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9238 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9239 1)));
9240 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9241 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9242 1)));
9244 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9245 if (operand_equal_p (arg01, arg11, 0))
9247 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9248 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9249 tree arglist = tree_cons (NULL_TREE, arg,
9250 build_tree_list (NULL_TREE,
9251 arg01));
9252 return build_function_call_expr (powfn, arglist);
9255 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9256 if (operand_equal_p (arg00, arg10, 0))
9258 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9259 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9260 tree arglist = tree_cons (NULL_TREE, arg00,
9261 build_tree_list (NULL_TREE,
9262 arg));
9263 return build_function_call_expr (powfn, arglist);
9267 /* Optimize tan(x)*cos(x) as sin(x). */
9268 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9269 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9270 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9271 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9272 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9273 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9274 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9275 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9277 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9279 if (sinfn != NULL_TREE)
9280 return build_function_call_expr (sinfn,
9281 TREE_OPERAND (arg0, 1));
9284 /* Optimize x*pow(x,c) as pow(x,c+1). */
9285 if (fcode1 == BUILT_IN_POW
9286 || fcode1 == BUILT_IN_POWF
9287 || fcode1 == BUILT_IN_POWL)
9289 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9290 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9291 1)));
9292 if (TREE_CODE (arg11) == REAL_CST
9293 && ! TREE_CONSTANT_OVERFLOW (arg11)
9294 && operand_equal_p (arg0, arg10, 0))
9296 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9297 REAL_VALUE_TYPE c;
9298 tree arg, arglist;
9300 c = TREE_REAL_CST (arg11);
9301 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9302 arg = build_real (type, c);
9303 arglist = build_tree_list (NULL_TREE, arg);
9304 arglist = tree_cons (NULL_TREE, arg0, arglist);
9305 return build_function_call_expr (powfn, arglist);
9309 /* Optimize pow(x,c)*x as pow(x,c+1). */
9310 if (fcode0 == BUILT_IN_POW
9311 || fcode0 == BUILT_IN_POWF
9312 || fcode0 == BUILT_IN_POWL)
9314 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9315 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9316 1)));
9317 if (TREE_CODE (arg01) == REAL_CST
9318 && ! TREE_CONSTANT_OVERFLOW (arg01)
9319 && operand_equal_p (arg1, arg00, 0))
9321 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9322 REAL_VALUE_TYPE c;
9323 tree arg, arglist;
9325 c = TREE_REAL_CST (arg01);
9326 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9327 arg = build_real (type, c);
9328 arglist = build_tree_list (NULL_TREE, arg);
9329 arglist = tree_cons (NULL_TREE, arg1, arglist);
9330 return build_function_call_expr (powfn, arglist);
9334 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9335 if (! optimize_size
9336 && operand_equal_p (arg0, arg1, 0))
9338 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9340 if (powfn)
9342 tree arg = build_real (type, dconst2);
9343 tree arglist = build_tree_list (NULL_TREE, arg);
9344 arglist = tree_cons (NULL_TREE, arg0, arglist);
9345 return build_function_call_expr (powfn, arglist);
9350 goto associate;
9352 case BIT_IOR_EXPR:
9353 bit_ior:
9354 if (integer_all_onesp (arg1))
9355 return omit_one_operand (type, arg1, arg0);
9356 if (integer_zerop (arg1))
9357 return non_lvalue (fold_convert (type, arg0));
9358 if (operand_equal_p (arg0, arg1, 0))
9359 return non_lvalue (fold_convert (type, arg0));
9361 /* ~X | X is -1. */
9362 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9363 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9365 t1 = build_int_cst (type, -1);
9366 t1 = force_fit_type (t1, 0, false, false);
9367 return omit_one_operand (type, t1, arg1);
9370 /* X | ~X is -1. */
9371 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9372 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9374 t1 = build_int_cst (type, -1);
9375 t1 = force_fit_type (t1, 0, false, false);
9376 return omit_one_operand (type, t1, arg0);
9379 /* Canonicalize (X & C1) | C2. */
9380 if (TREE_CODE (arg0) == BIT_AND_EXPR
9381 && TREE_CODE (arg1) == INTEGER_CST
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9384 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9385 int width = TYPE_PRECISION (type);
9386 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9387 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9388 hi2 = TREE_INT_CST_HIGH (arg1);
9389 lo2 = TREE_INT_CST_LOW (arg1);
9391 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9392 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9393 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9395 if (width > HOST_BITS_PER_WIDE_INT)
9397 mhi = (unsigned HOST_WIDE_INT) -1
9398 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9399 mlo = -1;
9401 else
9403 mhi = 0;
9404 mlo = (unsigned HOST_WIDE_INT) -1
9405 >> (HOST_BITS_PER_WIDE_INT - width);
9408 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9409 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9410 return fold_build2 (BIT_IOR_EXPR, type,
9411 TREE_OPERAND (arg0, 0), arg1);
9413 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9414 hi1 &= mhi;
9415 lo1 &= mlo;
9416 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9417 return fold_build2 (BIT_IOR_EXPR, type,
9418 fold_build2 (BIT_AND_EXPR, type,
9419 TREE_OPERAND (arg0, 0),
9420 build_int_cst_wide (type,
9421 lo1 & ~lo2,
9422 hi1 & ~hi2)),
9423 arg1);
9426 /* (X & Y) | Y is (X, Y). */
9427 if (TREE_CODE (arg0) == BIT_AND_EXPR
9428 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9429 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9430 /* (X & Y) | X is (Y, X). */
9431 if (TREE_CODE (arg0) == BIT_AND_EXPR
9432 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9433 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9434 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9435 /* X | (X & Y) is (Y, X). */
9436 if (TREE_CODE (arg1) == BIT_AND_EXPR
9437 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9438 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9439 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9440 /* X | (Y & X) is (Y, X). */
9441 if (TREE_CODE (arg1) == BIT_AND_EXPR
9442 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9443 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9444 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9446 t1 = distribute_bit_expr (code, type, arg0, arg1);
9447 if (t1 != NULL_TREE)
9448 return t1;
9450 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9452 This results in more efficient code for machines without a NAND
9453 instruction. Combine will canonicalize to the first form
9454 which will allow use of NAND instructions provided by the
9455 backend if they exist. */
9456 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9457 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9459 return fold_build1 (BIT_NOT_EXPR, type,
9460 build2 (BIT_AND_EXPR, type,
9461 TREE_OPERAND (arg0, 0),
9462 TREE_OPERAND (arg1, 0)));
9465 /* See if this can be simplified into a rotate first. If that
9466 is unsuccessful continue in the association code. */
9467 goto bit_rotate;
9469 case BIT_XOR_EXPR:
9470 if (integer_zerop (arg1))
9471 return non_lvalue (fold_convert (type, arg0));
9472 if (integer_all_onesp (arg1))
9473 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9474 if (operand_equal_p (arg0, arg1, 0))
9475 return omit_one_operand (type, integer_zero_node, arg0);
9477 /* ~X ^ X is -1. */
9478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9481 t1 = build_int_cst (type, -1);
9482 t1 = force_fit_type (t1, 0, false, false);
9483 return omit_one_operand (type, t1, arg1);
9486 /* X ^ ~X is -1. */
9487 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9490 t1 = build_int_cst (type, -1);
9491 t1 = force_fit_type (t1, 0, false, false);
9492 return omit_one_operand (type, t1, arg0);
9495 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9496 with a constant, and the two constants have no bits in common,
9497 we should treat this as a BIT_IOR_EXPR since this may produce more
9498 simplifications. */
9499 if (TREE_CODE (arg0) == BIT_AND_EXPR
9500 && TREE_CODE (arg1) == BIT_AND_EXPR
9501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9502 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9503 && integer_zerop (const_binop (BIT_AND_EXPR,
9504 TREE_OPERAND (arg0, 1),
9505 TREE_OPERAND (arg1, 1), 0)))
9507 code = BIT_IOR_EXPR;
9508 goto bit_ior;
9511 /* (X | Y) ^ X -> Y & ~ X*/
9512 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9515 tree t2 = TREE_OPERAND (arg0, 1);
9516 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9517 arg1);
9518 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9519 fold_convert (type, t1));
9520 return t1;
9523 /* (Y | X) ^ X -> Y & ~ X*/
9524 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9525 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9527 tree t2 = TREE_OPERAND (arg0, 0);
9528 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9529 arg1);
9530 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9531 fold_convert (type, t1));
9532 return t1;
9535 /* X ^ (X | Y) -> Y & ~ X*/
9536 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9537 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9539 tree t2 = TREE_OPERAND (arg1, 1);
9540 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9541 arg0);
9542 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9543 fold_convert (type, t1));
9544 return t1;
9547 /* X ^ (Y | X) -> Y & ~ X*/
9548 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9549 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9551 tree t2 = TREE_OPERAND (arg1, 0);
9552 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9553 arg0);
9554 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9555 fold_convert (type, t1));
9556 return t1;
9559 /* Convert ~X ^ ~Y to X ^ Y. */
9560 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9561 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9562 return fold_build2 (code, type,
9563 fold_convert (type, TREE_OPERAND (arg0, 0)),
9564 fold_convert (type, TREE_OPERAND (arg1, 0)));
9566 /* Convert ~X ^ C to X ^ ~C. */
9567 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9568 && TREE_CODE (arg1) == INTEGER_CST)
9569 return fold_build2 (code, type,
9570 fold_convert (type, TREE_OPERAND (arg0, 0)),
9571 fold_build1 (BIT_NOT_EXPR, type, arg1));
9573 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9574 if (TREE_CODE (arg0) == BIT_AND_EXPR
9575 && integer_onep (TREE_OPERAND (arg0, 1))
9576 && integer_onep (arg1))
9577 return fold_build2 (EQ_EXPR, type, arg0,
9578 build_int_cst (TREE_TYPE (arg0), 0));
9580 /* Fold (X & Y) ^ Y as ~X & Y. */
9581 if (TREE_CODE (arg0) == BIT_AND_EXPR
9582 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9584 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9585 return fold_build2 (BIT_AND_EXPR, type,
9586 fold_build1 (BIT_NOT_EXPR, type, tem),
9587 fold_convert (type, arg1));
9589 /* Fold (X & Y) ^ X as ~Y & X. */
9590 if (TREE_CODE (arg0) == BIT_AND_EXPR
9591 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9592 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9594 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9595 return fold_build2 (BIT_AND_EXPR, type,
9596 fold_build1 (BIT_NOT_EXPR, type, tem),
9597 fold_convert (type, arg1));
9599 /* Fold X ^ (X & Y) as X & ~Y. */
9600 if (TREE_CODE (arg1) == BIT_AND_EXPR
9601 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9603 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9604 return fold_build2 (BIT_AND_EXPR, type,
9605 fold_convert (type, arg0),
9606 fold_build1 (BIT_NOT_EXPR, type, tem));
9608 /* Fold X ^ (Y & X) as ~Y & X. */
9609 if (TREE_CODE (arg1) == BIT_AND_EXPR
9610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9611 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9613 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9614 return fold_build2 (BIT_AND_EXPR, type,
9615 fold_build1 (BIT_NOT_EXPR, type, tem),
9616 fold_convert (type, arg0));
9619 /* See if this can be simplified into a rotate first. If that
9620 is unsuccessful continue in the association code. */
9621 goto bit_rotate;
9623 case BIT_AND_EXPR:
9624 if (integer_all_onesp (arg1))
9625 return non_lvalue (fold_convert (type, arg0));
9626 if (integer_zerop (arg1))
9627 return omit_one_operand (type, arg1, arg0);
9628 if (operand_equal_p (arg0, arg1, 0))
9629 return non_lvalue (fold_convert (type, arg0));
9631 /* ~X & X is always zero. */
9632 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9633 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9634 return omit_one_operand (type, integer_zero_node, arg1);
9636 /* X & ~X is always zero. */
9637 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9638 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9639 return omit_one_operand (type, integer_zero_node, arg0);
9641 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9642 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9643 && TREE_CODE (arg1) == INTEGER_CST
9644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9645 return fold_build2 (BIT_IOR_EXPR, type,
9646 fold_build2 (BIT_AND_EXPR, type,
9647 TREE_OPERAND (arg0, 0), arg1),
9648 fold_build2 (BIT_AND_EXPR, type,
9649 TREE_OPERAND (arg0, 1), arg1));
9651 /* (X | Y) & Y is (X, Y). */
9652 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9653 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9654 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9655 /* (X | Y) & X is (Y, X). */
9656 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9657 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9658 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9659 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9660 /* X & (X | Y) is (Y, X). */
9661 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9662 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9663 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9664 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9665 /* X & (Y | X) is (Y, X). */
9666 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9667 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9668 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9669 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9671 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9672 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9673 && integer_onep (TREE_OPERAND (arg0, 1))
9674 && integer_onep (arg1))
9676 tem = TREE_OPERAND (arg0, 0);
9677 return fold_build2 (EQ_EXPR, type,
9678 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9679 build_int_cst (TREE_TYPE (tem), 1)),
9680 build_int_cst (TREE_TYPE (tem), 0));
9682 /* Fold ~X & 1 as (X & 1) == 0. */
9683 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9684 && integer_onep (arg1))
9686 tem = TREE_OPERAND (arg0, 0);
9687 return fold_build2 (EQ_EXPR, type,
9688 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9689 build_int_cst (TREE_TYPE (tem), 1)),
9690 build_int_cst (TREE_TYPE (tem), 0));
9693 /* Fold (X ^ Y) & Y as ~X & Y. */
9694 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9695 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9697 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9698 return fold_build2 (BIT_AND_EXPR, type,
9699 fold_build1 (BIT_NOT_EXPR, type, tem),
9700 fold_convert (type, arg1));
9702 /* Fold (X ^ Y) & X as ~Y & X. */
9703 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9704 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9705 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9707 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9708 return fold_build2 (BIT_AND_EXPR, type,
9709 fold_build1 (BIT_NOT_EXPR, type, tem),
9710 fold_convert (type, arg1));
9712 /* Fold X & (X ^ Y) as X & ~Y. */
9713 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9716 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9717 return fold_build2 (BIT_AND_EXPR, type,
9718 fold_convert (type, arg0),
9719 fold_build1 (BIT_NOT_EXPR, type, tem));
9721 /* Fold X & (Y ^ X) as ~Y & X. */
9722 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9723 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9724 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9726 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9727 return fold_build2 (BIT_AND_EXPR, type,
9728 fold_build1 (BIT_NOT_EXPR, type, tem),
9729 fold_convert (type, arg0));
9732 t1 = distribute_bit_expr (code, type, arg0, arg1);
9733 if (t1 != NULL_TREE)
9734 return t1;
9735 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9736 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9737 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9739 unsigned int prec
9740 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9742 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9743 && (~TREE_INT_CST_LOW (arg1)
9744 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9745 return fold_convert (type, TREE_OPERAND (arg0, 0));
9748 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9750 This results in more efficient code for machines without a NOR
9751 instruction. Combine will canonicalize to the first form
9752 which will allow use of NOR instructions provided by the
9753 backend if they exist. */
9754 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9755 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9757 return fold_build1 (BIT_NOT_EXPR, type,
9758 build2 (BIT_IOR_EXPR, type,
9759 TREE_OPERAND (arg0, 0),
9760 TREE_OPERAND (arg1, 0)));
9763 goto associate;
9765 case RDIV_EXPR:
9766 /* Don't touch a floating-point divide by zero unless the mode
9767 of the constant can represent infinity. */
9768 if (TREE_CODE (arg1) == REAL_CST
9769 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9770 && real_zerop (arg1))
9771 return NULL_TREE;
9773 /* Optimize A / A to 1.0 if we don't care about
9774 NaNs or Infinities. Skip the transformation
9775 for non-real operands. */
9776 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9777 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9778 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9779 && operand_equal_p (arg0, arg1, 0))
9781 tree r = build_real (TREE_TYPE (arg0), dconst1);
9783 return omit_two_operands (type, r, arg0, arg1);
9786 /* The complex version of the above A / A optimization. */
9787 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9788 && operand_equal_p (arg0, arg1, 0))
9790 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9791 if (! HONOR_NANS (TYPE_MODE (elem_type))
9792 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9794 tree r = build_real (elem_type, dconst1);
9795 /* omit_two_operands will call fold_convert for us. */
9796 return omit_two_operands (type, r, arg0, arg1);
9800 /* (-A) / (-B) -> A / B */
9801 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9802 return fold_build2 (RDIV_EXPR, type,
9803 TREE_OPERAND (arg0, 0),
9804 negate_expr (arg1));
9805 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9806 return fold_build2 (RDIV_EXPR, type,
9807 negate_expr (arg0),
9808 TREE_OPERAND (arg1, 0));
9810 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9811 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9812 && real_onep (arg1))
9813 return non_lvalue (fold_convert (type, arg0));
9815 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9816 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9817 && real_minus_onep (arg1))
9818 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9820 /* If ARG1 is a constant, we can convert this to a multiply by the
9821 reciprocal. This does not have the same rounding properties,
9822 so only do this if -funsafe-math-optimizations. We can actually
9823 always safely do it if ARG1 is a power of two, but it's hard to
9824 tell if it is or not in a portable manner. */
9825 if (TREE_CODE (arg1) == REAL_CST)
9827 if (flag_unsafe_math_optimizations
9828 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9829 arg1, 0)))
9830 return fold_build2 (MULT_EXPR, type, arg0, tem);
9831 /* Find the reciprocal if optimizing and the result is exact. */
9832 if (optimize)
9834 REAL_VALUE_TYPE r;
9835 r = TREE_REAL_CST (arg1);
9836 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9838 tem = build_real (type, r);
9839 return fold_build2 (MULT_EXPR, type,
9840 fold_convert (type, arg0), tem);
9844 /* Convert A/B/C to A/(B*C). */
9845 if (flag_unsafe_math_optimizations
9846 && TREE_CODE (arg0) == RDIV_EXPR)
9847 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9848 fold_build2 (MULT_EXPR, type,
9849 TREE_OPERAND (arg0, 1), arg1));
9851 /* Convert A/(B/C) to (A/B)*C. */
9852 if (flag_unsafe_math_optimizations
9853 && TREE_CODE (arg1) == RDIV_EXPR)
9854 return fold_build2 (MULT_EXPR, type,
9855 fold_build2 (RDIV_EXPR, type, arg0,
9856 TREE_OPERAND (arg1, 0)),
9857 TREE_OPERAND (arg1, 1));
9859 /* Convert C1/(X*C2) into (C1/C2)/X. */
9860 if (flag_unsafe_math_optimizations
9861 && TREE_CODE (arg1) == MULT_EXPR
9862 && TREE_CODE (arg0) == REAL_CST
9863 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9865 tree tem = const_binop (RDIV_EXPR, arg0,
9866 TREE_OPERAND (arg1, 1), 0);
9867 if (tem)
9868 return fold_build2 (RDIV_EXPR, type, tem,
9869 TREE_OPERAND (arg1, 0));
9872 if (flag_unsafe_math_optimizations)
9874 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9875 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9877 /* Optimize sin(x)/cos(x) as tan(x). */
9878 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9879 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9880 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9881 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9882 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9884 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9886 if (tanfn != NULL_TREE)
9887 return build_function_call_expr (tanfn,
9888 TREE_OPERAND (arg0, 1));
9891 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9892 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9893 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9894 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9895 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9896 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9898 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9900 if (tanfn != NULL_TREE)
9902 tree tmp = TREE_OPERAND (arg0, 1);
9903 tmp = build_function_call_expr (tanfn, tmp);
9904 return fold_build2 (RDIV_EXPR, type,
9905 build_real (type, dconst1), tmp);
9909 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9910 NaNs or Infinities. */
9911 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9912 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9913 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9915 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9916 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9918 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9919 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9920 && operand_equal_p (arg00, arg01, 0))
9922 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9924 if (cosfn != NULL_TREE)
9925 return build_function_call_expr (cosfn,
9926 TREE_OPERAND (arg0, 1));
9930 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9931 NaNs or Infinities. */
9932 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9933 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9934 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9936 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9937 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9939 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9940 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9941 && operand_equal_p (arg00, arg01, 0))
9943 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9945 if (cosfn != NULL_TREE)
9947 tree tmp = TREE_OPERAND (arg0, 1);
9948 tmp = build_function_call_expr (cosfn, tmp);
9949 return fold_build2 (RDIV_EXPR, type,
9950 build_real (type, dconst1),
9951 tmp);
9956 /* Optimize pow(x,c)/x as pow(x,c-1). */
9957 if (fcode0 == BUILT_IN_POW
9958 || fcode0 == BUILT_IN_POWF
9959 || fcode0 == BUILT_IN_POWL)
9961 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9962 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9963 if (TREE_CODE (arg01) == REAL_CST
9964 && ! TREE_CONSTANT_OVERFLOW (arg01)
9965 && operand_equal_p (arg1, arg00, 0))
9967 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9968 REAL_VALUE_TYPE c;
9969 tree arg, arglist;
9971 c = TREE_REAL_CST (arg01);
9972 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9973 arg = build_real (type, c);
9974 arglist = build_tree_list (NULL_TREE, arg);
9975 arglist = tree_cons (NULL_TREE, arg1, arglist);
9976 return build_function_call_expr (powfn, arglist);
9980 /* Optimize x/expN(y) into x*expN(-y). */
9981 if (BUILTIN_EXPONENT_P (fcode1))
9983 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9984 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9985 tree arglist = build_tree_list (NULL_TREE,
9986 fold_convert (type, arg));
9987 arg1 = build_function_call_expr (expfn, arglist);
9988 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9991 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9992 if (fcode1 == BUILT_IN_POW
9993 || fcode1 == BUILT_IN_POWF
9994 || fcode1 == BUILT_IN_POWL)
9996 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9997 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9998 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9999 tree neg11 = fold_convert (type, negate_expr (arg11));
10000 tree arglist = tree_cons(NULL_TREE, arg10,
10001 build_tree_list (NULL_TREE, neg11));
10002 arg1 = build_function_call_expr (powfn, arglist);
10003 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10006 return NULL_TREE;
10008 case TRUNC_DIV_EXPR:
10009 case FLOOR_DIV_EXPR:
10010 /* Simplify A / (B << N) where A and B are positive and B is
10011 a power of 2, to A >> (N + log2(B)). */
10012 if (TREE_CODE (arg1) == LSHIFT_EXPR
10013 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10015 tree sval = TREE_OPERAND (arg1, 0);
10016 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10018 tree sh_cnt = TREE_OPERAND (arg1, 1);
10019 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10021 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10022 sh_cnt, build_int_cst (NULL_TREE, pow2));
10023 return fold_build2 (RSHIFT_EXPR, type,
10024 fold_convert (type, arg0), sh_cnt);
10027 /* Fall thru */
10029 case ROUND_DIV_EXPR:
10030 case CEIL_DIV_EXPR:
10031 case EXACT_DIV_EXPR:
10032 if (integer_onep (arg1))
10033 return non_lvalue (fold_convert (type, arg0));
10034 if (integer_zerop (arg1))
10035 return NULL_TREE;
10036 /* X / -1 is -X. */
10037 if (!TYPE_UNSIGNED (type)
10038 && TREE_CODE (arg1) == INTEGER_CST
10039 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10040 && TREE_INT_CST_HIGH (arg1) == -1)
10041 return fold_convert (type, negate_expr (arg0));
10043 /* Convert -A / -B to A / B when the type is signed and overflow is
10044 undefined. */
10045 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10046 && TREE_CODE (arg0) == NEGATE_EXPR
10047 && negate_expr_p (arg1))
10048 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10049 negate_expr (arg1));
10050 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10051 && TREE_CODE (arg1) == NEGATE_EXPR
10052 && negate_expr_p (arg0))
10053 return fold_build2 (code, type, negate_expr (arg0),
10054 TREE_OPERAND (arg1, 0));
10056 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10057 operation, EXACT_DIV_EXPR.
10059 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10060 At one time others generated faster code, it's not clear if they do
10061 after the last round to changes to the DIV code in expmed.c. */
10062 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10063 && multiple_of_p (type, arg0, arg1))
10064 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10066 if (TREE_CODE (arg1) == INTEGER_CST
10067 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10068 return fold_convert (type, tem);
10070 return NULL_TREE;
10072 case CEIL_MOD_EXPR:
10073 case FLOOR_MOD_EXPR:
10074 case ROUND_MOD_EXPR:
10075 case TRUNC_MOD_EXPR:
10076 /* X % 1 is always zero, but be sure to preserve any side
10077 effects in X. */
10078 if (integer_onep (arg1))
10079 return omit_one_operand (type, integer_zero_node, arg0);
10081 /* X % 0, return X % 0 unchanged so that we can get the
10082 proper warnings and errors. */
10083 if (integer_zerop (arg1))
10084 return NULL_TREE;
10086 /* 0 % X is always zero, but be sure to preserve any side
10087 effects in X. Place this after checking for X == 0. */
10088 if (integer_zerop (arg0))
10089 return omit_one_operand (type, integer_zero_node, arg1);
10091 /* X % -1 is zero. */
10092 if (!TYPE_UNSIGNED (type)
10093 && TREE_CODE (arg1) == INTEGER_CST
10094 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10095 && TREE_INT_CST_HIGH (arg1) == -1)
10096 return omit_one_operand (type, integer_zero_node, arg0);
10098 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10099 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10100 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10101 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10103 tree c = arg1;
10104 /* Also optimize A % (C << N) where C is a power of 2,
10105 to A & ((C << N) - 1). */
10106 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10107 c = TREE_OPERAND (arg1, 0);
10109 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10111 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10112 build_int_cst (TREE_TYPE (arg1), 1));
10113 return fold_build2 (BIT_AND_EXPR, type,
10114 fold_convert (type, arg0),
10115 fold_convert (type, mask));
10119 /* X % -C is the same as X % C. */
10120 if (code == TRUNC_MOD_EXPR
10121 && !TYPE_UNSIGNED (type)
10122 && TREE_CODE (arg1) == INTEGER_CST
10123 && !TREE_CONSTANT_OVERFLOW (arg1)
10124 && TREE_INT_CST_HIGH (arg1) < 0
10125 && !flag_trapv
10126 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10127 && !sign_bit_p (arg1, arg1))
10128 return fold_build2 (code, type, fold_convert (type, arg0),
10129 fold_convert (type, negate_expr (arg1)));
10131 /* X % -Y is the same as X % Y. */
10132 if (code == TRUNC_MOD_EXPR
10133 && !TYPE_UNSIGNED (type)
10134 && TREE_CODE (arg1) == NEGATE_EXPR
10135 && !flag_trapv)
10136 return fold_build2 (code, type, fold_convert (type, arg0),
10137 fold_convert (type, TREE_OPERAND (arg1, 0)));
10139 if (TREE_CODE (arg1) == INTEGER_CST
10140 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10141 return fold_convert (type, tem);
10143 return NULL_TREE;
10145 case LROTATE_EXPR:
10146 case RROTATE_EXPR:
10147 if (integer_all_onesp (arg0))
10148 return omit_one_operand (type, arg0, arg1);
10149 goto shift;
10151 case RSHIFT_EXPR:
10152 /* Optimize -1 >> x for arithmetic right shifts. */
10153 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10154 return omit_one_operand (type, arg0, arg1);
10155 /* ... fall through ... */
10157 case LSHIFT_EXPR:
10158 shift:
10159 if (integer_zerop (arg1))
10160 return non_lvalue (fold_convert (type, arg0));
10161 if (integer_zerop (arg0))
10162 return omit_one_operand (type, arg0, arg1);
10164 /* Since negative shift count is not well-defined,
10165 don't try to compute it in the compiler. */
10166 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10167 return NULL_TREE;
10169 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10170 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10171 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10172 && host_integerp (TREE_OPERAND (arg0, 1), false)
10173 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10175 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10176 + TREE_INT_CST_LOW (arg1));
10178 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10179 being well defined. */
10180 if (low >= TYPE_PRECISION (type))
10182 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10183 low = low % TYPE_PRECISION (type);
10184 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10185 return build_int_cst (type, 0);
10186 else
10187 low = TYPE_PRECISION (type) - 1;
10190 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10191 build_int_cst (type, low));
10194 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10195 into x & ((unsigned)-1 >> c) for unsigned types. */
10196 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10197 || (TYPE_UNSIGNED (type)
10198 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10199 && host_integerp (arg1, false)
10200 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10201 && host_integerp (TREE_OPERAND (arg0, 1), false)
10202 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10204 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10205 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10206 tree lshift;
10207 tree arg00;
10209 if (low0 == low1)
10211 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10213 lshift = build_int_cst (type, -1);
10214 lshift = int_const_binop (code, lshift, arg1, 0);
10216 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10220 /* Rewrite an LROTATE_EXPR by a constant into an
10221 RROTATE_EXPR by a new constant. */
10222 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10224 tree tem = build_int_cst (TREE_TYPE (arg1),
10225 GET_MODE_BITSIZE (TYPE_MODE (type)));
10226 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10227 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10230 /* If we have a rotate of a bit operation with the rotate count and
10231 the second operand of the bit operation both constant,
10232 permute the two operations. */
10233 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10234 && (TREE_CODE (arg0) == BIT_AND_EXPR
10235 || TREE_CODE (arg0) == BIT_IOR_EXPR
10236 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10237 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10238 return fold_build2 (TREE_CODE (arg0), type,
10239 fold_build2 (code, type,
10240 TREE_OPERAND (arg0, 0), arg1),
10241 fold_build2 (code, type,
10242 TREE_OPERAND (arg0, 1), arg1));
10244 /* Two consecutive rotates adding up to the width of the mode can
10245 be ignored. */
10246 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10247 && TREE_CODE (arg0) == RROTATE_EXPR
10248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10249 && TREE_INT_CST_HIGH (arg1) == 0
10250 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10251 && ((TREE_INT_CST_LOW (arg1)
10252 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10253 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10254 return TREE_OPERAND (arg0, 0);
10256 return NULL_TREE;
10258 case MIN_EXPR:
10259 if (operand_equal_p (arg0, arg1, 0))
10260 return omit_one_operand (type, arg0, arg1);
10261 if (INTEGRAL_TYPE_P (type)
10262 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10263 return omit_one_operand (type, arg1, arg0);
10264 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10265 if (tem)
10266 return tem;
10267 goto associate;
10269 case MAX_EXPR:
10270 if (operand_equal_p (arg0, arg1, 0))
10271 return omit_one_operand (type, arg0, arg1);
10272 if (INTEGRAL_TYPE_P (type)
10273 && TYPE_MAX_VALUE (type)
10274 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10275 return omit_one_operand (type, arg1, arg0);
10276 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10277 if (tem)
10278 return tem;
10279 goto associate;
10281 case TRUTH_ANDIF_EXPR:
10282 /* Note that the operands of this must be ints
10283 and their values must be 0 or 1.
10284 ("true" is a fixed value perhaps depending on the language.) */
10285 /* If first arg is constant zero, return it. */
10286 if (integer_zerop (arg0))
10287 return fold_convert (type, arg0);
10288 case TRUTH_AND_EXPR:
10289 /* If either arg is constant true, drop it. */
10290 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10291 return non_lvalue (fold_convert (type, arg1));
10292 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10293 /* Preserve sequence points. */
10294 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10295 return non_lvalue (fold_convert (type, arg0));
10296 /* If second arg is constant zero, result is zero, but first arg
10297 must be evaluated. */
10298 if (integer_zerop (arg1))
10299 return omit_one_operand (type, arg1, arg0);
10300 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10301 case will be handled here. */
10302 if (integer_zerop (arg0))
10303 return omit_one_operand (type, arg0, arg1);
10305 /* !X && X is always false. */
10306 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10307 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10308 return omit_one_operand (type, integer_zero_node, arg1);
10309 /* X && !X is always false. */
10310 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10311 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10312 return omit_one_operand (type, integer_zero_node, arg0);
10314 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10315 means A >= Y && A != MAX, but in this case we know that
10316 A < X <= MAX. */
10318 if (!TREE_SIDE_EFFECTS (arg0)
10319 && !TREE_SIDE_EFFECTS (arg1))
10321 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10322 if (tem && !operand_equal_p (tem, arg0, 0))
10323 return fold_build2 (code, type, tem, arg1);
10325 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10326 if (tem && !operand_equal_p (tem, arg1, 0))
10327 return fold_build2 (code, type, arg0, tem);
10330 truth_andor:
10331 /* We only do these simplifications if we are optimizing. */
10332 if (!optimize)
10333 return NULL_TREE;
10335 /* Check for things like (A || B) && (A || C). We can convert this
10336 to A || (B && C). Note that either operator can be any of the four
10337 truth and/or operations and the transformation will still be
10338 valid. Also note that we only care about order for the
10339 ANDIF and ORIF operators. If B contains side effects, this
10340 might change the truth-value of A. */
10341 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10342 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10343 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10344 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10345 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10346 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10348 tree a00 = TREE_OPERAND (arg0, 0);
10349 tree a01 = TREE_OPERAND (arg0, 1);
10350 tree a10 = TREE_OPERAND (arg1, 0);
10351 tree a11 = TREE_OPERAND (arg1, 1);
10352 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10353 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10354 && (code == TRUTH_AND_EXPR
10355 || code == TRUTH_OR_EXPR));
10357 if (operand_equal_p (a00, a10, 0))
10358 return fold_build2 (TREE_CODE (arg0), type, a00,
10359 fold_build2 (code, type, a01, a11));
10360 else if (commutative && operand_equal_p (a00, a11, 0))
10361 return fold_build2 (TREE_CODE (arg0), type, a00,
10362 fold_build2 (code, type, a01, a10));
10363 else if (commutative && operand_equal_p (a01, a10, 0))
10364 return fold_build2 (TREE_CODE (arg0), type, a01,
10365 fold_build2 (code, type, a00, a11));
10367 /* This case is tricky because we must either have commutative
10368 operators or else A10 must not have side-effects. */
10370 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10371 && operand_equal_p (a01, a11, 0))
10372 return fold_build2 (TREE_CODE (arg0), type,
10373 fold_build2 (code, type, a00, a10),
10374 a01);
10377 /* See if we can build a range comparison. */
10378 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10379 return tem;
10381 /* Check for the possibility of merging component references. If our
10382 lhs is another similar operation, try to merge its rhs with our
10383 rhs. Then try to merge our lhs and rhs. */
10384 if (TREE_CODE (arg0) == code
10385 && 0 != (tem = fold_truthop (code, type,
10386 TREE_OPERAND (arg0, 1), arg1)))
10387 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10389 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10390 return tem;
10392 return NULL_TREE;
10394 case TRUTH_ORIF_EXPR:
10395 /* Note that the operands of this must be ints
10396 and their values must be 0 or true.
10397 ("true" is a fixed value perhaps depending on the language.) */
10398 /* If first arg is constant true, return it. */
10399 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10400 return fold_convert (type, arg0);
10401 case TRUTH_OR_EXPR:
10402 /* If either arg is constant zero, drop it. */
10403 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10404 return non_lvalue (fold_convert (type, arg1));
10405 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10406 /* Preserve sequence points. */
10407 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10408 return non_lvalue (fold_convert (type, arg0));
10409 /* If second arg is constant true, result is true, but we must
10410 evaluate first arg. */
10411 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10412 return omit_one_operand (type, arg1, arg0);
10413 /* Likewise for first arg, but note this only occurs here for
10414 TRUTH_OR_EXPR. */
10415 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10416 return omit_one_operand (type, arg0, arg1);
10418 /* !X || X is always true. */
10419 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10421 return omit_one_operand (type, integer_one_node, arg1);
10422 /* X || !X is always true. */
10423 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10425 return omit_one_operand (type, integer_one_node, arg0);
10427 goto truth_andor;
10429 case TRUTH_XOR_EXPR:
10430 /* If the second arg is constant zero, drop it. */
10431 if (integer_zerop (arg1))
10432 return non_lvalue (fold_convert (type, arg0));
10433 /* If the second arg is constant true, this is a logical inversion. */
10434 if (integer_onep (arg1))
10436 /* Only call invert_truthvalue if operand is a truth value. */
10437 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10438 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10439 else
10440 tem = invert_truthvalue (arg0);
10441 return non_lvalue (fold_convert (type, tem));
10443 /* Identical arguments cancel to zero. */
10444 if (operand_equal_p (arg0, arg1, 0))
10445 return omit_one_operand (type, integer_zero_node, arg0);
10447 /* !X ^ X is always true. */
10448 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10449 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10450 return omit_one_operand (type, integer_one_node, arg1);
10452 /* X ^ !X is always true. */
10453 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10455 return omit_one_operand (type, integer_one_node, arg0);
10457 return NULL_TREE;
10459 case EQ_EXPR:
10460 case NE_EXPR:
10461 tem = fold_comparison (code, type, op0, op1);
10462 if (tem != NULL_TREE)
10463 return tem;
10465 /* bool_var != 0 becomes bool_var. */
10466 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10467 && code == NE_EXPR)
10468 return non_lvalue (fold_convert (type, arg0));
10470 /* bool_var == 1 becomes bool_var. */
10471 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10472 && code == EQ_EXPR)
10473 return non_lvalue (fold_convert (type, arg0));
10475 /* bool_var != 1 becomes !bool_var. */
10476 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10477 && code == NE_EXPR)
10478 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10480 /* bool_var == 0 becomes !bool_var. */
10481 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10482 && code == EQ_EXPR)
10483 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10485 /* If this is an equality comparison of the address of a non-weak
10486 object against zero, then we know the result. */
10487 if (TREE_CODE (arg0) == ADDR_EXPR
10488 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10489 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10490 && integer_zerop (arg1))
10491 return constant_boolean_node (code != EQ_EXPR, type);
10493 /* If this is an equality comparison of the address of two non-weak,
10494 unaliased symbols neither of which are extern (since we do not
10495 have access to attributes for externs), then we know the result. */
10496 if (TREE_CODE (arg0) == ADDR_EXPR
10497 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10498 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10499 && ! lookup_attribute ("alias",
10500 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10501 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10502 && TREE_CODE (arg1) == ADDR_EXPR
10503 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10504 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10505 && ! lookup_attribute ("alias",
10506 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10507 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10509 /* We know that we're looking at the address of two
10510 non-weak, unaliased, static _DECL nodes.
10512 It is both wasteful and incorrect to call operand_equal_p
10513 to compare the two ADDR_EXPR nodes. It is wasteful in that
10514 all we need to do is test pointer equality for the arguments
10515 to the two ADDR_EXPR nodes. It is incorrect to use
10516 operand_equal_p as that function is NOT equivalent to a
10517 C equality test. It can in fact return false for two
10518 objects which would test as equal using the C equality
10519 operator. */
10520 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10521 return constant_boolean_node (equal
10522 ? code == EQ_EXPR : code != EQ_EXPR,
10523 type);
10526 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10527 a MINUS_EXPR of a constant, we can convert it into a comparison with
10528 a revised constant as long as no overflow occurs. */
10529 if (TREE_CODE (arg1) == INTEGER_CST
10530 && (TREE_CODE (arg0) == PLUS_EXPR
10531 || TREE_CODE (arg0) == MINUS_EXPR)
10532 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10533 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10534 ? MINUS_EXPR : PLUS_EXPR,
10535 fold_convert (TREE_TYPE (arg0), arg1),
10536 TREE_OPERAND (arg0, 1), 0))
10537 && ! TREE_CONSTANT_OVERFLOW (tem))
10538 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10540 /* Similarly for a NEGATE_EXPR. */
10541 if (TREE_CODE (arg0) == NEGATE_EXPR
10542 && TREE_CODE (arg1) == INTEGER_CST
10543 && 0 != (tem = negate_expr (arg1))
10544 && TREE_CODE (tem) == INTEGER_CST
10545 && ! TREE_CONSTANT_OVERFLOW (tem))
10546 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10548 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10549 for !=. Don't do this for ordered comparisons due to overflow. */
10550 if (TREE_CODE (arg0) == MINUS_EXPR
10551 && integer_zerop (arg1))
10552 return fold_build2 (code, type,
10553 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10555 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10556 if (TREE_CODE (arg0) == ABS_EXPR
10557 && (integer_zerop (arg1) || real_zerop (arg1)))
10558 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10560 /* If this is an EQ or NE comparison with zero and ARG0 is
10561 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10562 two operations, but the latter can be done in one less insn
10563 on machines that have only two-operand insns or on which a
10564 constant cannot be the first operand. */
10565 if (TREE_CODE (arg0) == BIT_AND_EXPR
10566 && integer_zerop (arg1))
10568 tree arg00 = TREE_OPERAND (arg0, 0);
10569 tree arg01 = TREE_OPERAND (arg0, 1);
10570 if (TREE_CODE (arg00) == LSHIFT_EXPR
10571 && integer_onep (TREE_OPERAND (arg00, 0)))
10572 return
10573 fold_build2 (code, type,
10574 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10575 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10576 arg01, TREE_OPERAND (arg00, 1)),
10577 fold_convert (TREE_TYPE (arg0),
10578 integer_one_node)),
10579 arg1);
10580 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10581 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10582 return
10583 fold_build2 (code, type,
10584 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10585 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10586 arg00, TREE_OPERAND (arg01, 1)),
10587 fold_convert (TREE_TYPE (arg0),
10588 integer_one_node)),
10589 arg1);
10592 /* If this is an NE or EQ comparison of zero against the result of a
10593 signed MOD operation whose second operand is a power of 2, make
10594 the MOD operation unsigned since it is simpler and equivalent. */
10595 if (integer_zerop (arg1)
10596 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10597 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10598 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10599 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10600 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10601 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10603 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10604 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10605 fold_convert (newtype,
10606 TREE_OPERAND (arg0, 0)),
10607 fold_convert (newtype,
10608 TREE_OPERAND (arg0, 1)));
10610 return fold_build2 (code, type, newmod,
10611 fold_convert (newtype, arg1));
10614 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10615 C1 is a valid shift constant, and C2 is a power of two, i.e.
10616 a single bit. */
10617 if (TREE_CODE (arg0) == BIT_AND_EXPR
10618 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10619 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10620 == INTEGER_CST
10621 && integer_pow2p (TREE_OPERAND (arg0, 1))
10622 && integer_zerop (arg1))
10624 tree itype = TREE_TYPE (arg0);
10625 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10626 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10628 /* Check for a valid shift count. */
10629 if (TREE_INT_CST_HIGH (arg001) == 0
10630 && TREE_INT_CST_LOW (arg001) < prec)
10632 tree arg01 = TREE_OPERAND (arg0, 1);
10633 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10634 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10635 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10636 can be rewritten as (X & (C2 << C1)) != 0. */
10637 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10639 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10640 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10641 return fold_build2 (code, type, tem, arg1);
10643 /* Otherwise, for signed (arithmetic) shifts,
10644 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10645 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10646 else if (!TYPE_UNSIGNED (itype))
10647 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10648 arg000, build_int_cst (itype, 0));
10649 /* Otherwise, for unsigned (logical) shifts,
10650 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10651 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10652 else
10653 return omit_one_operand (type,
10654 code == EQ_EXPR ? integer_one_node
10655 : integer_zero_node,
10656 arg000);
10660 /* If this is an NE comparison of zero with an AND of one, remove the
10661 comparison since the AND will give the correct value. */
10662 if (code == NE_EXPR
10663 && integer_zerop (arg1)
10664 && TREE_CODE (arg0) == BIT_AND_EXPR
10665 && integer_onep (TREE_OPERAND (arg0, 1)))
10666 return fold_convert (type, arg0);
10668 /* If we have (A & C) == C where C is a power of 2, convert this into
10669 (A & C) != 0. Similarly for NE_EXPR. */
10670 if (TREE_CODE (arg0) == BIT_AND_EXPR
10671 && integer_pow2p (TREE_OPERAND (arg0, 1))
10672 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10673 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10674 arg0, fold_convert (TREE_TYPE (arg0),
10675 integer_zero_node));
10677 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10678 bit, then fold the expression into A < 0 or A >= 0. */
10679 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10680 if (tem)
10681 return tem;
10683 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10684 Similarly for NE_EXPR. */
10685 if (TREE_CODE (arg0) == BIT_AND_EXPR
10686 && TREE_CODE (arg1) == INTEGER_CST
10687 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10689 tree notc = fold_build1 (BIT_NOT_EXPR,
10690 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10691 TREE_OPERAND (arg0, 1));
10692 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10693 arg1, notc);
10694 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10695 if (integer_nonzerop (dandnotc))
10696 return omit_one_operand (type, rslt, arg0);
10699 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10700 Similarly for NE_EXPR. */
10701 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10702 && TREE_CODE (arg1) == INTEGER_CST
10703 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10705 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10706 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10707 TREE_OPERAND (arg0, 1), notd);
10708 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10709 if (integer_nonzerop (candnotd))
10710 return omit_one_operand (type, rslt, arg0);
10713 /* If this is a comparison of a field, we may be able to simplify it. */
10714 if (((TREE_CODE (arg0) == COMPONENT_REF
10715 && lang_hooks.can_use_bit_fields_p ())
10716 || TREE_CODE (arg0) == BIT_FIELD_REF)
10717 /* Handle the constant case even without -O
10718 to make sure the warnings are given. */
10719 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10721 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10722 if (t1)
10723 return t1;
10726 /* Optimize comparisons of strlen vs zero to a compare of the
10727 first character of the string vs zero. To wit,
10728 strlen(ptr) == 0 => *ptr == 0
10729 strlen(ptr) != 0 => *ptr != 0
10730 Other cases should reduce to one of these two (or a constant)
10731 due to the return value of strlen being unsigned. */
10732 if (TREE_CODE (arg0) == CALL_EXPR
10733 && integer_zerop (arg1))
10735 tree fndecl = get_callee_fndecl (arg0);
10736 tree arglist;
10738 if (fndecl
10739 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10740 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10741 && (arglist = TREE_OPERAND (arg0, 1))
10742 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10743 && ! TREE_CHAIN (arglist))
10745 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10746 return fold_build2 (code, type, iref,
10747 build_int_cst (TREE_TYPE (iref), 0));
10751 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10752 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10753 if (TREE_CODE (arg0) == RSHIFT_EXPR
10754 && integer_zerop (arg1)
10755 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10757 tree arg00 = TREE_OPERAND (arg0, 0);
10758 tree arg01 = TREE_OPERAND (arg0, 1);
10759 tree itype = TREE_TYPE (arg00);
10760 if (TREE_INT_CST_HIGH (arg01) == 0
10761 && TREE_INT_CST_LOW (arg01)
10762 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10764 if (TYPE_UNSIGNED (itype))
10766 itype = lang_hooks.types.signed_type (itype);
10767 arg00 = fold_convert (itype, arg00);
10769 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10770 type, arg00, build_int_cst (itype, 0));
10774 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10775 if (integer_zerop (arg1)
10776 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10777 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10778 TREE_OPERAND (arg0, 1));
10780 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10781 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10782 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10783 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10784 build_int_cst (TREE_TYPE (arg1), 0));
10785 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10786 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10787 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10788 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10789 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10790 build_int_cst (TREE_TYPE (arg1), 0));
10792 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10793 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10794 && TREE_CODE (arg1) == INTEGER_CST
10795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10796 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10797 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10798 TREE_OPERAND (arg0, 1), arg1));
10800 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10801 (X & C) == 0 when C is a single bit. */
10802 if (TREE_CODE (arg0) == BIT_AND_EXPR
10803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10804 && integer_zerop (arg1)
10805 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10807 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10808 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10809 TREE_OPERAND (arg0, 1));
10810 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10811 type, tem, arg1);
10814 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10815 constant C is a power of two, i.e. a single bit. */
10816 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10817 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10818 && integer_zerop (arg1)
10819 && integer_pow2p (TREE_OPERAND (arg0, 1))
10820 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10821 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10823 tree arg00 = TREE_OPERAND (arg0, 0);
10824 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10825 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10828 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10829 when C is a power of two, i.e. a single bit. */
10830 if (TREE_CODE (arg0) == BIT_AND_EXPR
10831 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10832 && integer_zerop (arg1)
10833 && integer_pow2p (TREE_OPERAND (arg0, 1))
10834 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10835 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10837 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10838 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10839 arg000, TREE_OPERAND (arg0, 1));
10840 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10841 tem, build_int_cst (TREE_TYPE (tem), 0));
10844 if (integer_zerop (arg1)
10845 && tree_expr_nonzero_p (arg0))
10847 tree res = constant_boolean_node (code==NE_EXPR, type);
10848 return omit_one_operand (type, res, arg0);
10851 /* Fold -X op -Y as X op Y, where op is eq/ne. */
10852 if (TREE_CODE (arg0) == NEGATE_EXPR
10853 && TREE_CODE (arg1) == NEGATE_EXPR)
10854 return fold_build2 (code, type,
10855 TREE_OPERAND (arg0, 0),
10856 TREE_OPERAND (arg1, 0));
10858 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10859 if (TREE_CODE (arg0) == BIT_AND_EXPR
10860 && TREE_CODE (arg1) == BIT_AND_EXPR)
10862 tree arg00 = TREE_OPERAND (arg0, 0);
10863 tree arg01 = TREE_OPERAND (arg0, 1);
10864 tree arg10 = TREE_OPERAND (arg1, 0);
10865 tree arg11 = TREE_OPERAND (arg1, 1);
10866 tree itype = TREE_TYPE (arg0);
10868 if (operand_equal_p (arg01, arg11, 0))
10869 return fold_build2 (code, type,
10870 fold_build2 (BIT_AND_EXPR, itype,
10871 fold_build2 (BIT_XOR_EXPR, itype,
10872 arg00, arg10),
10873 arg01),
10874 build_int_cst (itype, 0));
10876 if (operand_equal_p (arg01, arg10, 0))
10877 return fold_build2 (code, type,
10878 fold_build2 (BIT_AND_EXPR, itype,
10879 fold_build2 (BIT_XOR_EXPR, itype,
10880 arg00, arg11),
10881 arg01),
10882 build_int_cst (itype, 0));
10884 if (operand_equal_p (arg00, arg11, 0))
10885 return fold_build2 (code, type,
10886 fold_build2 (BIT_AND_EXPR, itype,
10887 fold_build2 (BIT_XOR_EXPR, itype,
10888 arg01, arg10),
10889 arg00),
10890 build_int_cst (itype, 0));
10892 if (operand_equal_p (arg00, arg10, 0))
10893 return fold_build2 (code, type,
10894 fold_build2 (BIT_AND_EXPR, itype,
10895 fold_build2 (BIT_XOR_EXPR, itype,
10896 arg01, arg11),
10897 arg00),
10898 build_int_cst (itype, 0));
10901 return NULL_TREE;
10903 case LT_EXPR:
10904 case GT_EXPR:
10905 case LE_EXPR:
10906 case GE_EXPR:
10907 tem = fold_comparison (code, type, op0, op1);
10908 if (tem != NULL_TREE)
10909 return tem;
10911 /* Transform comparisons of the form X +- C CMP X. */
10912 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10913 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10914 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10915 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10916 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10917 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10918 && !(flag_wrapv || flag_trapv))))
10920 tree arg01 = TREE_OPERAND (arg0, 1);
10921 enum tree_code code0 = TREE_CODE (arg0);
10922 int is_positive;
10924 if (TREE_CODE (arg01) == REAL_CST)
10925 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10926 else
10927 is_positive = tree_int_cst_sgn (arg01);
10929 /* (X - c) > X becomes false. */
10930 if (code == GT_EXPR
10931 && ((code0 == MINUS_EXPR && is_positive >= 0)
10932 || (code0 == PLUS_EXPR && is_positive <= 0)))
10933 return constant_boolean_node (0, type);
10935 /* Likewise (X + c) < X becomes false. */
10936 if (code == LT_EXPR
10937 && ((code0 == PLUS_EXPR && is_positive >= 0)
10938 || (code0 == MINUS_EXPR && is_positive <= 0)))
10939 return constant_boolean_node (0, type);
10941 /* Convert (X - c) <= X to true. */
10942 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10943 && code == LE_EXPR
10944 && ((code0 == MINUS_EXPR && is_positive >= 0)
10945 || (code0 == PLUS_EXPR && is_positive <= 0)))
10946 return constant_boolean_node (1, type);
10948 /* Convert (X + c) >= X to true. */
10949 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10950 && code == GE_EXPR
10951 && ((code0 == PLUS_EXPR && is_positive >= 0)
10952 || (code0 == MINUS_EXPR && is_positive <= 0)))
10953 return constant_boolean_node (1, type);
10955 if (TREE_CODE (arg01) == INTEGER_CST)
10957 /* Convert X + c > X and X - c < X to true for integers. */
10958 if (code == GT_EXPR
10959 && ((code0 == PLUS_EXPR && is_positive > 0)
10960 || (code0 == MINUS_EXPR && is_positive < 0)))
10961 return constant_boolean_node (1, type);
10963 if (code == LT_EXPR
10964 && ((code0 == MINUS_EXPR && is_positive > 0)
10965 || (code0 == PLUS_EXPR && is_positive < 0)))
10966 return constant_boolean_node (1, type);
10968 /* Convert X + c <= X and X - c >= X to false for integers. */
10969 if (code == LE_EXPR
10970 && ((code0 == PLUS_EXPR && is_positive > 0)
10971 || (code0 == MINUS_EXPR && is_positive < 0)))
10972 return constant_boolean_node (0, type);
10974 if (code == GE_EXPR
10975 && ((code0 == MINUS_EXPR && is_positive > 0)
10976 || (code0 == PLUS_EXPR && is_positive < 0)))
10977 return constant_boolean_node (0, type);
10981 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10982 This transformation affects the cases which are handled in later
10983 optimizations involving comparisons with non-negative constants. */
10984 if (TREE_CODE (arg1) == INTEGER_CST
10985 && TREE_CODE (arg0) != INTEGER_CST
10986 && tree_int_cst_sgn (arg1) > 0)
10988 if (code == GE_EXPR)
10990 arg1 = const_binop (MINUS_EXPR, arg1,
10991 build_int_cst (TREE_TYPE (arg1), 1), 0);
10992 return fold_build2 (GT_EXPR, type, arg0,
10993 fold_convert (TREE_TYPE (arg0), arg1));
10995 if (code == LT_EXPR)
10997 arg1 = const_binop (MINUS_EXPR, arg1,
10998 build_int_cst (TREE_TYPE (arg1), 1), 0);
10999 return fold_build2 (LE_EXPR, type, arg0,
11000 fold_convert (TREE_TYPE (arg0), arg1));
11004 /* Comparisons with the highest or lowest possible integer of
11005 the specified precision will have known values. */
11007 tree arg1_type = TREE_TYPE (arg1);
11008 unsigned int width = TYPE_PRECISION (arg1_type);
11010 if (TREE_CODE (arg1) == INTEGER_CST
11011 && ! TREE_CONSTANT_OVERFLOW (arg1)
11012 && width <= 2 * HOST_BITS_PER_WIDE_INT
11013 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11015 HOST_WIDE_INT signed_max_hi;
11016 unsigned HOST_WIDE_INT signed_max_lo;
11017 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11019 if (width <= HOST_BITS_PER_WIDE_INT)
11021 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11022 - 1;
11023 signed_max_hi = 0;
11024 max_hi = 0;
11026 if (TYPE_UNSIGNED (arg1_type))
11028 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11029 min_lo = 0;
11030 min_hi = 0;
11032 else
11034 max_lo = signed_max_lo;
11035 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11036 min_hi = -1;
11039 else
11041 width -= HOST_BITS_PER_WIDE_INT;
11042 signed_max_lo = -1;
11043 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11044 - 1;
11045 max_lo = -1;
11046 min_lo = 0;
11048 if (TYPE_UNSIGNED (arg1_type))
11050 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11051 min_hi = 0;
11053 else
11055 max_hi = signed_max_hi;
11056 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11060 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11061 && TREE_INT_CST_LOW (arg1) == max_lo)
11062 switch (code)
11064 case GT_EXPR:
11065 return omit_one_operand (type, integer_zero_node, arg0);
11067 case GE_EXPR:
11068 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11070 case LE_EXPR:
11071 return omit_one_operand (type, integer_one_node, arg0);
11073 case LT_EXPR:
11074 return fold_build2 (NE_EXPR, type, arg0, arg1);
11076 /* The GE_EXPR and LT_EXPR cases above are not normally
11077 reached because of previous transformations. */
11079 default:
11080 break;
11082 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11083 == max_hi
11084 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11085 switch (code)
11087 case GT_EXPR:
11088 arg1 = const_binop (PLUS_EXPR, arg1,
11089 build_int_cst (TREE_TYPE (arg1), 1), 0);
11090 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11091 case LE_EXPR:
11092 arg1 = const_binop (PLUS_EXPR, arg1,
11093 build_int_cst (TREE_TYPE (arg1), 1), 0);
11094 return fold_build2 (NE_EXPR, type, arg0, arg1);
11095 default:
11096 break;
11098 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11099 == min_hi
11100 && TREE_INT_CST_LOW (arg1) == min_lo)
11101 switch (code)
11103 case LT_EXPR:
11104 return omit_one_operand (type, integer_zero_node, arg0);
11106 case LE_EXPR:
11107 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11109 case GE_EXPR:
11110 return omit_one_operand (type, integer_one_node, arg0);
11112 case GT_EXPR:
11113 return fold_build2 (NE_EXPR, type, op0, op1);
11115 default:
11116 break;
11118 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11119 == min_hi
11120 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11121 switch (code)
11123 case GE_EXPR:
11124 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11125 return fold_build2 (NE_EXPR, type, arg0, arg1);
11126 case LT_EXPR:
11127 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11128 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11129 default:
11130 break;
11133 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11134 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11135 && TYPE_UNSIGNED (arg1_type)
11136 /* We will flip the signedness of the comparison operator
11137 associated with the mode of arg1, so the sign bit is
11138 specified by this mode. Check that arg1 is the signed
11139 max associated with this sign bit. */
11140 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11141 /* signed_type does not work on pointer types. */
11142 && INTEGRAL_TYPE_P (arg1_type))
11144 /* The following case also applies to X < signed_max+1
11145 and X >= signed_max+1 because previous transformations. */
11146 if (code == LE_EXPR || code == GT_EXPR)
11148 tree st0, st1;
11149 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11150 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11151 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11152 type, fold_convert (st0, arg0),
11153 build_int_cst (st1, 0));
11159 /* If we are comparing an ABS_EXPR with a constant, we can
11160 convert all the cases into explicit comparisons, but they may
11161 well not be faster than doing the ABS and one comparison.
11162 But ABS (X) <= C is a range comparison, which becomes a subtraction
11163 and a comparison, and is probably faster. */
11164 if (code == LE_EXPR
11165 && TREE_CODE (arg1) == INTEGER_CST
11166 && TREE_CODE (arg0) == ABS_EXPR
11167 && ! TREE_SIDE_EFFECTS (arg0)
11168 && (0 != (tem = negate_expr (arg1)))
11169 && TREE_CODE (tem) == INTEGER_CST
11170 && ! TREE_CONSTANT_OVERFLOW (tem))
11171 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11172 build2 (GE_EXPR, type,
11173 TREE_OPERAND (arg0, 0), tem),
11174 build2 (LE_EXPR, type,
11175 TREE_OPERAND (arg0, 0), arg1));
11177 /* Convert ABS_EXPR<x> >= 0 to true. */
11178 if (code == GE_EXPR
11179 && tree_expr_nonnegative_p (arg0)
11180 && (integer_zerop (arg1)
11181 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11182 && real_zerop (arg1))))
11183 return omit_one_operand (type, integer_one_node, arg0);
11185 /* Convert ABS_EXPR<x> < 0 to false. */
11186 if (code == LT_EXPR
11187 && tree_expr_nonnegative_p (arg0)
11188 && (integer_zerop (arg1) || real_zerop (arg1)))
11189 return omit_one_operand (type, integer_zero_node, arg0);
11191 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11192 and similarly for >= into !=. */
11193 if ((code == LT_EXPR || code == GE_EXPR)
11194 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11195 && TREE_CODE (arg1) == LSHIFT_EXPR
11196 && integer_onep (TREE_OPERAND (arg1, 0)))
11197 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11198 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11199 TREE_OPERAND (arg1, 1)),
11200 build_int_cst (TREE_TYPE (arg0), 0));
11202 if ((code == LT_EXPR || code == GE_EXPR)
11203 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11204 && (TREE_CODE (arg1) == NOP_EXPR
11205 || TREE_CODE (arg1) == CONVERT_EXPR)
11206 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11207 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11208 return
11209 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11210 fold_convert (TREE_TYPE (arg0),
11211 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11212 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11213 1))),
11214 build_int_cst (TREE_TYPE (arg0), 0));
11216 return NULL_TREE;
11218 case UNORDERED_EXPR:
11219 case ORDERED_EXPR:
11220 case UNLT_EXPR:
11221 case UNLE_EXPR:
11222 case UNGT_EXPR:
11223 case UNGE_EXPR:
11224 case UNEQ_EXPR:
11225 case LTGT_EXPR:
11226 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11228 t1 = fold_relational_const (code, type, arg0, arg1);
11229 if (t1 != NULL_TREE)
11230 return t1;
11233 /* If the first operand is NaN, the result is constant. */
11234 if (TREE_CODE (arg0) == REAL_CST
11235 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11236 && (code != LTGT_EXPR || ! flag_trapping_math))
11238 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11239 ? integer_zero_node
11240 : integer_one_node;
11241 return omit_one_operand (type, t1, arg1);
11244 /* If the second operand is NaN, the result is constant. */
11245 if (TREE_CODE (arg1) == REAL_CST
11246 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11247 && (code != LTGT_EXPR || ! flag_trapping_math))
11249 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11250 ? integer_zero_node
11251 : integer_one_node;
11252 return omit_one_operand (type, t1, arg0);
11255 /* Simplify unordered comparison of something with itself. */
11256 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11257 && operand_equal_p (arg0, arg1, 0))
11258 return constant_boolean_node (1, type);
11260 if (code == LTGT_EXPR
11261 && !flag_trapping_math
11262 && operand_equal_p (arg0, arg1, 0))
11263 return constant_boolean_node (0, type);
11265 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11267 tree targ0 = strip_float_extensions (arg0);
11268 tree targ1 = strip_float_extensions (arg1);
11269 tree newtype = TREE_TYPE (targ0);
11271 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11272 newtype = TREE_TYPE (targ1);
11274 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11275 return fold_build2 (code, type, fold_convert (newtype, targ0),
11276 fold_convert (newtype, targ1));
11279 return NULL_TREE;
11281 case COMPOUND_EXPR:
11282 /* When pedantic, a compound expression can be neither an lvalue
11283 nor an integer constant expression. */
11284 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11285 return NULL_TREE;
11286 /* Don't let (0, 0) be null pointer constant. */
11287 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11288 : fold_convert (type, arg1);
11289 return pedantic_non_lvalue (tem);
11291 case COMPLEX_EXPR:
11292 if ((TREE_CODE (arg0) == REAL_CST
11293 && TREE_CODE (arg1) == REAL_CST)
11294 || (TREE_CODE (arg0) == INTEGER_CST
11295 && TREE_CODE (arg1) == INTEGER_CST))
11296 return build_complex (type, arg0, arg1);
11297 return NULL_TREE;
11299 case ASSERT_EXPR:
11300 /* An ASSERT_EXPR should never be passed to fold_binary. */
11301 gcc_unreachable ();
11303 default:
11304 return NULL_TREE;
11305 } /* switch (code) */
11308 /* Callback for walk_tree, looking for LABEL_EXPR.
11309 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11310 Do not check the sub-tree of GOTO_EXPR. */
11312 static tree
11313 contains_label_1 (tree *tp,
11314 int *walk_subtrees,
11315 void *data ATTRIBUTE_UNUSED)
11317 switch (TREE_CODE (*tp))
11319 case LABEL_EXPR:
11320 return *tp;
11321 case GOTO_EXPR:
11322 *walk_subtrees = 0;
11323 /* no break */
11324 default:
11325 return NULL_TREE;
11329 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11330 accessible from outside the sub-tree. Returns NULL_TREE if no
11331 addressable label is found. */
11333 static bool
11334 contains_label_p (tree st)
11336 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      /* Fold a field access of a constant CONSTRUCTOR to the value
	 recorded for that field, when one is present.  */
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}
      /* A ? X : X simplifies to X, regardless of A.  */
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      /* Build a double-word mask of the bits between INNER_WIDTH
		 and OUTER_WIDTH (the bits that need to be all-0 or all-1
		 in ARG1 for the transformation to be valid).  */
	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;

    case BIT_FIELD_REF:
      /* Extract a constant element of a VECTOR_CST when the reference
	 is aligned to an element boundary and has element size.  */
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		/* Trailing elements omitted from the list are zero.  */
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
/* With fold checking enabled the real work is done here under the
   name fold_1; the checksumming wrapper below takes the name fold.  */
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* Dispatch to the arity-specific folders; each returns NULL_TREE
     when it could not simplify, in which case the original EXPR is
     returned unchanged.  */
  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
11722 #ifdef ENABLE_FOLD_CHECKING
11723 #undef fold
11725 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11726 static void fold_check_failed (tree, tree);
11727 void print_fold_checksum (tree);
/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  /* The hash table tracks already-visited nodes so shared subtrees
     are hashed only once per walk.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  /* fold was #defined to fold_1 above, so this calls the real folder.  */
  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  /* Any difference means fold_1 mutated its input in place, which is
     a bug; report it fatally.  */
  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
11760 void
11761 print_fold_checksum (tree expr)
11763 struct md5_ctx ctx;
11764 unsigned char checksum[16], cnt;
11765 htab_t ht;
11767 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11768 md5_init_ctx (&ctx);
11769 fold_checksum_tree (expr, &ctx, ht);
11770 md5_finish_ctx (&ctx, checksum);
11771 htab_delete (ht);
11772 for (cnt = 0; cnt < 16; ++cnt)
11773 fprintf (stderr, "%02x", checksum[cnt]);
11774 putc ('\n', stderr);
/* Report that fold modified the tree it was handed: called when the
   before/after checksums computed by the checking variant of fold
   disagree.  internal_error does not return.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
/* Fold an MD5 digest of EXPR (recursively) into CTX, using hash table
   HT to avoid re-hashing shared subtrees.  Fields that fold is allowed
   to modify (DECL_ASSEMBLER_NAME, type pointer/reference caches, cached
   values, placeholder flags) are masked out via a stack copy before
   hashing so legitimate modifications do not trip the checksum.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  /* Scratch copy used when some fields of EXPR must be ignored;
     tree_function_decl is the largest node we may need to copy.  */
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  /* Hash the node's own bytes, then recurse into the trees it points
     to according to its code class.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Walk the chain iteratively instead of recursing, to keep
	     stack usage bounded on long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
11927 #endif
11929 /* Fold a unary tree expression with code CODE of type TYPE with an
11930 operand OP0. Return a folded expression if successful. Otherwise,
11931 return a tree expression with code CODE of type TYPE with an
11932 operand OP0. */
11934 tree
11935 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11937 tree tem;
11938 #ifdef ENABLE_FOLD_CHECKING
11939 unsigned char checksum_before[16], checksum_after[16];
11940 struct md5_ctx ctx;
11941 htab_t ht;
11943 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11944 md5_init_ctx (&ctx);
11945 fold_checksum_tree (op0, &ctx, ht);
11946 md5_finish_ctx (&ctx, checksum_before);
11947 htab_empty (ht);
11948 #endif
11950 tem = fold_unary (code, type, op0);
11951 if (!tem)
11952 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11954 #ifdef ENABLE_FOLD_CHECKING
11955 md5_init_ctx (&ctx);
11956 fold_checksum_tree (op0, &ctx, ht);
11957 md5_finish_ctx (&ctx, checksum_after);
11958 htab_delete (ht);
11960 if (memcmp (checksum_before, checksum_after, 16))
11961 fold_check_failed (op0, tem);
11962 #endif
11963 return tem;
11966 /* Fold a binary tree expression with code CODE of type TYPE with
11967 operands OP0 and OP1. Return a folded expression if successful.
11968 Otherwise, return a tree expression with code CODE of type TYPE
11969 with operands OP0 and OP1. */
11971 tree
11972 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11973 MEM_STAT_DECL)
11975 tree tem;
11976 #ifdef ENABLE_FOLD_CHECKING
11977 unsigned char checksum_before_op0[16],
11978 checksum_before_op1[16],
11979 checksum_after_op0[16],
11980 checksum_after_op1[16];
11981 struct md5_ctx ctx;
11982 htab_t ht;
/* Checksum both operands before folding; fold must not mutate its
   arguments, and we verify that after the fold below.  */
11984 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11985 md5_init_ctx (&ctx);
11986 fold_checksum_tree (op0, &ctx, ht);
11987 md5_finish_ctx (&ctx, checksum_before_op0);
11988 htab_empty (ht);
11990 md5_init_ctx (&ctx);
11991 fold_checksum_tree (op1, &ctx, ht);
11992 md5_finish_ctx (&ctx, checksum_before_op1);
11993 htab_empty (ht);
11994 #endif
11996 tem = fold_binary (code, type, op0, op1);
11997 if (!tem)
11998 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12000 #ifdef ENABLE_FOLD_CHECKING
/* Re-checksum OP0 and OP1 and abort via fold_check_failed if folding
   modified either operand in place.  */
12001 md5_init_ctx (&ctx);
12002 fold_checksum_tree (op0, &ctx, ht);
12003 md5_finish_ctx (&ctx, checksum_after_op0);
12004 htab_empty (ht);
12006 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12007 fold_check_failed (op0, tem);
12009 md5_init_ctx (&ctx);
12010 fold_checksum_tree (op1, &ctx, ht);
12011 md5_finish_ctx (&ctx, checksum_after_op1);
12012 htab_delete (ht);
12014 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12015 fold_check_failed (op1, tem);
12016 #endif
12017 return tem;
12020 /* Fold a ternary tree expression with code CODE of type TYPE with
12021 operands OP0, OP1, and OP2. Return a folded expression if
12022 successful. Otherwise, return a tree expression with code CODE of
12023 type TYPE with operands OP0, OP1, and OP2. */
12025 tree
12026 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12027 MEM_STAT_DECL)
12029 tree tem;
12030 #ifdef ENABLE_FOLD_CHECKING
12031 unsigned char checksum_before_op0[16],
12032 checksum_before_op1[16],
12033 checksum_before_op2[16],
12034 checksum_after_op0[16],
12035 checksum_after_op1[16],
12036 checksum_after_op2[16];
12037 struct md5_ctx ctx;
12038 htab_t ht;
/* Checksum all three operands before folding; fold must not mutate its
   arguments, and we verify that after the fold below.  */
12040 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12041 md5_init_ctx (&ctx);
12042 fold_checksum_tree (op0, &ctx, ht);
12043 md5_finish_ctx (&ctx, checksum_before_op0);
12044 htab_empty (ht);
12046 md5_init_ctx (&ctx);
12047 fold_checksum_tree (op1, &ctx, ht);
12048 md5_finish_ctx (&ctx, checksum_before_op1);
12049 htab_empty (ht);
12051 md5_init_ctx (&ctx);
12052 fold_checksum_tree (op2, &ctx, ht);
12053 md5_finish_ctx (&ctx, checksum_before_op2);
12054 htab_empty (ht);
12055 #endif
12057 tem = fold_ternary (code, type, op0, op1, op2);
12058 if (!tem)
12059 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12061 #ifdef ENABLE_FOLD_CHECKING
/* Re-checksum each operand and abort via fold_check_failed if folding
   modified any of them in place.  */
12062 md5_init_ctx (&ctx);
12063 fold_checksum_tree (op0, &ctx, ht);
12064 md5_finish_ctx (&ctx, checksum_after_op0);
12065 htab_empty (ht);
12067 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12068 fold_check_failed (op0, tem);
12070 md5_init_ctx (&ctx);
12071 fold_checksum_tree (op1, &ctx, ht);
12072 md5_finish_ctx (&ctx, checksum_after_op1);
12073 htab_empty (ht);
12075 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12076 fold_check_failed (op1, tem);
12078 md5_init_ctx (&ctx);
12079 fold_checksum_tree (op2, &ctx, ht);
12080 md5_finish_ctx (&ctx, checksum_after_op2);
12081 htab_delete (ht);
12083 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12084 fold_check_failed (op2, tem);
12085 #endif
12086 return tem;
12089 /* Perform constant folding and related simplification of initializer
12090 expression EXPR. These behave identically to "fold_buildN" but ignore
12091 potential run-time traps and exceptions that fold must preserve. */
12093 #define START_FOLD_INIT \
12094 int saved_signaling_nans = flag_signaling_nans;\
12095 int saved_trapping_math = flag_trapping_math;\
12096 int saved_rounding_math = flag_rounding_math;\
12097 int saved_trapv = flag_trapv;\
12098 int saved_folding_initializer = folding_initializer;\
12099 flag_signaling_nans = 0;\
12100 flag_trapping_math = 0;\
12101 flag_rounding_math = 0;\
12102 flag_trapv = 0;\
12103 folding_initializer = 1;
/* END_FOLD_INIT restores the flags saved by START_FOLD_INIT; the two
   macros must be used as a pair within a single block.  */
12105 #define END_FOLD_INIT \
12106 flag_signaling_nans = saved_signaling_nans;\
12107 flag_trapping_math = saved_trapping_math;\
12108 flag_rounding_math = saved_rounding_math;\
12109 flag_trapv = saved_trapv;\
12110 folding_initializer = saved_folding_initializer;
12112 tree
12113 fold_build1_initializer (enum tree_code code, tree type, tree op)
12115 tree result;
12116 START_FOLD_INIT;
12118 result = fold_build1 (code, type, op);
12120 END_FOLD_INIT;
12121 return result;
12124 tree
12125 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12127 tree result;
12128 START_FOLD_INIT;
12130 result = fold_build2 (code, type, op0, op1);
12132 END_FOLD_INIT;
12133 return result;
12136 tree
12137 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12138 tree op2)
12140 tree result;
12141 START_FOLD_INIT;
12143 result = fold_build3 (code, type, op0, op1, op2);
12145 END_FOLD_INIT;
12146 return result;
12149 #undef START_FOLD_INIT
12150 #undef END_FOLD_INIT
12152 /* Determine if first argument is a multiple of second argument. Return 0 if
12153 it is not, or we cannot easily determine it to be.
12155 An example of the sort of thing we care about (at this point; this routine
12156 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12157 fold cases do now) is discovering that
12159 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12161 is a multiple of
12163 SAVE_EXPR (J * 8)
12165 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12167 This code also handles discovering that
12169 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12171 is a multiple of 8 so we don't have to worry about dealing with a
12172 possible remainder.
12174 Note that we *look* inside a SAVE_EXPR only to determine how it was
12175 calculated; it is not safe for fold to do much of anything else with the
12176 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12177 at run time. For example, the latter example above *cannot* be implemented
12178 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12179 evaluation time of the original SAVE_EXPR is not necessarily the same at
12180 the time the new expression is evaluated. The only optimization of this
12181 sort that would be valid is changing
12183 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12185 divided by 8 to
12187 SAVE_EXPR (I) * SAVE_EXPR (J)
12189 (where the same SAVE_EXPR (J) is used in the original and the
12190 transformed version). */
12192 static int
12193 multiple_of_p (tree type, tree top, tree bottom)
12195 if (operand_equal_p (top, bottom, 0))
12196 return 1;
/* Only integral types are handled; anything else is a conservative "no".  */
12198 if (TREE_CODE (type) != INTEGER_TYPE)
12199 return 0;
12201 switch (TREE_CODE (top))
12203 case BIT_AND_EXPR:
12204 /* Bitwise and provides a power of two multiple. If the mask is
12205 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12206 if (!integer_pow2p (bottom))
12207 return 0;
12208 /* FALLTHRU */
12210 case MULT_EXPR:
12211 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12212 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))12214 case PLUS_EXPR:
12215 case MINUS_EXPR:
12216 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12217 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* X << C is a multiple of BOTTOM whenever (1 << C) is, provided the
   shift count is a sane constant and the shift does not overflow.  */
12219 case LSHIFT_EXPR:
12220 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12222 tree op1, t1;
12224 op1 = TREE_OPERAND (top, 1);
12225 /* const_binop may not detect overflow correctly,
12226 so check for it explicitly here. */
12227 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12228 > TREE_INT_CST_LOW (op1)
12229 && TREE_INT_CST_HIGH (op1) == 0
12230 && 0 != (t1 = fold_convert (type,
12231 const_binop (LSHIFT_EXPR,
12232 size_one_node,
12233 op1, 0)))
12234 && ! TREE_OVERFLOW (t1))
12235 return multiple_of_p (type, t1, bottom);
12237 return 0;
12239 case NOP_EXPR:
12240 /* Can't handle conversions from non-integral or wider integral type. */
12241 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12242 || (TYPE_PRECISION (type)
12243 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12244 return 0;
12246 /* .. fall through ... */
12248 case SAVE_EXPR:
12249 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Two constants: decide with an exact TRUNC_MOD_EXPR.  Negative values
   in an unsigned TYPE are punted since the modulo would be misleading.  */
12251 case INTEGER_CST:
12252 if (TREE_CODE (bottom) != INTEGER_CST
12253 || (TYPE_UNSIGNED (type)
12254 && (tree_int_cst_sgn (top) < 0
12255 || tree_int_cst_sgn (bottom) < 0)))
12256 return 0;
12257 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12258 top, bottom, 0));
12260 default:
12261 return 0;
12265 /* Return true if `t' is known to be non-negative. */
12267 bool
12268 tree_expr_nonnegative_p (tree t)
12270 if (t == error_mark_node)
12271 return false;
/* Values of an unsigned type are trivially non-negative.  */
12273 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12274 return true;
12276 switch (TREE_CODE (t))
12278 case SSA_NAME:
12279 /* Query VRP to see if it has recorded any information about
12280 the range of this object. */
12281 return ssa_name_nonnegative_p (t);
12283 case ABS_EXPR:
12284 /* We can't return 1 if flag_wrapv is set because
12285 ABS_EXPR<INT_MIN> = INT_MIN. */
12286 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12287 return true;
12288 break;
12290 case INTEGER_CST:
12291 return tree_int_cst_sgn (t) >= 0;
12293 case REAL_CST:
12294 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* For floats there is no wrap-around, so a sum of non-negative
   operands is itself non-negative.  */
12296 case PLUS_EXPR:
12297 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12298 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12299 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12301 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12302 both unsigned and at least 2 bits shorter than the result. */
12303 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12304 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12305 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12307 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12308 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12309 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12310 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12312 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12313 TYPE_PRECISION (inner2)) + 1;
12314 return prec < TYPE_PRECISION (TREE_TYPE (t));
12317 break;
12319 case MULT_EXPR:
12320 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12322 /* x * x for floating point x is always non-negative. */
12323 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12324 return true;
12325 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12326 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12329 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12330 both unsigned and their total bits is shorter than the result. */
12331 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12332 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12333 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12335 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12336 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12337 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12338 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12339 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12340 < TYPE_PRECISION (TREE_TYPE (t));
12342 return false;
/* AND and MAX are non-negative if either operand is; the other codes
   below require both operands to be non-negative.  */
12344 case BIT_AND_EXPR:
12345 case MAX_EXPR:
12346 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12347 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12349 case BIT_IOR_EXPR:
12350 case BIT_XOR_EXPR:
12351 case MIN_EXPR:
12352 case RDIV_EXPR:
12353 case TRUNC_DIV_EXPR:
12354 case CEIL_DIV_EXPR:
12355 case FLOOR_DIV_EXPR:
12356 case ROUND_DIV_EXPR:
12357 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12358 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12360 case TRUNC_MOD_EXPR:
12361 case CEIL_MOD_EXPR:
12362 case FLOOR_MOD_EXPR:
12363 case ROUND_MOD_EXPR:
12364 case SAVE_EXPR:
12365 case NON_LVALUE_EXPR:
12366 case FLOAT_EXPR:
12367 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* For sequencing/assignment forms the value is that of the second
   operand.  */
12369 case COMPOUND_EXPR:
12370 case MODIFY_EXPR:
12371 case GIMPLE_MODIFY_STMT:
12372 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12374 case BIND_EXPR:
12375 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12377 case COND_EXPR:
12378 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12379 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12381 case NOP_EXPR:
12383 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12384 tree outer_type = TREE_TYPE (t);
12386 if (TREE_CODE (outer_type) == REAL_TYPE)
12388 if (TREE_CODE (inner_type) == REAL_TYPE)
12389 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12390 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12392 if (TYPE_UNSIGNED (inner_type))
12393 return true;
12394 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12397 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12399 if (TREE_CODE (inner_type) == REAL_TYPE)
12400 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12401 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12402 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12403 && TYPE_UNSIGNED (inner_type);
12406 break;
12408 case TARGET_EXPR:
12410 tree temp = TARGET_EXPR_SLOT (t);
12411 t = TARGET_EXPR_INITIAL (t);
12413 /* If the initializer is non-void, then it's a normal expression
12414 that will be assigned to the slot. */
12415 if (!VOID_TYPE_P (t))
12416 return tree_expr_nonnegative_p (t);
12418 /* Otherwise, the initializer sets the slot in some way. One common
12419 way is an assignment statement at the end of the initializer. */
12420 while (1)
12422 if (TREE_CODE (t) == BIND_EXPR)
12423 t = expr_last (BIND_EXPR_BODY (t));
12424 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12425 || TREE_CODE (t) == TRY_CATCH_EXPR)
12426 t = expr_last (TREE_OPERAND (t, 0));
12427 else if (TREE_CODE (t) == STATEMENT_LIST)
12428 t = expr_last (t);
12429 else
12430 break;
12432 if ((TREE_CODE (t) == MODIFY_EXPR
12433 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12434 && GENERIC_TREE_OPERAND (t, 0) == temp)
12435 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12437 return false;
/* Builtins with a known sign of result.  */
12440 case CALL_EXPR:
12442 tree fndecl = get_callee_fndecl (t);
12443 tree arglist = TREE_OPERAND (t, 1);
12444 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12445 switch (DECL_FUNCTION_CODE (fndecl))
12447 CASE_FLT_FN (BUILT_IN_ACOS):
12448 CASE_FLT_FN (BUILT_IN_ACOSH):
12449 CASE_FLT_FN (BUILT_IN_CABS):
12450 CASE_FLT_FN (BUILT_IN_COSH):
12451 CASE_FLT_FN (BUILT_IN_ERFC):
12452 CASE_FLT_FN (BUILT_IN_EXP):
12453 CASE_FLT_FN (BUILT_IN_EXP10):
12454 CASE_FLT_FN (BUILT_IN_EXP2):
12455 CASE_FLT_FN (BUILT_IN_FABS):
12456 CASE_FLT_FN (BUILT_IN_FDIM):
12457 CASE_FLT_FN (BUILT_IN_HYPOT):
12458 CASE_FLT_FN (BUILT_IN_POW10):
12459 CASE_INT_FN (BUILT_IN_FFS):
12460 CASE_INT_FN (BUILT_IN_PARITY):
12461 CASE_INT_FN (BUILT_IN_POPCOUNT):
12462 case BUILT_IN_BSWAP32:
12463 case BUILT_IN_BSWAP64:
12464 /* Always true. */
12465 return true;
12467 CASE_FLT_FN (BUILT_IN_SQRT):
12468 /* sqrt(-0.0) is -0.0. */
12469 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12470 return true;
12471 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12473 CASE_FLT_FN (BUILT_IN_ASINH):
12474 CASE_FLT_FN (BUILT_IN_ATAN):
12475 CASE_FLT_FN (BUILT_IN_ATANH):
12476 CASE_FLT_FN (BUILT_IN_CBRT):
12477 CASE_FLT_FN (BUILT_IN_CEIL):
12478 CASE_FLT_FN (BUILT_IN_ERF):
12479 CASE_FLT_FN (BUILT_IN_EXPM1):
12480 CASE_FLT_FN (BUILT_IN_FLOOR):
12481 CASE_FLT_FN (BUILT_IN_FMOD):
12482 CASE_FLT_FN (BUILT_IN_FREXP):
12483 CASE_FLT_FN (BUILT_IN_LCEIL):
12484 CASE_FLT_FN (BUILT_IN_LDEXP):
12485 CASE_FLT_FN (BUILT_IN_LFLOOR):
12486 CASE_FLT_FN (BUILT_IN_LLCEIL):
12487 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12488 CASE_FLT_FN (BUILT_IN_LLRINT):
12489 CASE_FLT_FN (BUILT_IN_LLROUND):
12490 CASE_FLT_FN (BUILT_IN_LRINT):
12491 CASE_FLT_FN (BUILT_IN_LROUND):
12492 CASE_FLT_FN (BUILT_IN_MODF):
12493 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12494 CASE_FLT_FN (BUILT_IN_RINT):
12495 CASE_FLT_FN (BUILT_IN_ROUND):
12496 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12497 CASE_FLT_FN (BUILT_IN_SINH):
12498 CASE_FLT_FN (BUILT_IN_TANH):
12499 CASE_FLT_FN (BUILT_IN_TRUNC):
12500 /* True if the 1st argument is nonnegative. */
12501 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12503 CASE_FLT_FN (BUILT_IN_FMAX):
12504 /* True if the 1st OR 2nd arguments are nonnegative. */
12505 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12506 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12508 CASE_FLT_FN (BUILT_IN_FMIN):
12509 /* True if the 1st AND 2nd arguments are nonnegative. */
12510 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12511 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12513 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12514 /* True if the 2nd argument is nonnegative. */
12515 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12517 CASE_FLT_FN (BUILT_IN_POWI):
12518 /* True if the 1st argument is nonnegative or the second
12519 argument is an even integer. */
12520 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12522 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12523 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12524 return true;
12526 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12528 CASE_FLT_FN (BUILT_IN_POW):
12529 /* True if the 1st argument is nonnegative or the second
12530 argument is an even integer valued real. */
12531 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12533 REAL_VALUE_TYPE c;
12534 HOST_WIDE_INT n;
12536 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12537 n = real_to_integer (&c);
12538 if ((n & 1) == 0)
12540 REAL_VALUE_TYPE cint;
12541 real_from_integer (&cint, VOIDmode, n,
12542 n < 0 ? -1 : 0, 0);
12543 if (real_identical (&c, &cint))
12544 return true;
12547 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12549 default:
12550 break;
12554 /* ... fall through ... */
12556 default:
12557 if (truth_value_p (TREE_CODE (t)))
12558 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12559 return true;
12562 /* We don't know sign of `t', so be conservative and return false. */
12563 return false;
12566 /* Return true when T is an address and is known to be nonzero.
12567 For floating point we further ensure that T is not denormal.
12568 Similar logic is present in nonzero_address in rtlanal.c. */
12570 bool
12571 tree_expr_nonzero_p (tree t)
12573 tree type = TREE_TYPE (t);
12575 /* Doing something useful for floating point would need more work. */
12576 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12577 return false;
12579 switch (TREE_CODE (t))
12581 case SSA_NAME:
12582 /* Query VRP to see if it has recorded any information about
12583 the range of this object. */
12584 return ssa_name_nonzero_p (t);
12586 case ABS_EXPR:
12587 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12589 case INTEGER_CST:
12590 /* We used to test for !integer_zerop here. This does not work correctly
12591 if TREE_CONSTANT_OVERFLOW (t). */
12592 return (TREE_INT_CST_LOW (t) != 0
12593 || TREE_INT_CST_HIGH (t) != 0);
/* Sums are analyzed only when overflow is undefined (signed, !flag_wrapv):
   a wrapping sum of nonzero values can be zero.  */
12595 case PLUS_EXPR:
12596 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12598 /* With the presence of negative values it is hard
12599 to say something. */
12600 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12601 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12602 return false;
12603 /* One of operands must be positive and the other non-negative. */
12604 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12605 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12607 break;
/* With undefined overflow, a product is nonzero iff both factors are.  */
12609 case MULT_EXPR:
12610 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12612 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12613 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12615 break;
/* A widening (or same-width) conversion preserves nonzero-ness.  */
12617 case NOP_EXPR:
12619 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12620 tree outer_type = TREE_TYPE (t);
12622 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12623 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12625 break;
12627 case ADDR_EXPR:
12629 tree base = get_base_address (TREE_OPERAND (t, 0));
12631 if (!base)
12632 return false;
12634 /* Weak declarations may link to NULL. */
12635 if (VAR_OR_FUNCTION_DECL_P (base))
12636 return !DECL_WEAK (base);
12638 /* Constants are never weak. */
12639 if (CONSTANT_CLASS_P (base))
12640 return true;
12642 return false;
12645 case COND_EXPR:
12646 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12647 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12649 case MIN_EXPR:
12650 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12651 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12653 case MAX_EXPR:
12654 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12656 /* When both operands are nonzero, then MAX must be too. */
12657 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12658 return true;
12660 /* MAX where operand 0 is positive is positive. */
12661 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12663 /* MAX where operand 1 is positive is positive. */
12664 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12665 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12666 return true;
12667 break;
/* For sequencing/assignment forms the value is that of the second
   operand.  */
12669 case COMPOUND_EXPR:
12670 case MODIFY_EXPR:
12671 case GIMPLE_MODIFY_STMT:
12672 case BIND_EXPR:
12673 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12675 case SAVE_EXPR:
12676 case NON_LVALUE_EXPR:
12677 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12679 case BIT_IOR_EXPR:
12680 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12681 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Calls to alloca are assumed to return a non-NULL pointer.  */
12683 case CALL_EXPR:
12684 return alloca_call_p (t);
12686 default:
12687 break;
12689 return false;
12692 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12693 attempt to fold the expression to a constant without modifying TYPE,
12694 OP0 or OP1.
12696 If the expression could be simplified to a constant, then return
12697 the constant. If the expression would not be simplified to a
12698 constant, then return NULL_TREE. */
12700 tree
12701 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12703 tree tem = fold_binary (code, type, op0, op1);
12704 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12707 /* Given the components of a unary expression CODE, TYPE and OP0,
12708 attempt to fold the expression to a constant without modifying
12709 TYPE or OP0.
12711 If the expression could be simplified to a constant, then return
12712 the constant. If the expression would not be simplified to a
12713 constant, then return NULL_TREE. */
12715 tree
12716 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12718 tree tem = fold_unary (code, type, op0);
12719 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12722 /* If EXP represents referencing an element in a constant string
12723 (either via pointer arithmetic or array indexing), return the
12724 tree representing the value accessed, otherwise return NULL. */
12726 tree
12727 fold_read_from_constant_string (tree exp)
12729 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12731 tree exp1 = TREE_OPERAND (exp, 0);
12732 tree index;
12733 tree string;
12735 if (TREE_CODE (exp) == INDIRECT_REF)
12736 string = string_constant (exp1, &index);
12737 else
12739 tree low_bound = array_ref_low_bound (exp);
12740 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12742 /* Optimize the special-case of a zero lower bound.
12744 We convert the low_bound to sizetype to avoid some problems
12745 with constant folding. (E.g. suppose the lower bound is 1,
12746 and its mode is QI. Without the conversion, (ARRAY
12747 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12748 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12749 if (! integer_zerop (low_bound))
12750 index = size_diffop (index, fold_convert (sizetype, low_bound));
12752 string = exp1;
/* Fold only constant, in-bounds reads of single-byte, integer-mode
   elements of a STRING_CST.  */
12755 if (string
12756 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12757 && TREE_CODE (string) == STRING_CST
12758 && TREE_CODE (index) == INTEGER_CST
12759 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12760 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12761 == MODE_INT)
12762 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12763 return fold_convert (TREE_TYPE (exp),
12764 build_int_cst (NULL_TREE,
12765 (TREE_STRING_POINTER (string)
12766 [TREE_INT_CST_LOW (index)])));
12768 return NULL;
12771 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12772 an integer constant or real constant.
12774 TYPE is the type of the result. */
12776 static tree
12777 fold_negate_const (tree arg0, tree type)
12779 tree t = NULL_TREE;
12781 switch (TREE_CODE (arg0))
12783 case INTEGER_CST:
12785 unsigned HOST_WIDE_INT low;
12786 HOST_WIDE_INT high;
12787 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12788 TREE_INT_CST_HIGH (arg0),
12789 &low, &high);
12790 t = build_int_cst_wide (type, low, high);
/* Overflow counts only for signed types (e.g. negating the most
   negative value); unsigned negation simply wraps.  */
12791 t = force_fit_type (t, 1,
12792 (overflow | TREE_OVERFLOW (arg0))
12793 && !TYPE_UNSIGNED (type),
12794 TREE_CONSTANT_OVERFLOW (arg0));
12795 break;
12798 case REAL_CST:
12799 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12800 break;
12802 default:
12803 gcc_unreachable ();
12806 return t;
12809 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12810 an integer constant or real constant.
12812 TYPE is the type of the result. */
12814 tree
12815 fold_abs_const (tree arg0, tree type)
12817 tree t = NULL_TREE;
12819 switch (TREE_CODE (arg0))
12821 case INTEGER_CST:
12822 /* If the value is unsigned, then the absolute value is
12823 the same as the ordinary value. */
12824 if (TYPE_UNSIGNED (type))
12825 t = arg0;
12826 /* Similarly, if the value is non-negative. */
12827 else if (INT_CST_LT (integer_minus_one_node, arg0))
12828 t = arg0;
12829 /* If the value is negative, then the absolute value is
12830 its negation. */
12831 else
12833 unsigned HOST_WIDE_INT low;
12834 HOST_WIDE_INT high;
12835 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12836 TREE_INT_CST_HIGH (arg0),
12837 &low, &high);
12838 t = build_int_cst_wide (type, low, high);
/* Negating the most negative value overflows; record that.  */
12839 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12840 TREE_CONSTANT_OVERFLOW (arg0));
12842 break;
12844 case REAL_CST:
12845 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12846 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12847 else
12848 t = arg0;
12849 break;
12851 default:
12852 gcc_unreachable ();
12855 return t;
12858 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12859 constant. TYPE is the type of the result. */
12861 static tree
12862 fold_not_const (tree arg0, tree type)
12864 tree t = NULL_TREE;
12866 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Complement both halves of the double-word constant, then truncate
   or sign-extend the result to TYPE via force_fit_type.  */
12868 t = build_int_cst_wide (type,
12869 ~ TREE_INT_CST_LOW (arg0),
12870 ~ TREE_INT_CST_HIGH (arg0));
12871 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12872 TREE_CONSTANT_OVERFLOW (arg0));
12874 return t;
12877 /* Given CODE, a relational operator, the target type, TYPE and two
12878 constant operands OP0 and OP1, return the result of the
12879 relational operation. If the result is not a compile time
12880 constant, then return NULL_TREE. */
12882 static tree
12883 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12885 int result, invert;
12887 /* From here on, the only cases we handle are when the result is
12888 known to be a constant. */
12890 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12892 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12893 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12895 /* Handle the cases where either operand is a NaN. */
12896 if (real_isnan (c0) || real_isnan (c1))
12898 switch (code)
12900 case EQ_EXPR:
12901 case ORDERED_EXPR:
12902 result = 0;
12903 break;
12905 case NE_EXPR:
12906 case UNORDERED_EXPR:
12907 case UNLT_EXPR:
12908 case UNLE_EXPR:
12909 case UNGT_EXPR:
12910 case UNGE_EXPR:
12911 case UNEQ_EXPR:
12912 result = 1;
12913 break;
12915 case LT_EXPR:
12916 case LE_EXPR:
12917 case GT_EXPR:
12918 case GE_EXPR:
12919 case LTGT_EXPR:
/* These signaling comparisons on a NaN may trap, so we must not fold
   them away when trapping math is in effect.  */
12920 if (flag_trapping_math)
12921 return NULL_TREE;
12922 result = 0;
12923 break;
12925 default:
12926 gcc_unreachable ();
12929 return constant_boolean_node (result, type);
12932 return constant_boolean_node (real_compare (code, c0, c1), type);
12935 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12937 To compute GT, swap the arguments and do LT.
12938 To compute GE, do LT and invert the result.
12939 To compute LE, swap the arguments, do LT and invert the result.
12940 To compute NE, do EQ and invert the result.
12942 Therefore, the code below must handle only EQ and LT. */
12944 if (code == LE_EXPR || code == GT_EXPR)
12946 tree tem = op0;
12947 op0 = op1;
12948 op1 = tem;
12949 code = swap_tree_comparison (code);
12952 /* Note that it is safe to invert for real values here because we
12953 have already handled the one case that it matters. */
12955 invert = 0;
12956 if (code == NE_EXPR || code == GE_EXPR)
12958 invert = 1;
12959 code = invert_tree_comparison (code, false);
12962 /* Compute a result for LT or EQ if args permit;
12963 Otherwise return T. */
12964 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12966 if (code == EQ_EXPR)
12967 result = tree_int_cst_equal (op0, op1);
12968 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12969 result = INT_CST_LT_UNSIGNED (op0, op1);
12970 else
12971 result = INT_CST_LT (op0, op1);
12973 else
12974 return NULL_TREE;
12976 if (invert)
12977 result ^= 1;
12978 return constant_boolean_node (result, type);
12981 /* Build an expression for the a clean point containing EXPR with type TYPE.
12982 Don't build a cleanup point expression for EXPR which don't have side
12983 effects. */
12985 tree
12986 fold_build_cleanup_point_expr (tree type, tree expr)
12988 /* If the expression does not have side effects then we don't have to wrap
12989 it with a cleanup point expression. */
12990 if (!TREE_SIDE_EFFECTS (expr))
12991 return expr;
12993 /* If the expression is a return, check to see if the expression inside the
12994 return has no side effects or the right hand side of the modify expression
12995 inside the return. If either don't have side effects set we don't need to
12996 wrap the expression in a cleanup point expression. Note we don't check the
12997 left hand side of the modify because it should always be a return decl. */
12998 if (TREE_CODE (expr) == RETURN_EXPR)
13000 tree op = TREE_OPERAND (expr, 0);
13001 if (!op || !TREE_SIDE_EFFECTS (op))
13002 return expr;
13003 op = TREE_OPERAND (op, 1);
13004 if (!TREE_SIDE_EFFECTS (op))
13005 return expr;
13008 return build1 (CLEANUP_POINT_EXPR, type, expr);
13011 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13012 avoid confusing the gimplify process. */
13014 tree
13015 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13017 /* The size of the object is not relevant when talking about its address. */
13018 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13019 t = TREE_OPERAND (t, 0);
13021 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13022 if (TREE_CODE (t) == INDIRECT_REF
13023 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13025 t = TREE_OPERAND (t, 0);
13026 if (TREE_TYPE (t) != ptrtype)
13027 t = build1 (NOP_EXPR, ptrtype, t);
13029 else
13031 tree base = t;
13033 while (handled_component_p (base))
13034 base = TREE_OPERAND (base, 0);
13035 if (DECL_P (base))
13036 TREE_ADDRESSABLE (base) = 1;
13038 t = build1 (ADDR_EXPR, ptrtype, t);
13041 return t;
13044 tree
13045 build_fold_addr_expr (tree t)
13047 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  TYPE is the type the resulting dereference is expected to
   have; each pattern below only fires when the types line up exactly.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  /* Look through no-op conversions; only a pointer can be dereferenced.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          /* Prefer folding a constant-string element load to a plain
             character constant when possible.  */
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          /* Index with the array's lower bound, which need not be zero
             (e.g. Fortran/Ada arrays).  */
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          /* The constant offset must equal the size of one scalar part,
             i.e. point exactly at the imaginary component.  */
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      /* Recursively simplify the inner dereference, then index element 0
         (or the domain's lower bound).  */
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
13133 /* Builds an expression for an indirection through T, simplifying some
13134 cases. */
13136 tree
13137 build_fold_indirect_ref (tree t)
13139 tree type = TREE_TYPE (TREE_TYPE (t));
13140 tree sub = fold_indirect_ref_1 (type, t);
13142 if (sub)
13143 return sub;
13144 else
13145 return build1 (INDIRECT_REF, type, t);
13148 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13150 tree
13151 fold_indirect_ref (tree t)
13153 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13155 if (sub)
13156 return sub;
13157 else
13158 return t;
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  /* An expression with no side effects can be dropped entirely.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Repeatedly peel off the outermost node as long as doing so cannot
     lose a side effect; stop (and return) at the first node we must
     keep.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        /* Unary operators never have side effects of their own here;
           descend into the operand.  */
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        /* Keep only the operand that carries the side effects; if both
           do, the whole node must be preserved.  */
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            /* The second operand's value is the result; if it has side
               effects the COMPOUND_EXPR must stay intact.  */
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            /* Both arms must be side-effect free before we can reduce
               the conditional to just its condition.  */
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
13214 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13215 This can only be applied to objects of a sizetype. */
13217 tree
13218 round_up (tree value, int divisor)
13220 tree div = NULL_TREE;
13222 gcc_assert (divisor > 0);
13223 if (divisor == 1)
13224 return value;
13226 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13227 have to do anything. Only do this when we are not given a const,
13228 because in that case, this check is more expensive than just
13229 doing it. */
13230 if (TREE_CODE (value) != INTEGER_CST)
13232 div = build_int_cst (TREE_TYPE (value), divisor);
13234 if (multiple_of_p (TREE_TYPE (value), value, div))
13235 return value;
13238 /* If divisor is a power of two, simplify this to bit manipulation. */
13239 if (divisor == (divisor & -divisor))
13241 tree t;
13243 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13244 value = size_binop (PLUS_EXPR, value, t);
13245 t = build_int_cst (TREE_TYPE (value), -divisor);
13246 value = size_binop (BIT_AND_EXPR, value, t);
13248 else
13250 if (!div)
13251 div = build_int_cst (TREE_TYPE (value), divisor);
13252 value = size_binop (CEIL_DIV_EXPR, value, div);
13253 value = size_binop (MULT_EXPR, value, div);
13256 return value;
13259 /* Likewise, but round down. */
13261 tree
13262 round_down (tree value, int divisor)
13264 tree div = NULL_TREE;
13266 gcc_assert (divisor > 0);
13267 if (divisor == 1)
13268 return value;
13270 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13271 have to do anything. Only do this when we are not given a const,
13272 because in that case, this check is more expensive than just
13273 doing it. */
13274 if (TREE_CODE (value) != INTEGER_CST)
13276 div = build_int_cst (TREE_TYPE (value), divisor);
13278 if (multiple_of_p (TREE_TYPE (value), value, div))
13279 return value;
13282 /* If divisor is a power of two, simplify this to bit manipulation. */
13283 if (divisor == (divisor & -divisor))
13285 tree t;
13287 t = build_int_cst (TREE_TYPE (value), -divisor);
13288 value = size_binop (BIT_AND_EXPR, value, t);
13290 else
13292 if (!div)
13293 div = build_int_cst (TREE_TYPE (value), divisor);
13294 value = size_binop (FLOOR_DIV_EXPR, value, div);
13295 value = size_binop (MULT_EXPR, value, div);
13298 return value;
13301 /* Returns the pointer to the base of the object addressed by EXP and
13302 extracts the information about the offset of the access, storing it
13303 to PBITPOS and POFFSET. */
13305 static tree
13306 split_address_to_core_and_offset (tree exp,
13307 HOST_WIDE_INT *pbitpos, tree *poffset)
13309 tree core;
13310 enum machine_mode mode;
13311 int unsignedp, volatilep;
13312 HOST_WIDE_INT bitsize;
13314 if (TREE_CODE (exp) == ADDR_EXPR)
13316 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13317 poffset, &mode, &unsignedp, &volatilep,
13318 false);
13319 core = build_fold_addr_expr (core);
13321 else
13323 core = exp;
13324 *pbitpos = 0;
13325 *poffset = NULL_TREE;
13328 return core;
13331 /* Returns true if addresses of E1 and E2 differ by a constant, false
13332 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13334 bool
13335 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13337 tree core1, core2;
13338 HOST_WIDE_INT bitpos1, bitpos2;
13339 tree toffset1, toffset2, tdiff, type;
13341 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13342 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13344 if (bitpos1 % BITS_PER_UNIT != 0
13345 || bitpos2 % BITS_PER_UNIT != 0
13346 || !operand_equal_p (core1, core2, 0))
13347 return false;
13349 if (toffset1 && toffset2)
13351 type = TREE_TYPE (toffset1);
13352 if (type != TREE_TYPE (toffset2))
13353 toffset2 = fold_convert (type, toffset2);
13355 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13356 if (!cst_and_fits_in_hwi (tdiff))
13357 return false;
13359 *diff = int_cst_value (tdiff);
13361 else if (toffset1 || toffset2)
13363 /* If only one of the offsets is non-constant, the difference cannot
13364 be a constant. */
13365 return false;
13367 else
13368 *diff = 0;
13370 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13371 return true;
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      /* The sign operation itself is dropped; return the (recursively
         stripped) operand.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      /* With sign-dependent rounding, x*y and (-x)*y may differ in more
         than just sign, so stripping is unsafe.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      /* Rebuild the operation only if stripping changed at least one
         operand.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      /* Only the second operand contributes the value; the first is
         kept for its side effects.  */
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      /* Strip sign ops from both arms; the condition is left alone.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
            arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
            /* omit_one_operand preserves arg1's side effects while
               discarding its value.  */
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions
               (those satisfying f(-x) == -f(x)).  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
                if (arg0)
                  return build_function_call_expr (get_callee_fndecl (exp),
                                                   build_tree_list (NULL_TREE,
                                                                    arg0));
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}