Missing piece for last change
[official-gcc.git] / gcc / fold-const.c
blobe9e3f9b128c9949848099ab06851b7c1d873666e
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
114 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
115 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
116 enum machine_mode *, int *, int *,
117 tree *, tree *);
118 static int all_ones_mask_p (const_tree, int);
119 static tree sign_bit_p (tree, const_tree);
120 static int simple_operand_p (const_tree);
121 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
122 static tree range_predecessor (tree);
123 static tree range_successor (tree);
124 static tree make_range (tree, int *, tree *, tree *, bool *);
125 static tree build_range_check (tree, tree, int, tree, tree);
126 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 tree);
128 static tree fold_range_test (enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
130 static tree unextend (tree, int, int, tree);
131 static tree fold_truthop (enum tree_code, tree, tree, tree);
132 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
133 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
134 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
135 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 tree, tree,
137 tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
151 addition.
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 sign. */
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
163 #define LOWPART(x) \
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
173 static void
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 static void
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
188 HOST_WIDE_INT *hi)
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
205 unsigned int prec;
206 int sign_extended_type;
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
210 prec = POINTER_SIZE;
211 else
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 else
226 h1 = 0;
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
246 h1 = -1;
248 else
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
253 h1 = -1;
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
258 *lv = l1;
259 *hv = h1;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
280 tree
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
283 bool overflowed)
285 int sign_extended_type;
286 bool overflow;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
298 if (overflowed
299 || overflowable < 0
300 || (overflowable > 0 && sign_extended_type))
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
307 return t;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 bool unsigned_p)
327 unsigned HOST_WIDE_INT l;
328 HOST_WIDE_INT h;
330 l = l1 + l2;
331 h = h1 + h2 + (l < l1);
333 *lv = l;
334 *hv = h;
336 if (unsigned_p)
337 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 else
339 return OVERFLOW_SUM_SIGN (h1, h2, h);
342 /* Negate a doubleword integer with doubleword result.
343 Return nonzero if the operation overflows, assuming it's signed.
344 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
345 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
348 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
349 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
351 if (l1 == 0)
353 *lv = 0;
354 *hv = - h1;
355 return (*hv & h1) < 0;
357 else
359 *lv = -l1;
360 *hv = ~h1;
361 return 0;
365 /* Multiply two doubleword integers with doubleword result.
366 Return nonzero if the operation overflows according to UNSIGNED_P.
367 Each argument is given as two `HOST_WIDE_INT' pieces.
368 One argument is L1 and H1; the other, L2 and H2.
369 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
372 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
373 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
374 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
375 bool unsigned_p)
377 HOST_WIDE_INT arg1[4];
378 HOST_WIDE_INT arg2[4];
379 HOST_WIDE_INT prod[4 * 2];
380 unsigned HOST_WIDE_INT carry;
381 int i, j, k;
382 unsigned HOST_WIDE_INT toplow, neglow;
383 HOST_WIDE_INT tophigh, neghigh;
385 encode (arg1, l1, h1);
386 encode (arg2, l2, h2);
388 memset (prod, 0, sizeof prod);
390 for (i = 0; i < 4; i++)
392 carry = 0;
393 for (j = 0; j < 4; j++)
395 k = i + j;
396 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
397 carry += arg1[i] * arg2[j];
398 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 carry += prod[k];
400 prod[k] = LOWPART (carry);
401 carry = HIGHPART (carry);
403 prod[i + 4] = carry;
406 decode (prod, lv, hv);
407 decode (prod + 4, &toplow, &tophigh);
409 /* Unsigned overflow is immediate. */
410 if (unsigned_p)
411 return (toplow | tophigh) != 0;
413 /* Check for signed overflow by calculating the signed representation of the
414 top half of the result; it should agree with the low half's sign bit. */
415 if (h1 < 0)
417 neg_double (l2, h2, &neglow, &neghigh);
418 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
420 if (h2 < 0)
422 neg_double (l1, h1, &neglow, &neghigh);
423 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
425 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428 /* Shift the doubleword integer in L1, H1 left by COUNT places
429 keeping only PREC bits of result.
430 Shift right if COUNT is negative.
431 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434 void
435 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
436 HOST_WIDE_INT count, unsigned int prec,
437 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
439 unsigned HOST_WIDE_INT signmask;
441 if (count < 0)
443 rshift_double (l1, h1, -count, prec, lv, hv, arith);
444 return;
447 if (SHIFT_COUNT_TRUNCATED)
448 count %= prec;
450 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
452 /* Shifting by the host word size is undefined according to the
453 ANSI standard, so we must handle this as a special case. */
454 *hv = 0;
455 *lv = 0;
457 else if (count >= HOST_BITS_PER_WIDE_INT)
459 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
460 *lv = 0;
462 else
464 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
465 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
466 *lv = l1 << count;
469 /* Sign extend all bits that are beyond the precision. */
471 signmask = -((prec > HOST_BITS_PER_WIDE_INT
472 ? ((unsigned HOST_WIDE_INT) *hv
473 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
474 : (*lv >> (prec - 1))) & 1);
476 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
478 else if (prec >= HOST_BITS_PER_WIDE_INT)
480 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
481 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
483 else
485 *hv = signmask;
486 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
487 *lv |= signmask << prec;
491 /* Shift the doubleword integer in L1, H1 right by COUNT places
492 keeping only PREC bits of result. COUNT must be positive.
493 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
494 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
496 void
497 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
498 HOST_WIDE_INT count, unsigned int prec,
499 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
500 int arith)
502 unsigned HOST_WIDE_INT signmask;
504 signmask = (arith
505 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
506 : 0);
508 if (SHIFT_COUNT_TRUNCATED)
509 count %= prec;
511 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
513 /* Shifting by the host word size is undefined according to the
514 ANSI standard, so we must handle this as a special case. */
515 *hv = 0;
516 *lv = 0;
518 else if (count >= HOST_BITS_PER_WIDE_INT)
520 *hv = 0;
521 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
523 else
525 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
526 *lv = ((l1 >> count)
527 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530 /* Zero / sign extend all bits that are beyond the precision. */
532 if (count >= (HOST_WIDE_INT)prec)
534 *hv = signmask;
535 *lv = signmask;
537 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
539 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
541 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
542 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
544 else
546 *hv = signmask;
547 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
548 *lv |= signmask << (prec - count);
552 /* Rotate the doubleword integer in L1, H1 left by COUNT places
553 keeping only PREC bits of result.
554 Rotate right if COUNT is negative.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 void
558 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
565 count %= prec;
566 if (count < 0)
567 count += prec;
569 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
571 *lv = s1l | s2l;
572 *hv = s1h | s2h;
575 /* Rotate the doubleword integer in L1, H1 left by COUNT places
576 keeping only PREC bits of result. COUNT must be positive.
577 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
579 void
580 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
581 HOST_WIDE_INT count, unsigned int prec,
582 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
584 unsigned HOST_WIDE_INT s1l, s2l;
585 HOST_WIDE_INT s1h, s2h;
587 count %= prec;
588 if (count < 0)
589 count += prec;
591 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
592 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
593 *lv = s1l | s2l;
594 *hv = s1h | s2h;
597 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
598 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
599 CODE is a tree code for a kind of division, one of
600 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 or EXACT_DIV_EXPR
602 It controls how the quotient is rounded to an integer.
603 Return nonzero if the operation overflows.
604 UNS nonzero says do unsigned division. */
607 div_and_round_double (enum tree_code code, int uns,
608 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
609 HOST_WIDE_INT hnum_orig,
610 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
611 HOST_WIDE_INT hden_orig,
612 unsigned HOST_WIDE_INT *lquo,
613 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
614 HOST_WIDE_INT *hrem)
616 int quo_neg = 0;
617 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
618 HOST_WIDE_INT den[4], quo[4];
619 int i, j;
620 unsigned HOST_WIDE_INT work;
621 unsigned HOST_WIDE_INT carry = 0;
622 unsigned HOST_WIDE_INT lnum = lnum_orig;
623 HOST_WIDE_INT hnum = hnum_orig;
624 unsigned HOST_WIDE_INT lden = lden_orig;
625 HOST_WIDE_INT hden = hden_orig;
626 int overflow = 0;
628 if (hden == 0 && lden == 0)
629 overflow = 1, lden = 1;
631 /* Calculate quotient sign and convert operands to unsigned. */
632 if (!uns)
634 if (hnum < 0)
636 quo_neg = ~ quo_neg;
637 /* (minimum integer) / (-1) is the only overflow case. */
638 if (neg_double (lnum, hnum, &lnum, &hnum)
639 && ((HOST_WIDE_INT) lden & hden) == -1)
640 overflow = 1;
642 if (hden < 0)
644 quo_neg = ~ quo_neg;
645 neg_double (lden, hden, &lden, &hden);
649 if (hnum == 0 && hden == 0)
650 { /* single precision */
651 *hquo = *hrem = 0;
652 /* This unsigned division rounds toward zero. */
653 *lquo = lnum / lden;
654 goto finish_up;
657 if (hnum == 0)
658 { /* trivial case: dividend < divisor */
659 /* hden != 0 already checked. */
660 *hquo = *lquo = 0;
661 *hrem = hnum;
662 *lrem = lnum;
663 goto finish_up;
666 memset (quo, 0, sizeof quo);
668 memset (num, 0, sizeof num); /* to zero 9th element */
669 memset (den, 0, sizeof den);
671 encode (num, lnum, hnum);
672 encode (den, lden, hden);
674 /* Special code for when the divisor < BASE. */
675 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
677 /* hnum != 0 already checked. */
678 for (i = 4 - 1; i >= 0; i--)
680 work = num[i] + carry * BASE;
681 quo[i] = work / lden;
682 carry = work % lden;
685 else
687 /* Full double precision division,
688 with thanks to Don Knuth's "Seminumerical Algorithms". */
689 int num_hi_sig, den_hi_sig;
690 unsigned HOST_WIDE_INT quo_est, scale;
692 /* Find the highest nonzero divisor digit. */
693 for (i = 4 - 1;; i--)
694 if (den[i] != 0)
696 den_hi_sig = i;
697 break;
700 /* Insure that the first digit of the divisor is at least BASE/2.
701 This is required by the quotient digit estimation algorithm. */
703 scale = BASE / (den[den_hi_sig] + 1);
704 if (scale > 1)
705 { /* scale divisor and dividend */
706 carry = 0;
707 for (i = 0; i <= 4 - 1; i++)
709 work = (num[i] * scale) + carry;
710 num[i] = LOWPART (work);
711 carry = HIGHPART (work);
714 num[4] = carry;
715 carry = 0;
716 for (i = 0; i <= 4 - 1; i++)
718 work = (den[i] * scale) + carry;
719 den[i] = LOWPART (work);
720 carry = HIGHPART (work);
721 if (den[i] != 0) den_hi_sig = i;
725 num_hi_sig = 4;
727 /* Main loop */
728 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
730 /* Guess the next quotient digit, quo_est, by dividing the first
731 two remaining dividend digits by the high order quotient digit.
732 quo_est is never low and is at most 2 high. */
733 unsigned HOST_WIDE_INT tmp;
735 num_hi_sig = i + den_hi_sig + 1;
736 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
737 if (num[num_hi_sig] != den[den_hi_sig])
738 quo_est = work / den[den_hi_sig];
739 else
740 quo_est = BASE - 1;
742 /* Refine quo_est so it's usually correct, and at most one high. */
743 tmp = work - quo_est * den[den_hi_sig];
744 if (tmp < BASE
745 && (den[den_hi_sig - 1] * quo_est
746 > (tmp * BASE + num[num_hi_sig - 2])))
747 quo_est--;
749 /* Try QUO_EST as the quotient digit, by multiplying the
750 divisor by QUO_EST and subtracting from the remaining dividend.
751 Keep in mind that QUO_EST is the I - 1st digit. */
753 carry = 0;
754 for (j = 0; j <= den_hi_sig; j++)
756 work = quo_est * den[j] + carry;
757 carry = HIGHPART (work);
758 work = num[i + j] - LOWPART (work);
759 num[i + j] = LOWPART (work);
760 carry += HIGHPART (work) != 0;
763 /* If quo_est was high by one, then num[i] went negative and
764 we need to correct things. */
765 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
767 quo_est--;
768 carry = 0; /* add divisor back in */
769 for (j = 0; j <= den_hi_sig; j++)
771 work = num[i + j] + den[j] + carry;
772 carry = HIGHPART (work);
773 num[i + j] = LOWPART (work);
776 num [num_hi_sig] += carry;
779 /* Store the quotient digit. */
780 quo[i] = quo_est;
784 decode (quo, lquo, hquo);
786 finish_up:
787 /* If result is negative, make it so. */
788 if (quo_neg)
789 neg_double (*lquo, *hquo, lquo, hquo);
791 /* Compute trial remainder: rem = num - (quo * den) */
792 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
793 neg_double (*lrem, *hrem, lrem, hrem);
794 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
796 switch (code)
798 case TRUNC_DIV_EXPR:
799 case TRUNC_MOD_EXPR: /* round toward zero */
800 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
801 return overflow;
803 case FLOOR_DIV_EXPR:
804 case FLOOR_MOD_EXPR: /* round toward negative infinity */
805 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
807 /* quo = quo - 1; */
808 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
809 lquo, hquo);
811 else
812 return overflow;
813 break;
815 case CEIL_DIV_EXPR:
816 case CEIL_MOD_EXPR: /* round toward positive infinity */
817 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
820 lquo, hquo);
822 else
823 return overflow;
824 break;
826 case ROUND_DIV_EXPR:
827 case ROUND_MOD_EXPR: /* round to closest integer */
829 unsigned HOST_WIDE_INT labs_rem = *lrem;
830 HOST_WIDE_INT habs_rem = *hrem;
831 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
832 HOST_WIDE_INT habs_den = hden, htwice;
834 /* Get absolute values. */
835 if (*hrem < 0)
836 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 if (hden < 0)
838 neg_double (lden, hden, &labs_den, &habs_den);
840 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
841 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
842 labs_rem, habs_rem, &ltwice, &htwice);
844 if (((unsigned HOST_WIDE_INT) habs_den
845 < (unsigned HOST_WIDE_INT) htwice)
846 || (((unsigned HOST_WIDE_INT) habs_den
847 == (unsigned HOST_WIDE_INT) htwice)
848 && (labs_den <= ltwice)))
850 if (*hquo < 0)
851 /* quo = quo - 1; */
852 add_double (*lquo, *hquo,
853 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
854 else
855 /* quo = quo + 1; */
856 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
857 lquo, hquo);
859 else
860 return overflow;
862 break;
864 default:
865 gcc_unreachable ();
868 /* Compute true remainder: rem = num - (quo * den) */
869 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
870 neg_double (*lrem, *hrem, lrem, hrem);
871 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
872 return overflow;
875 /* If ARG2 divides ARG1 with zero remainder, carries out the division
876 of type CODE and returns the quotient.
877 Otherwise returns NULL_TREE. */
879 static tree
880 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
882 unsigned HOST_WIDE_INT int1l, int2l;
883 HOST_WIDE_INT int1h, int2h;
884 unsigned HOST_WIDE_INT quol, reml;
885 HOST_WIDE_INT quoh, remh;
886 tree type = TREE_TYPE (arg1);
887 int uns = TYPE_UNSIGNED (type);
889 int1l = TREE_INT_CST_LOW (arg1);
890 int1h = TREE_INT_CST_HIGH (arg1);
891 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
892 &obj[some_exotic_number]. */
893 if (POINTER_TYPE_P (type))
895 uns = false;
896 type = signed_type_for (type);
897 fit_double_type (int1l, int1h, &int1l, &int1h,
898 type);
900 else
901 fit_double_type (int1l, int1h, &int1l, &int1h, type);
902 int2l = TREE_INT_CST_LOW (arg2);
903 int2h = TREE_INT_CST_HIGH (arg2);
905 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
906 &quol, &quoh, &reml, &remh);
907 if (remh != 0 || reml != 0)
908 return NULL_TREE;
910 return build_int_cst_wide (type, quol, quoh);
913 /* This is nonzero if we should defer warnings about undefined
914 overflow. This facility exists because these warnings are a
915 special case. The code to estimate loop iterations does not want
916 to issue any warnings, since it works with expressions which do not
917 occur in user code. Various bits of cleanup code call fold(), but
918 only use the result if it has certain characteristics (e.g., is a
919 constant); that code only wants to issue a warning if the result is
920 used. */
922 static int fold_deferring_overflow_warnings;
924 /* If a warning about undefined overflow is deferred, this is the
925 warning. Note that this may cause us to turn two warnings into
926 one, but that is fine since it is sufficient to only give one
927 warning per expression. */
929 static const char* fold_deferred_overflow_warning;
931 /* If a warning about undefined overflow is deferred, this is the
932 level at which the warning should be emitted. */
934 static enum warn_strict_overflow_code fold_deferred_overflow_code;
936 /* Start deferring overflow warnings. We could use a stack here to
937 permit nested calls, but at present it is not necessary. */
939 void
940 fold_defer_overflow_warnings (void)
942 ++fold_deferring_overflow_warnings;
945 /* Stop deferring overflow warnings. If there is a pending warning,
946 and ISSUE is true, then issue the warning if appropriate. STMT is
947 the statement with which the warning should be associated (used for
948 location information); STMT may be NULL. CODE is the level of the
949 warning--a warn_strict_overflow_code value. This function will use
950 the smaller of CODE and the deferred code when deciding whether to
951 issue the warning. CODE may be zero to mean to always use the
952 deferred code. */
954 void
955 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
957 const char *warnmsg;
958 location_t locus;
960 gcc_assert (fold_deferring_overflow_warnings > 0);
961 --fold_deferring_overflow_warnings;
962 if (fold_deferring_overflow_warnings > 0)
964 if (fold_deferred_overflow_warning != NULL
965 && code != 0
966 && code < (int) fold_deferred_overflow_code)
967 fold_deferred_overflow_code = code;
968 return;
971 warnmsg = fold_deferred_overflow_warning;
972 fold_deferred_overflow_warning = NULL;
974 if (!issue || warnmsg == NULL)
975 return;
977 if (gimple_no_warning_p (stmt))
978 return;
980 /* Use the smallest code level when deciding to issue the
981 warning. */
982 if (code == 0 || code > (int) fold_deferred_overflow_code)
983 code = fold_deferred_overflow_code;
985 if (!issue_strict_overflow_warning (code))
986 return;
988 if (stmt == NULL)
989 locus = input_location;
990 else
991 locus = gimple_location (stmt);
992 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
995 /* Stop deferring overflow warnings, ignoring any deferred
996 warnings. */
998 void
999 fold_undefer_and_ignore_overflow_warnings (void)
1001 fold_undefer_overflow_warnings (false, NULL, 0);
1004 /* Whether we are deferring overflow warnings. */
1006 bool
1007 fold_deferring_overflow_warnings_p (void)
1009 return fold_deferring_overflow_warnings > 0;
1012 /* This is called when we fold something based on the fact that signed
1013 overflow is undefined. */
1015 static void
1016 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1018 if (fold_deferring_overflow_warnings > 0)
1020 if (fold_deferred_overflow_warning == NULL
1021 || wc < fold_deferred_overflow_code)
1023 fold_deferred_overflow_warning = gmsgid;
1024 fold_deferred_overflow_code = wc;
1027 else if (issue_strict_overflow_warning (wc))
1028 warning (OPT_Wstrict_overflow, gmsgid);
1031 /* Return true if the built-in mathematical function specified by CODE
1032 is odd, i.e. -f(x) == f(-x). */
1034 static bool
1035 negate_mathfn_p (enum built_in_function code)
1037 switch (code)
1039 CASE_FLT_FN (BUILT_IN_ASIN):
1040 CASE_FLT_FN (BUILT_IN_ASINH):
1041 CASE_FLT_FN (BUILT_IN_ATAN):
1042 CASE_FLT_FN (BUILT_IN_ATANH):
1043 CASE_FLT_FN (BUILT_IN_CASIN):
1044 CASE_FLT_FN (BUILT_IN_CASINH):
1045 CASE_FLT_FN (BUILT_IN_CATAN):
1046 CASE_FLT_FN (BUILT_IN_CATANH):
1047 CASE_FLT_FN (BUILT_IN_CBRT):
1048 CASE_FLT_FN (BUILT_IN_CPROJ):
1049 CASE_FLT_FN (BUILT_IN_CSIN):
1050 CASE_FLT_FN (BUILT_IN_CSINH):
1051 CASE_FLT_FN (BUILT_IN_CTAN):
1052 CASE_FLT_FN (BUILT_IN_CTANH):
1053 CASE_FLT_FN (BUILT_IN_ERF):
1054 CASE_FLT_FN (BUILT_IN_LLROUND):
1055 CASE_FLT_FN (BUILT_IN_LROUND):
1056 CASE_FLT_FN (BUILT_IN_ROUND):
1057 CASE_FLT_FN (BUILT_IN_SIN):
1058 CASE_FLT_FN (BUILT_IN_SINH):
1059 CASE_FLT_FN (BUILT_IN_TAN):
1060 CASE_FLT_FN (BUILT_IN_TANH):
1061 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 return true;
1064 CASE_FLT_FN (BUILT_IN_LLRINT):
1065 CASE_FLT_FN (BUILT_IN_LRINT):
1066 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1067 CASE_FLT_FN (BUILT_IN_RINT):
1068 return !flag_rounding_math;
1070 default:
1071 break;
1073 return false;
1076 /* Check whether we may negate an integer constant T without causing
1077 overflow. */
1079 bool
1080 may_negate_without_overflow_p (const_tree t)
1082 unsigned HOST_WIDE_INT val;
1083 unsigned int prec;
1084 tree type;
1086 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1088 type = TREE_TYPE (t);
1089 if (TYPE_UNSIGNED (type))
1090 return false;
1092 prec = TYPE_PRECISION (type);
1093 if (prec > HOST_BITS_PER_WIDE_INT)
1095 if (TREE_INT_CST_LOW (t) != 0)
1096 return true;
1097 prec -= HOST_BITS_PER_WIDE_INT;
1098 val = TREE_INT_CST_HIGH (t);
1100 else
1101 val = TREE_INT_CST_LOW (t);
1102 if (prec < HOST_BITS_PER_WIDE_INT)
1103 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1104 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1107 /* Determine whether an expression T can be cheaply negated using
1108 the function negate_expr without introducing undefined overflow. */
1110 static bool
1111 negate_expr_p (tree t)
1113 tree type;
1115 if (t == 0)
1116 return false;
1118 type = TREE_TYPE (t);
1120 STRIP_SIGN_NOPS (t);
1121 switch (TREE_CODE (t))
1123 case INTEGER_CST:
1124 if (TYPE_OVERFLOW_WRAPS (type))
1125 return true;
1127 /* Check that -CST will not overflow type. */
1128 return may_negate_without_overflow_p (t);
1129 case BIT_NOT_EXPR:
1130 return (INTEGRAL_TYPE_P (type)
1131 && TYPE_OVERFLOW_WRAPS (type));
1133 case FIXED_CST:
1134 case REAL_CST:
1135 case NEGATE_EXPR:
1136 return true;
1138 case COMPLEX_CST:
1139 return negate_expr_p (TREE_REALPART (t))
1140 && negate_expr_p (TREE_IMAGPART (t));
1142 case COMPLEX_EXPR:
1143 return negate_expr_p (TREE_OPERAND (t, 0))
1144 && negate_expr_p (TREE_OPERAND (t, 1));
1146 case CONJ_EXPR:
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1149 case PLUS_EXPR:
1150 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1151 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1152 return false;
1153 /* -(A + B) -> (-B) - A. */
1154 if (negate_expr_p (TREE_OPERAND (t, 1))
1155 && reorder_operands_p (TREE_OPERAND (t, 0),
1156 TREE_OPERAND (t, 1)))
1157 return true;
1158 /* -(A + B) -> (-A) - B. */
1159 return negate_expr_p (TREE_OPERAND (t, 0));
1161 case MINUS_EXPR:
1162 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1163 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1164 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1165 && reorder_operands_p (TREE_OPERAND (t, 0),
1166 TREE_OPERAND (t, 1));
1168 case MULT_EXPR:
1169 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1170 break;
1172 /* Fall through. */
1174 case RDIV_EXPR:
1175 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1176 return negate_expr_p (TREE_OPERAND (t, 1))
1177 || negate_expr_p (TREE_OPERAND (t, 0));
1178 break;
1180 case TRUNC_DIV_EXPR:
1181 case ROUND_DIV_EXPR:
1182 case FLOOR_DIV_EXPR:
1183 case CEIL_DIV_EXPR:
1184 case EXACT_DIV_EXPR:
1185 /* In general we can't negate A / B, because if A is INT_MIN and
1186 B is 1, we may turn this into INT_MIN / -1 which is undefined
1187 and actually traps on some architectures. But if overflow is
1188 undefined, we can negate, because - (INT_MIN / 1) is an
1189 overflow. */
1190 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1191 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1192 break;
1193 return negate_expr_p (TREE_OPERAND (t, 1))
1194 || negate_expr_p (TREE_OPERAND (t, 0));
1196 case NOP_EXPR:
1197 /* Negate -((double)float) as (double)(-float). */
1198 if (TREE_CODE (type) == REAL_TYPE)
1200 tree tem = strip_float_extensions (t);
1201 if (tem != t)
1202 return negate_expr_p (tem);
1204 break;
1206 case CALL_EXPR:
1207 /* Negate -f(x) as f(-x). */
1208 if (negate_mathfn_p (builtin_mathfn_code (t)))
1209 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1210 break;
1212 case RSHIFT_EXPR:
1213 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1214 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1216 tree op1 = TREE_OPERAND (t, 1);
1217 if (TREE_INT_CST_HIGH (op1) == 0
1218 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1219 == TREE_INT_CST_LOW (op1))
1220 return true;
1222 break;
1224 default:
1225 break;
1227 return false;
1230 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1231 simplification is possible.
1232 If negate_expr_p would return true for T, NULL_TREE will never be
1233 returned. */
1235 static tree
1236 fold_negate_expr (tree t)
1238 tree type = TREE_TYPE (t);
1239 tree tem;
1241 switch (TREE_CODE (t))
1243 /* Convert - (~A) to A + 1. */
1244 case BIT_NOT_EXPR:
1245 if (INTEGRAL_TYPE_P (type))
1246 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1247 build_int_cst (type, 1));
1248 break;
1250 case INTEGER_CST:
1251 tem = fold_negate_const (t, type);
1252 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1253 || !TYPE_OVERFLOW_TRAPS (type))
1254 return tem;
1255 break;
1257 case REAL_CST:
1258 tem = fold_negate_const (t, type);
1259 /* Two's complement FP formats, such as c4x, may overflow. */
1260 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1261 return tem;
1262 break;
1264 case FIXED_CST:
1265 tem = fold_negate_const (t, type);
1266 return tem;
1268 case COMPLEX_CST:
1270 tree rpart = negate_expr (TREE_REALPART (t));
1271 tree ipart = negate_expr (TREE_IMAGPART (t));
1273 if ((TREE_CODE (rpart) == REAL_CST
1274 && TREE_CODE (ipart) == REAL_CST)
1275 || (TREE_CODE (rpart) == INTEGER_CST
1276 && TREE_CODE (ipart) == INTEGER_CST))
1277 return build_complex (type, rpart, ipart);
1279 break;
1281 case COMPLEX_EXPR:
1282 if (negate_expr_p (t))
1283 return fold_build2 (COMPLEX_EXPR, type,
1284 fold_negate_expr (TREE_OPERAND (t, 0)),
1285 fold_negate_expr (TREE_OPERAND (t, 1)));
1286 break;
1288 case CONJ_EXPR:
1289 if (negate_expr_p (t))
1290 return fold_build1 (CONJ_EXPR, type,
1291 fold_negate_expr (TREE_OPERAND (t, 0)));
1292 break;
1294 case NEGATE_EXPR:
1295 case NEGATENV_EXPR:
1296 return TREE_OPERAND (t, 0);
1298 case PLUS_EXPR:
1299 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1300 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1302 /* -(A + B) -> (-B) - A. */
1303 if (negate_expr_p (TREE_OPERAND (t, 1))
1304 && reorder_operands_p (TREE_OPERAND (t, 0),
1305 TREE_OPERAND (t, 1)))
1307 tem = negate_expr (TREE_OPERAND (t, 1));
1308 return fold_build2 (MINUS_EXPR, type,
1309 tem, TREE_OPERAND (t, 0));
1312 /* -(A + B) -> (-A) - B. */
1313 if (negate_expr_p (TREE_OPERAND (t, 0)))
1315 tem = negate_expr (TREE_OPERAND (t, 0));
1316 return fold_build2 (MINUS_EXPR, type,
1317 tem, TREE_OPERAND (t, 1));
1320 break;
1322 case MINUS_EXPR:
1323 /* - (A - B) -> B - A */
1324 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1325 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1326 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1327 return fold_build2 (MINUS_EXPR, type,
1328 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1329 break;
1331 case MULT_EXPR:
1332 if (TYPE_UNSIGNED (type))
1333 break;
1335 /* Fall through. */
1337 case RDIV_EXPR:
1338 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1340 tem = TREE_OPERAND (t, 1);
1341 if (negate_expr_p (tem))
1342 return fold_build2 (TREE_CODE (t), type,
1343 TREE_OPERAND (t, 0), negate_expr (tem));
1344 tem = TREE_OPERAND (t, 0);
1345 if (negate_expr_p (tem))
1346 return fold_build2 (TREE_CODE (t), type,
1347 negate_expr (tem), TREE_OPERAND (t, 1));
1349 break;
1351 case TRUNC_DIV_EXPR:
1352 case ROUND_DIV_EXPR:
1353 case FLOOR_DIV_EXPR:
1354 case CEIL_DIV_EXPR:
1355 case EXACT_DIV_EXPR:
1356 /* In general we can't negate A / B, because if A is INT_MIN and
1357 B is 1, we may turn this into INT_MIN / -1 which is undefined
1358 and actually traps on some architectures. But if overflow is
1359 undefined, we can negate, because - (INT_MIN / 1) is an
1360 overflow. */
1361 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1363 const char * const warnmsg = G_("assuming signed overflow does not "
1364 "occur when negating a division");
1365 tem = TREE_OPERAND (t, 1);
1366 if (negate_expr_p (tem))
1368 if (INTEGRAL_TYPE_P (type)
1369 && (TREE_CODE (tem) != INTEGER_CST
1370 || integer_onep (tem)))
1371 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1372 return fold_build2 (TREE_CODE (t), type,
1373 TREE_OPERAND (t, 0), negate_expr (tem));
1375 tem = TREE_OPERAND (t, 0);
1376 if (negate_expr_p (tem))
1378 if (INTEGRAL_TYPE_P (type)
1379 && (TREE_CODE (tem) != INTEGER_CST
1380 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1381 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1382 return fold_build2 (TREE_CODE (t), type,
1383 negate_expr (tem), TREE_OPERAND (t, 1));
1386 break;
1388 case NOP_EXPR:
1389 /* Convert -((double)float) into (double)(-float). */
1390 if (TREE_CODE (type) == REAL_TYPE)
1392 tem = strip_float_extensions (t);
1393 if (tem != t && negate_expr_p (tem))
1394 return fold_convert (type, negate_expr (tem));
1396 break;
1398 case CALL_EXPR:
1399 /* Negate -f(x) as f(-x). */
1400 if (negate_mathfn_p (builtin_mathfn_code (t))
1401 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1403 tree fndecl, arg;
1405 fndecl = get_callee_fndecl (t);
1406 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1407 return build_call_expr (fndecl, 1, arg);
1409 break;
1411 case RSHIFT_EXPR:
1412 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1413 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1415 tree op1 = TREE_OPERAND (t, 1);
1416 if (TREE_INT_CST_HIGH (op1) == 0
1417 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1418 == TREE_INT_CST_LOW (op1))
1420 tree ntype = TYPE_UNSIGNED (type)
1421 ? signed_type_for (type)
1422 : unsigned_type_for (type);
1423 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1424 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1425 return fold_convert (type, temp);
1428 break;
1430 default:
1431 break;
1434 return NULL_TREE;
1437 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1438 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1439 return NULL_TREE. */
1441 static tree
1442 negate_expr (tree t)
1444 tree type, tem;
1446 if (t == NULL_TREE)
1447 return NULL_TREE;
1449 type = TREE_TYPE (t);
1450 STRIP_SIGN_NOPS (t);
1452 tem = fold_negate_expr (t);
1453 if (!tem)
1454 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1455 return fold_convert (type, tem);
1458 /* Split a tree IN into a constant, literal and variable parts that could be
1459 combined with CODE to make IN. "constant" means an expression with
1460 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1461 commutative arithmetic operation. Store the constant part into *CONP,
1462 the literal in *LITP and return the variable part. If a part isn't
1463 present, set it to null. If the tree does not decompose in this way,
1464 return the entire tree as the variable part and the other parts as null.
1466 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1467 case, we negate an operand that was subtracted. Except if it is a
1468 literal for which we use *MINUS_LITP instead.
1470 If NEGATE_P is true, we are negating all of IN, again except a literal
1471 for which we use *MINUS_LITP instead.
1473 If IN is itself a literal or constant, return it as appropriate.
1475 Note that we do not guarantee that any of the three values will be the
1476 same type as IN, but they will have the same signedness and mode. */
1478 static tree
1479 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1480 tree *minus_litp, int negate_p)
1482 tree var = 0;
1484 *conp = 0;
1485 *litp = 0;
1486 *minus_litp = 0;
1488 /* Strip any conversions that don't change the machine mode or signedness. */
1489 STRIP_SIGN_NOPS (in);
1491 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1492 || TREE_CODE (in) == FIXED_CST)
1493 *litp = in;
1494 else if (TREE_CODE (in) == code
1495 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1496 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1497 /* We can associate addition and subtraction together (even
1498 though the C standard doesn't say so) for integers because
1499 the value is not affected. For reals, the value might be
1500 affected, so we can't. */
1501 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1502 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1504 tree op0 = TREE_OPERAND (in, 0);
1505 tree op1 = TREE_OPERAND (in, 1);
1506 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1507 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1509 /* First see if either of the operands is a literal, then a constant. */
1510 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1511 || TREE_CODE (op0) == FIXED_CST)
1512 *litp = op0, op0 = 0;
1513 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1514 || TREE_CODE (op1) == FIXED_CST)
1515 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1517 if (op0 != 0 && TREE_CONSTANT (op0))
1518 *conp = op0, op0 = 0;
1519 else if (op1 != 0 && TREE_CONSTANT (op1))
1520 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1522 /* If we haven't dealt with either operand, this is not a case we can
1523 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1524 if (op0 != 0 && op1 != 0)
1525 var = in;
1526 else if (op0 != 0)
1527 var = op0;
1528 else
1529 var = op1, neg_var_p = neg1_p;
1531 /* Now do any needed negations. */
1532 if (neg_litp_p)
1533 *minus_litp = *litp, *litp = 0;
1534 if (neg_conp_p)
1535 *conp = negate_expr (*conp);
1536 if (neg_var_p)
1537 var = negate_expr (var);
1539 else if (TREE_CONSTANT (in))
1540 *conp = in;
1541 else
1542 var = in;
1544 if (negate_p)
1546 if (*litp)
1547 *minus_litp = *litp, *litp = 0;
1548 else if (*minus_litp)
1549 *litp = *minus_litp, *minus_litp = 0;
1550 *conp = negate_expr (*conp);
1551 var = negate_expr (var);
1554 return var;
1557 /* Re-associate trees split by the above function. T1 and T2 are either
1558 expressions to associate or null. Return the new expression, if any. If
1559 we build an operation, do it in TYPE and with CODE. */
1561 static tree
1562 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1564 if (t1 == 0)
1565 return t2;
1566 else if (t2 == 0)
1567 return t1;
1569 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1570 try to fold this since we will have infinite recursion. But do
1571 deal with any NEGATE_EXPRs. */
1572 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1573 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1575 if (code == PLUS_EXPR)
1577 if (TREE_CODE (t1) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1579 fold_convert (type, TREE_OPERAND (t1, 0)));
1580 else if (TREE_CODE (t2) == NEGATE_EXPR)
1581 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1582 fold_convert (type, TREE_OPERAND (t2, 0)));
1583 else if (integer_zerop (t2))
1584 return fold_convert (type, t1);
1586 else if (code == MINUS_EXPR)
1588 if (integer_zerop (t2))
1589 return fold_convert (type, t1);
1592 return build2 (code, type, fold_convert (type, t1),
1593 fold_convert (type, t2));
1596 return fold_build2 (code, type, fold_convert (type, t1),
1597 fold_convert (type, t2));
1600 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1601 for use in int_const_binop, size_binop and size_diffop. */
1603 static bool
1604 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1606 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1607 return false;
1608 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1609 return false;
1611 switch (code)
1613 case LSHIFT_EXPR:
1614 case RSHIFT_EXPR:
1615 case LROTATE_EXPR:
1616 case RROTATE_EXPR:
1617 return true;
1619 default:
1620 break;
1623 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1624 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1625 && TYPE_MODE (type1) == TYPE_MODE (type2);
1629 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1630 to produce a new constant. Return NULL_TREE if we don't know how
1631 to evaluate CODE at compile-time.
1633 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1635 tree
1636 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1638 unsigned HOST_WIDE_INT int1l, int2l;
1639 HOST_WIDE_INT int1h, int2h;
1640 unsigned HOST_WIDE_INT low;
1641 HOST_WIDE_INT hi;
1642 unsigned HOST_WIDE_INT garbagel;
1643 HOST_WIDE_INT garbageh;
1644 tree t;
1645 tree type = TREE_TYPE (arg1);
1646 int uns = TYPE_UNSIGNED (type);
1647 int is_sizetype
1648 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1649 int overflow = 0;
1651 int1l = TREE_INT_CST_LOW (arg1);
1652 int1h = TREE_INT_CST_HIGH (arg1);
1653 int2l = TREE_INT_CST_LOW (arg2);
1654 int2h = TREE_INT_CST_HIGH (arg2);
1656 switch (code)
1658 case BIT_IOR_EXPR:
1659 low = int1l | int2l, hi = int1h | int2h;
1660 break;
1662 case BIT_XOR_EXPR:
1663 low = int1l ^ int2l, hi = int1h ^ int2h;
1664 break;
1666 case BIT_AND_EXPR:
1667 low = int1l & int2l, hi = int1h & int2h;
1668 break;
1670 case RSHIFT_EXPR:
1671 int2l = -int2l;
1672 case LSHIFT_EXPR:
1673 /* It's unclear from the C standard whether shifts can overflow.
1674 The following code ignores overflow; perhaps a C standard
1675 interpretation ruling is needed. */
1676 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1677 &low, &hi, !uns);
1678 break;
1680 case RROTATE_EXPR:
1681 int2l = - int2l;
1682 case LROTATE_EXPR:
1683 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1684 &low, &hi);
1685 break;
1687 case PLUS_EXPR:
1688 case PLUSNV_EXPR:
1689 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1690 break;
1692 case MINUS_EXPR:
1693 case MINUSNV_EXPR:
1694 neg_double (int2l, int2h, &low, &hi);
1695 add_double (int1l, int1h, low, hi, &low, &hi);
1696 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1697 break;
1699 case MULT_EXPR:
1700 case MULTNV_EXPR:
1701 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1702 break;
1704 case TRUNC_DIV_EXPR:
1705 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1706 case EXACT_DIV_EXPR:
1707 /* This is a shortcut for a common special case. */
1708 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1709 && !TREE_OVERFLOW (arg1)
1710 && !TREE_OVERFLOW (arg2)
1711 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1713 if (code == CEIL_DIV_EXPR)
1714 int1l += int2l - 1;
1716 low = int1l / int2l, hi = 0;
1717 break;
1720 /* ... fall through ... */
1722 case ROUND_DIV_EXPR:
1723 if (int2h == 0 && int2l == 0)
1724 return NULL_TREE;
1725 if (int2h == 0 && int2l == 1)
1727 low = int1l, hi = int1h;
1728 break;
1730 if (int1l == int2l && int1h == int2h
1731 && ! (int1l == 0 && int1h == 0))
1733 low = 1, hi = 0;
1734 break;
1736 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1737 &low, &hi, &garbagel, &garbageh);
1738 break;
1740 case TRUNC_MOD_EXPR:
1741 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1742 /* This is a shortcut for a common special case. */
1743 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1744 && !TREE_OVERFLOW (arg1)
1745 && !TREE_OVERFLOW (arg2)
1746 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1748 if (code == CEIL_MOD_EXPR)
1749 int1l += int2l - 1;
1750 low = int1l % int2l, hi = 0;
1751 break;
1754 /* ... fall through ... */
1756 case ROUND_MOD_EXPR:
1757 if (int2h == 0 && int2l == 0)
1758 return NULL_TREE;
1759 overflow = div_and_round_double (code, uns,
1760 int1l, int1h, int2l, int2h,
1761 &garbagel, &garbageh, &low, &hi);
1762 break;
1764 case MIN_EXPR:
1765 case MAX_EXPR:
1766 if (uns)
1767 low = (((unsigned HOST_WIDE_INT) int1h
1768 < (unsigned HOST_WIDE_INT) int2h)
1769 || (((unsigned HOST_WIDE_INT) int1h
1770 == (unsigned HOST_WIDE_INT) int2h)
1771 && int1l < int2l));
1772 else
1773 low = (int1h < int2h
1774 || (int1h == int2h && int1l < int2l));
1776 if (low == (code == MIN_EXPR))
1777 low = int1l, hi = int1h;
1778 else
1779 low = int2l, hi = int2h;
1780 break;
1782 default:
1783 return NULL_TREE;
1786 if (notrunc)
1788 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1790 /* Propagate overflow flags ourselves. */
1791 if (((!uns || is_sizetype) && overflow)
1792 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1794 t = copy_node (t);
1795 TREE_OVERFLOW (t) = 1;
1798 else
1799 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1800 ((!uns || is_sizetype) && overflow)
1801 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1803 return t;
1806 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1807 constant. We assume ARG1 and ARG2 have the same data type, or at least
1808 are the same kind of constant and the same machine mode. Return zero if
1809 combining the constants is not allowed in the current operating mode.
1811 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1813 static tree
1814 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1816 /* Sanity check for the recursive cases. */
1817 if (!arg1 || !arg2)
1818 return NULL_TREE;
1820 STRIP_NOPS (arg1);
1821 STRIP_NOPS (arg2);
1823 if (TREE_CODE (arg1) == INTEGER_CST)
1824 return int_const_binop (code, arg1, arg2, notrunc);
1826 if (TREE_CODE (arg1) == REAL_CST)
1828 enum machine_mode mode;
1829 REAL_VALUE_TYPE d1;
1830 REAL_VALUE_TYPE d2;
1831 REAL_VALUE_TYPE value;
1832 REAL_VALUE_TYPE result;
1833 bool inexact;
1834 tree t, type;
1836 /* The following codes are handled by real_arithmetic. */
1837 switch (code)
1839 case PLUS_EXPR:
1840 case MINUS_EXPR:
1841 case MULT_EXPR:
1842 case RDIV_EXPR:
1843 case MIN_EXPR:
1844 case MAX_EXPR:
1845 break;
1847 default:
1848 return NULL_TREE;
1851 d1 = TREE_REAL_CST (arg1);
1852 d2 = TREE_REAL_CST (arg2);
1854 type = TREE_TYPE (arg1);
1855 mode = TYPE_MODE (type);
1857 /* Don't perform operation if we honor signaling NaNs and
1858 either operand is a NaN. */
1859 if (HONOR_SNANS (mode)
1860 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1861 return NULL_TREE;
1863 /* Don't perform operation if it would raise a division
1864 by zero exception. */
1865 if (code == RDIV_EXPR
1866 && REAL_VALUES_EQUAL (d2, dconst0)
1867 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1868 return NULL_TREE;
1870 /* If either operand is a NaN, just return it. Otherwise, set up
1871 for floating-point trap; we return an overflow. */
1872 if (REAL_VALUE_ISNAN (d1))
1873 return arg1;
1874 else if (REAL_VALUE_ISNAN (d2))
1875 return arg2;
1877 inexact = real_arithmetic (&value, code, &d1, &d2);
1878 real_convert (&result, mode, &value);
1880 /* Don't constant fold this floating point operation if
1881 the result has overflowed and flag_trapping_math. */
1882 if (flag_trapping_math
1883 && MODE_HAS_INFINITIES (mode)
1884 && REAL_VALUE_ISINF (result)
1885 && !REAL_VALUE_ISINF (d1)
1886 && !REAL_VALUE_ISINF (d2))
1887 return NULL_TREE;
1889 /* Don't constant fold this floating point operation if the
1890 result may dependent upon the run-time rounding mode and
1891 flag_rounding_math is set, or if GCC's software emulation
1892 is unable to accurately represent the result. */
1893 if ((flag_rounding_math
1894 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1895 && (inexact || !real_identical (&result, &value)))
1896 return NULL_TREE;
1898 t = build_real (type, result);
1900 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1901 return t;
1904 if (TREE_CODE (arg1) == FIXED_CST)
1906 FIXED_VALUE_TYPE f1;
1907 FIXED_VALUE_TYPE f2;
1908 FIXED_VALUE_TYPE result;
1909 tree t, type;
1910 int sat_p;
1911 bool overflow_p;
1913 /* The following codes are handled by fixed_arithmetic. */
1914 switch (code)
1916 case PLUS_EXPR:
1917 case MINUS_EXPR:
1918 case MULT_EXPR:
1919 case TRUNC_DIV_EXPR:
1920 f2 = TREE_FIXED_CST (arg2);
1921 break;
1923 case LSHIFT_EXPR:
1924 case RSHIFT_EXPR:
1925 f2.data.high = TREE_INT_CST_HIGH (arg2);
1926 f2.data.low = TREE_INT_CST_LOW (arg2);
1927 f2.mode = SImode;
1928 break;
1930 default:
1931 return NULL_TREE;
1934 f1 = TREE_FIXED_CST (arg1);
1935 type = TREE_TYPE (arg1);
1936 sat_p = TYPE_SATURATING (type);
1937 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1938 t = build_fixed (type, result);
1939 /* Propagate overflow flags. */
1940 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1942 TREE_OVERFLOW (t) = 1;
1943 TREE_CONSTANT_OVERFLOW (t) = 1;
1945 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1946 TREE_CONSTANT_OVERFLOW (t) = 1;
1947 return t;
1950 if (TREE_CODE (arg1) == COMPLEX_CST)
1952 tree type = TREE_TYPE (arg1);
1953 tree r1 = TREE_REALPART (arg1);
1954 tree i1 = TREE_IMAGPART (arg1);
1955 tree r2 = TREE_REALPART (arg2);
1956 tree i2 = TREE_IMAGPART (arg2);
1957 tree real, imag;
1959 switch (code)
1961 case PLUS_EXPR:
1962 case MINUS_EXPR:
1963 real = const_binop (code, r1, r2, notrunc);
1964 imag = const_binop (code, i1, i2, notrunc);
1965 break;
1967 case MULT_EXPR:
1968 real = const_binop (MINUS_EXPR,
1969 const_binop (MULT_EXPR, r1, r2, notrunc),
1970 const_binop (MULT_EXPR, i1, i2, notrunc),
1971 notrunc);
1972 imag = const_binop (PLUS_EXPR,
1973 const_binop (MULT_EXPR, r1, i2, notrunc),
1974 const_binop (MULT_EXPR, i1, r2, notrunc),
1975 notrunc);
1976 break;
1978 case RDIV_EXPR:
1980 tree magsquared
1981 = const_binop (PLUS_EXPR,
1982 const_binop (MULT_EXPR, r2, r2, notrunc),
1983 const_binop (MULT_EXPR, i2, i2, notrunc),
1984 notrunc);
1985 tree t1
1986 = const_binop (PLUS_EXPR,
1987 const_binop (MULT_EXPR, r1, r2, notrunc),
1988 const_binop (MULT_EXPR, i1, i2, notrunc),
1989 notrunc);
1990 tree t2
1991 = const_binop (MINUS_EXPR,
1992 const_binop (MULT_EXPR, i1, r2, notrunc),
1993 const_binop (MULT_EXPR, r1, i2, notrunc),
1994 notrunc);
1996 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1997 code = TRUNC_DIV_EXPR;
1999 real = const_binop (code, t1, magsquared, notrunc);
2000 imag = const_binop (code, t2, magsquared, notrunc);
2002 break;
2004 default:
2005 return NULL_TREE;
2008 if (real && imag)
2009 return build_complex (type, real, imag);
2012 return NULL_TREE;
2015 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2016 indicates which particular sizetype to create. */
2018 tree
2019 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2021 return build_int_cst (sizetype_tab[(int) kind], number);
2024 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2025 is a tree code. The type of the result is taken from the operands.
2026 Both must be equivalent integer types, ala int_binop_types_match_p.
2027 If the operands are constant, so is the result. */
2029 tree
2030 size_binop (enum tree_code code, tree arg0, tree arg1)
2032 tree type = TREE_TYPE (arg0);
2034 if (arg0 == error_mark_node || arg1 == error_mark_node)
2035 return error_mark_node;
2037 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2038 TREE_TYPE (arg1)));
2040 /* Handle the special case of two integer constants faster. */
2041 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2043 /* And some specific cases even faster than that. */
2044 if (code == PLUS_EXPR)
2046 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2047 return arg1;
2048 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2049 return arg0;
2051 else if (code == MINUS_EXPR)
2053 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2054 return arg0;
2056 else if (code == MULT_EXPR)
2058 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2059 return arg1;
2062 /* Handle general case of two integer constants. */
2063 return int_const_binop (code, arg0, arg1, 0);
2066 return fold_build2 (code, type, arg0, arg1);
2069 /* Given two values, either both of sizetype or both of bitsizetype,
2070 compute the difference between the two values. Return the value
2071 in signed type corresponding to the type of the operands. */
2073 tree
2074 size_diffop (tree arg0, tree arg1)
2076 tree type = TREE_TYPE (arg0);
2077 tree ctype;
2079 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2080 TREE_TYPE (arg1)));
2082 /* If the type is already signed, just do the simple thing. */
2083 if (!TYPE_UNSIGNED (type))
2084 return size_binop (MINUS_EXPR, arg0, arg1);
2086 if (type == sizetype)
2087 ctype = ssizetype;
2088 else if (type == bitsizetype)
2089 ctype = sbitsizetype;
2090 else
2091 ctype = signed_type_for (type);
2093 /* If either operand is not a constant, do the conversions to the signed
2094 type and subtract. The hardware will do the right thing with any
2095 overflow in the subtraction. */
2096 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2097 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2098 fold_convert (ctype, arg1));
2100 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2101 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2102 overflow) and negate (which can't either). Special-case a result
2103 of zero while we're here. */
2104 if (tree_int_cst_equal (arg0, arg1))
2105 return build_int_cst (ctype, 0);
2106 else if (tree_int_cst_lt (arg1, arg0))
2107 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2108 else
2109 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2110 fold_convert (ctype, size_binop (MINUS_EXPR,
2111 arg1, arg0)));
2114 /* A subroutine of fold_convert_const handling conversions of an
2115 INTEGER_CST to another integer type. */
2117 static tree
2118 fold_convert_const_int_from_int (tree type, const_tree arg1)
2120 tree t;
2122 /* Given an integer constant, make new constant with new type,
2123 appropriately sign-extended or truncated. */
2124 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2125 TREE_INT_CST_HIGH (arg1),
2126 /* Don't set the overflow when
2127 converting from a pointer, */
2128 !POINTER_TYPE_P (TREE_TYPE (arg1))
2129 /* or to a sizetype with same signedness
2130 and the precision is unchanged.
2131 ??? sizetype is always sign-extended,
2132 but its signedness depends on the
2133 frontend. Thus we see spurious overflows
2134 here if we do not check this. */
2135 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2136 == TYPE_PRECISION (type))
2137 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2138 == TYPE_UNSIGNED (type))
2139 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2140 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2141 || (TREE_CODE (type) == INTEGER_TYPE
2142 && TYPE_IS_SIZETYPE (type)))),
2143 (TREE_INT_CST_HIGH (arg1) < 0
2144 && (TYPE_UNSIGNED (type)
2145 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2146 | TREE_OVERFLOW (arg1));
2148 return t;
2151 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2152 to an integer type. */
2154 static tree
2155 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2157 int overflow = 0;
2158 tree t;
2160 /* The following code implements the floating point to integer
2161 conversion rules required by the Java Language Specification,
2162 that IEEE NaNs are mapped to zero and values that overflow
2163 the target precision saturate, i.e. values greater than
2164 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2165 are mapped to INT_MIN. These semantics are allowed by the
2166 C and C++ standards that simply state that the behavior of
2167 FP-to-integer conversion is unspecified upon overflow. */
2169 HOST_WIDE_INT high, low;
2170 REAL_VALUE_TYPE r;
2171 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2173 switch (code)
2175 case FIX_TRUNC_EXPR:
2176 real_trunc (&r, VOIDmode, &x);
2177 break;
2179 default:
2180 gcc_unreachable ();
2183 /* If R is NaN, return zero and show we have an overflow. */
2184 if (REAL_VALUE_ISNAN (r))
2186 overflow = 1;
2187 high = 0;
2188 low = 0;
2191 /* See if R is less than the lower bound or greater than the
2192 upper bound. */
2194 if (! overflow)
2196 tree lt = TYPE_MIN_VALUE (type);
2197 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2198 if (REAL_VALUES_LESS (r, l))
2200 overflow = 1;
2201 high = TREE_INT_CST_HIGH (lt);
2202 low = TREE_INT_CST_LOW (lt);
2206 if (! overflow)
2208 tree ut = TYPE_MAX_VALUE (type);
2209 if (ut)
2211 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2212 if (REAL_VALUES_LESS (u, r))
2214 overflow = 1;
2215 high = TREE_INT_CST_HIGH (ut);
2216 low = TREE_INT_CST_LOW (ut);
2221 if (! overflow)
2222 REAL_VALUE_TO_INT (&low, &high, r);
2224 t = force_fit_type_double (type, low, high, -1,
2225 overflow | TREE_OVERFLOW (arg1));
2226 return t;
2229 /* A subroutine of fold_convert_const handling conversions of a
2230 FIXED_CST to an integer type. */
2232 static tree
2233 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2235 tree t;
2236 double_int temp, temp_trunc;
2237 unsigned int mode;
2239 /* Right shift FIXED_CST to temp by fbit. */
2240 temp = TREE_FIXED_CST (arg1).data;
2241 mode = TREE_FIXED_CST (arg1).mode;
2242 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2244 lshift_double (temp.low, temp.high,
2245 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2246 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2248 /* Left shift temp to temp_trunc by fbit. */
2249 lshift_double (temp.low, temp.high,
2250 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2251 &temp_trunc.low, &temp_trunc.high,
2252 SIGNED_FIXED_POINT_MODE_P (mode));
2254 else
2256 temp.low = 0;
2257 temp.high = 0;
2258 temp_trunc.low = 0;
2259 temp_trunc.high = 0;
2262 /* If FIXED_CST is negative, we need to round the value toward 0.
2263 By checking if the fractional bits are not zero to add 1 to temp. */
2264 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2265 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2267 double_int one;
2268 one.low = 1;
2269 one.high = 0;
2270 temp = double_int_add (temp, one);
2273 /* Given a fixed-point constant, make new constant with new type,
2274 appropriately sign-extended or truncated. */
2275 t = force_fit_type_double (type, temp.low, temp.high, -1,
2276 (temp.high < 0
2277 && (TYPE_UNSIGNED (type)
2278 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2279 | TREE_OVERFLOW (arg1));
2281 return t;
2284 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2285 to another floating point type. */
2287 static tree
2288 fold_convert_const_real_from_real (tree type, const_tree arg1)
2290 REAL_VALUE_TYPE value;
2291 tree t;
2293 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2294 t = build_real (type, value);
2296 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2297 return t;
2300 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2301 to a floating point type. */
2303 static tree
2304 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2306 REAL_VALUE_TYPE value;
2307 tree t;
2309 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2310 t = build_real (type, value);
2312 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2313 TREE_CONSTANT_OVERFLOW (t)
2314 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2315 return t;
2318 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2319 to another fixed-point type. */
2321 static tree
2322 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2324 FIXED_VALUE_TYPE value;
2325 tree t;
2326 bool overflow_p;
2328 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2329 TYPE_SATURATING (type));
2330 t = build_fixed (type, value);
2332 /* Propagate overflow flags. */
2333 if (overflow_p | TREE_OVERFLOW (arg1))
2335 TREE_OVERFLOW (t) = 1;
2336 TREE_CONSTANT_OVERFLOW (t) = 1;
2338 else if (TREE_CONSTANT_OVERFLOW (arg1))
2339 TREE_CONSTANT_OVERFLOW (t) = 1;
2340 return t;
2343 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2344 to a fixed-point type. */
2346 static tree
2347 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2349 FIXED_VALUE_TYPE value;
2350 tree t;
2351 bool overflow_p;
2353 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2354 TREE_INT_CST (arg1),
2355 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2356 TYPE_SATURATING (type));
2357 t = build_fixed (type, value);
2359 /* Propagate overflow flags. */
2360 if (overflow_p | TREE_OVERFLOW (arg1))
2362 TREE_OVERFLOW (t) = 1;
2363 TREE_CONSTANT_OVERFLOW (t) = 1;
2365 else if (TREE_CONSTANT_OVERFLOW (arg1))
2366 TREE_CONSTANT_OVERFLOW (t) = 1;
2367 return t;
2370 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2371 to a fixed-point type. */
2373 static tree
2374 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2376 FIXED_VALUE_TYPE value;
2377 tree t;
2378 bool overflow_p;
2380 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2381 &TREE_REAL_CST (arg1),
2382 TYPE_SATURATING (type));
2383 t = build_fixed (type, value);
2385 /* Propagate overflow flags. */
2386 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 TREE_CONSTANT_OVERFLOW (t) = 1;
2391 else if (TREE_CONSTANT_OVERFLOW (arg1))
2392 TREE_CONSTANT_OVERFLOW (t) = 1;
2393 return t;
2396 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2397 type TYPE. If no simplification can be done return NULL_TREE. */
2399 static tree
2400 fold_convert_const (enum tree_code code, tree type, tree arg1)
2402 if (TREE_TYPE (arg1) == type)
2403 return arg1;
2405 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2406 || TREE_CODE (type) == OFFSET_TYPE)
2408 if (TREE_CODE (arg1) == INTEGER_CST)
2409 return fold_convert_const_int_from_int (type, arg1);
2410 else if (TREE_CODE (arg1) == REAL_CST)
2411 return fold_convert_const_int_from_real (code, type, arg1);
2412 else if (TREE_CODE (arg1) == FIXED_CST)
2413 return fold_convert_const_int_from_fixed (type, arg1);
2415 else if (TREE_CODE (type) == REAL_TYPE)
2417 if (TREE_CODE (arg1) == INTEGER_CST)
2418 return build_real_from_int_cst (type, arg1);
2419 else if (TREE_CODE (arg1) == REAL_CST)
2420 return fold_convert_const_real_from_real (type, arg1);
2421 else if (TREE_CODE (arg1) == FIXED_CST)
2422 return fold_convert_const_real_from_fixed (type, arg1);
2424 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2426 if (TREE_CODE (arg1) == FIXED_CST)
2427 return fold_convert_const_fixed_from_fixed (type, arg1);
2428 else if (TREE_CODE (arg1) == INTEGER_CST)
2429 return fold_convert_const_fixed_from_int (type, arg1);
2430 else if (TREE_CODE (arg1) == REAL_CST)
2431 return fold_convert_const_fixed_from_real (type, arg1);
2433 return NULL_TREE;
2436 /* Construct a vector of zero elements of vector type TYPE. */
2438 static tree
2439 build_zero_vector (tree type)
2441 tree elem, list;
2442 int i, units;
2444 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2445 units = TYPE_VECTOR_SUBPARTS (type);
2447 list = NULL_TREE;
2448 for (i = 0; i < units; i++)
2449 list = tree_cons (NULL_TREE, elem, list);
2450 return build_vector (type, list);
2453 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2455 bool
2456 fold_convertible_p (const_tree type, const_tree arg)
2458 tree orig = TREE_TYPE (arg);
2460 if (type == orig)
2461 return true;
2463 if (TREE_CODE (arg) == ERROR_MARK
2464 || TREE_CODE (type) == ERROR_MARK
2465 || TREE_CODE (orig) == ERROR_MARK)
2466 return false;
2468 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2469 return true;
2471 switch (TREE_CODE (type))
2473 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2474 case POINTER_TYPE: case REFERENCE_TYPE:
2475 case OFFSET_TYPE:
2476 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2477 || TREE_CODE (orig) == OFFSET_TYPE)
2478 return true;
2479 return (TREE_CODE (orig) == VECTOR_TYPE
2480 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2482 case REAL_TYPE:
2483 case FIXED_POINT_TYPE:
2484 case COMPLEX_TYPE:
2485 case VECTOR_TYPE:
2486 case VOID_TYPE:
2487 return TREE_CODE (type) == TREE_CODE (orig);
2489 default:
2490 return false;
2494 /* Convert expression ARG to type TYPE. Used by the middle-end for
2495 simple conversions in preference to calling the front-end's convert. */
2497 tree
2498 fold_convert (tree type, tree arg)
2500 tree orig = TREE_TYPE (arg);
2501 tree tem;
2503 if (type == orig)
2504 return arg;
2506 if (TREE_CODE (arg) == ERROR_MARK
2507 || TREE_CODE (type) == ERROR_MARK
2508 || TREE_CODE (orig) == ERROR_MARK)
2509 return error_mark_node;
2511 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2512 return fold_build1 (NOP_EXPR, type, arg);
2514 switch (TREE_CODE (type))
2516 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2517 case POINTER_TYPE: case REFERENCE_TYPE:
2518 case OFFSET_TYPE:
2519 if (TREE_CODE (arg) == INTEGER_CST)
2521 tem = fold_convert_const (NOP_EXPR, type, arg);
2522 if (tem != NULL_TREE)
2523 return tem;
2525 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2526 || TREE_CODE (orig) == OFFSET_TYPE)
2527 return fold_build1 (NOP_EXPR, type, arg);
2528 if (TREE_CODE (orig) == COMPLEX_TYPE)
2530 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2531 return fold_convert (type, tem);
2533 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2534 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2535 return fold_build1 (NOP_EXPR, type, arg);
2537 case REAL_TYPE:
2538 if (TREE_CODE (arg) == INTEGER_CST)
2540 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2542 return tem;
2544 else if (TREE_CODE (arg) == REAL_CST)
2546 tem = fold_convert_const (NOP_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2548 return tem;
2550 else if (TREE_CODE (arg) == FIXED_CST)
2552 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2553 if (tem != NULL_TREE)
2554 return tem;
2557 switch (TREE_CODE (orig))
2559 case INTEGER_TYPE:
2560 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2561 case POINTER_TYPE: case REFERENCE_TYPE:
2562 return fold_build1 (FLOAT_EXPR, type, arg);
2564 case REAL_TYPE:
2565 return fold_build1 (NOP_EXPR, type, arg);
2567 case FIXED_POINT_TYPE:
2568 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2570 case COMPLEX_TYPE:
2571 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2572 return fold_convert (type, tem);
2574 default:
2575 gcc_unreachable ();
2578 case FIXED_POINT_TYPE:
2579 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2580 || TREE_CODE (arg) == REAL_CST)
2582 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2583 if (tem != NULL_TREE)
2584 return tem;
2587 switch (TREE_CODE (orig))
2589 case FIXED_POINT_TYPE:
2590 case INTEGER_TYPE:
2591 case ENUMERAL_TYPE:
2592 case BOOLEAN_TYPE:
2593 case REAL_TYPE:
2594 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2596 case COMPLEX_TYPE:
2597 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2598 return fold_convert (type, tem);
2600 default:
2601 gcc_unreachable ();
2604 case COMPLEX_TYPE:
2605 switch (TREE_CODE (orig))
2607 case INTEGER_TYPE:
2608 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2609 case POINTER_TYPE: case REFERENCE_TYPE:
2610 case REAL_TYPE:
2611 case FIXED_POINT_TYPE:
2612 return build2 (COMPLEX_EXPR, type,
2613 fold_convert (TREE_TYPE (type), arg),
2614 fold_convert (TREE_TYPE (type), integer_zero_node));
2615 case COMPLEX_TYPE:
2617 tree rpart, ipart;
2619 if (TREE_CODE (arg) == COMPLEX_EXPR)
2621 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2622 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2623 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2626 arg = save_expr (arg);
2627 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2628 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2629 rpart = fold_convert (TREE_TYPE (type), rpart);
2630 ipart = fold_convert (TREE_TYPE (type), ipart);
2631 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2634 default:
2635 gcc_unreachable ();
2638 case VECTOR_TYPE:
2639 if (integer_zerop (arg))
2640 return build_zero_vector (type);
2641 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2642 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2643 || TREE_CODE (orig) == VECTOR_TYPE);
2644 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2646 case VOID_TYPE:
2647 tem = fold_ignored_result (arg);
2648 if (TREE_CODE (tem) == MODIFY_EXPR)
2649 return tem;
2650 return fold_build1 (NOP_EXPR, type, tem);
2652 default:
2653 gcc_unreachable ();
2657 /* Return false if expr can be assumed not to be an lvalue, true
2658 otherwise. */
2660 static bool
2661 maybe_lvalue_p (const_tree x)
2663 /* We only need to wrap lvalue tree codes. */
2664 switch (TREE_CODE (x))
2666 case VAR_DECL:
2667 case PARM_DECL:
2668 case RESULT_DECL:
2669 case LABEL_DECL:
2670 case FUNCTION_DECL:
2671 case SSA_NAME:
2673 case COMPONENT_REF:
2674 case INDIRECT_REF:
2675 case ALIGN_INDIRECT_REF:
2676 case MISALIGNED_INDIRECT_REF:
2677 case ARRAY_REF:
2678 case ARRAY_RANGE_REF:
2679 case BIT_FIELD_REF:
2680 case OBJ_TYPE_REF:
2682 case REALPART_EXPR:
2683 case IMAGPART_EXPR:
2684 case PREINCREMENT_EXPR:
2685 case PREDECREMENT_EXPR:
2686 case SAVE_EXPR:
2687 case TRY_CATCH_EXPR:
2688 case WITH_CLEANUP_EXPR:
2689 case COMPOUND_EXPR:
2690 case MODIFY_EXPR:
2691 case TARGET_EXPR:
2692 case COND_EXPR:
2693 case BIND_EXPR:
2694 case MIN_EXPR:
2695 case MAX_EXPR:
2696 break;
2698 default:
2699 /* Assume the worst for front-end tree codes. */
2700 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2701 break;
2702 return false;
2705 return true;
2708 /* Return an expr equal to X but certainly not valid as an lvalue. */
2710 tree
2711 non_lvalue (tree x)
2713 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2714 us. */
2715 if (in_gimple_form)
2716 return x;
2718 if (! maybe_lvalue_p (x))
2719 return x;
2720 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2723 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2724 Zero means allow extended lvalues. */
2726 int pedantic_lvalues;
2728 /* When pedantic, return an expr equal to X but certainly not valid as a
2729 pedantic lvalue. Otherwise, return X. */
2731 static tree
2732 pedantic_non_lvalue (tree x)
2734 if (pedantic_lvalues)
2735 return non_lvalue (x);
2736 else
2737 return x;
2740 /* Given a tree comparison code, return the code that is the logical inverse
2741 of the given code. It is not safe to do this for floating-point
2742 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2743 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2745 enum tree_code
2746 invert_tree_comparison (enum tree_code code, bool honor_nans)
2748 if (honor_nans && flag_trapping_math)
2749 return ERROR_MARK;
2751 switch (code)
2753 case EQ_EXPR:
2754 return NE_EXPR;
2755 case NE_EXPR:
2756 return EQ_EXPR;
2757 case GT_EXPR:
2758 return honor_nans ? UNLE_EXPR : LE_EXPR;
2759 case GE_EXPR:
2760 return honor_nans ? UNLT_EXPR : LT_EXPR;
2761 case LT_EXPR:
2762 return honor_nans ? UNGE_EXPR : GE_EXPR;
2763 case LE_EXPR:
2764 return honor_nans ? UNGT_EXPR : GT_EXPR;
2765 case LTGT_EXPR:
2766 return UNEQ_EXPR;
2767 case UNEQ_EXPR:
2768 return LTGT_EXPR;
2769 case UNGT_EXPR:
2770 return LE_EXPR;
2771 case UNGE_EXPR:
2772 return LT_EXPR;
2773 case UNLT_EXPR:
2774 return GE_EXPR;
2775 case UNLE_EXPR:
2776 return GT_EXPR;
2777 case ORDERED_EXPR:
2778 return UNORDERED_EXPR;
2779 case UNORDERED_EXPR:
2780 return ORDERED_EXPR;
2781 default:
2782 gcc_unreachable ();
2786 /* Similar, but return the comparison that results if the operands are
2787 swapped. This is safe for floating-point. */
2789 enum tree_code
2790 swap_tree_comparison (enum tree_code code)
2792 switch (code)
2794 case EQ_EXPR:
2795 case NE_EXPR:
2796 case ORDERED_EXPR:
2797 case UNORDERED_EXPR:
2798 case LTGT_EXPR:
2799 case UNEQ_EXPR:
2800 return code;
2801 case GT_EXPR:
2802 return LT_EXPR;
2803 case GE_EXPR:
2804 return LE_EXPR;
2805 case LT_EXPR:
2806 return GT_EXPR;
2807 case LE_EXPR:
2808 return GE_EXPR;
2809 case UNGT_EXPR:
2810 return UNLT_EXPR;
2811 case UNGE_EXPR:
2812 return UNLE_EXPR;
2813 case UNLT_EXPR:
2814 return UNGT_EXPR;
2815 case UNLE_EXPR:
2816 return UNGE_EXPR;
2817 default:
2818 gcc_unreachable ();
2823 /* Convert a comparison tree code from an enum tree_code representation
2824 into a compcode bit-based encoding. This function is the inverse of
2825 compcode_to_comparison. */
2827 static enum comparison_code
2828 comparison_to_compcode (enum tree_code code)
2830 switch (code)
2832 case LT_EXPR:
2833 return COMPCODE_LT;
2834 case EQ_EXPR:
2835 return COMPCODE_EQ;
2836 case LE_EXPR:
2837 return COMPCODE_LE;
2838 case GT_EXPR:
2839 return COMPCODE_GT;
2840 case NE_EXPR:
2841 return COMPCODE_NE;
2842 case GE_EXPR:
2843 return COMPCODE_GE;
2844 case ORDERED_EXPR:
2845 return COMPCODE_ORD;
2846 case UNORDERED_EXPR:
2847 return COMPCODE_UNORD;
2848 case UNLT_EXPR:
2849 return COMPCODE_UNLT;
2850 case UNEQ_EXPR:
2851 return COMPCODE_UNEQ;
2852 case UNLE_EXPR:
2853 return COMPCODE_UNLE;
2854 case UNGT_EXPR:
2855 return COMPCODE_UNGT;
2856 case LTGT_EXPR:
2857 return COMPCODE_LTGT;
2858 case UNGE_EXPR:
2859 return COMPCODE_UNGE;
2860 default:
2861 gcc_unreachable ();
2865 /* Convert a compcode bit-based encoding of a comparison operator back
2866 to GCC's enum tree_code representation. This function is the
2867 inverse of comparison_to_compcode. */
2869 static enum tree_code
2870 compcode_to_comparison (enum comparison_code code)
2872 switch (code)
2874 case COMPCODE_LT:
2875 return LT_EXPR;
2876 case COMPCODE_EQ:
2877 return EQ_EXPR;
2878 case COMPCODE_LE:
2879 return LE_EXPR;
2880 case COMPCODE_GT:
2881 return GT_EXPR;
2882 case COMPCODE_NE:
2883 return NE_EXPR;
2884 case COMPCODE_GE:
2885 return GE_EXPR;
2886 case COMPCODE_ORD:
2887 return ORDERED_EXPR;
2888 case COMPCODE_UNORD:
2889 return UNORDERED_EXPR;
2890 case COMPCODE_UNLT:
2891 return UNLT_EXPR;
2892 case COMPCODE_UNEQ:
2893 return UNEQ_EXPR;
2894 case COMPCODE_UNLE:
2895 return UNLE_EXPR;
2896 case COMPCODE_UNGT:
2897 return UNGT_EXPR;
2898 case COMPCODE_LTGT:
2899 return LTGT_EXPR;
2900 case COMPCODE_UNGE:
2901 return UNGE_EXPR;
2902 default:
2903 gcc_unreachable ();
2907 /* Return a tree for the comparison which is the combination of
2908 doing the AND or OR (depending on CODE) of the two operations LCODE
2909 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2910 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2911 if this makes the transformation invalid. */
2913 tree
2914 combine_comparisons (enum tree_code code, enum tree_code lcode,
2915 enum tree_code rcode, tree truth_type,
2916 tree ll_arg, tree lr_arg)
2918 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2919 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2920 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2921 enum comparison_code compcode;
2923 switch (code)
2925 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2926 compcode = lcompcode & rcompcode;
2927 break;
2929 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2930 compcode = lcompcode | rcompcode;
2931 break;
2933 default:
2934 return NULL_TREE;
2937 if (!honor_nans)
2939 /* Eliminate unordered comparisons, as well as LTGT and ORD
2940 which are not used unless the mode has NaNs. */
2941 compcode &= ~COMPCODE_UNORD;
2942 if (compcode == COMPCODE_LTGT)
2943 compcode = COMPCODE_NE;
2944 else if (compcode == COMPCODE_ORD)
2945 compcode = COMPCODE_TRUE;
2947 else if (flag_trapping_math)
2949 /* Check that the original operation and the optimized ones will trap
2950 under the same condition. */
2951 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2952 && (lcompcode != COMPCODE_EQ)
2953 && (lcompcode != COMPCODE_ORD);
2954 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2955 && (rcompcode != COMPCODE_EQ)
2956 && (rcompcode != COMPCODE_ORD);
2957 bool trap = (compcode & COMPCODE_UNORD) == 0
2958 && (compcode != COMPCODE_EQ)
2959 && (compcode != COMPCODE_ORD);
2961 /* In a short-circuited boolean expression the LHS might be
2962 such that the RHS, if evaluated, will never trap. For
2963 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2964 if neither x nor y is NaN. (This is a mixed blessing: for
2965 example, the expression above will never trap, hence
2966 optimizing it to x < y would be invalid). */
2967 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2968 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2969 rtrap = false;
2971 /* If the comparison was short-circuited, and only the RHS
2972 trapped, we may now generate a spurious trap. */
2973 if (rtrap && !ltrap
2974 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2975 return NULL_TREE;
2977 /* If we changed the conditions that cause a trap, we lose. */
2978 if ((ltrap || rtrap) != trap)
2979 return NULL_TREE;
2982 if (compcode == COMPCODE_TRUE)
2983 return constant_boolean_node (true, truth_type);
2984 else if (compcode == COMPCODE_FALSE)
2985 return constant_boolean_node (false, truth_type);
2986 else
2987 return fold_build2 (compcode_to_comparison (compcode),
2988 truth_type, ll_arg, lr_arg);
2991 /* Return nonzero if two operands (typically of the same tree node)
2992 are necessarily equal. If either argument has side-effects this
2993 function returns zero. FLAGS modifies behavior as follows:
2995 If OEP_ONLY_CONST is set, only return nonzero for constants.
2996 This function tests whether the operands are indistinguishable;
2997 it does not test whether they are equal using C's == operation.
2998 The distinction is important for IEEE floating point, because
2999 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3000 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3002 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3003 even though it may hold multiple values during a function.
3004 This is because a GCC tree node guarantees that nothing else is
3005 executed between the evaluation of its "operands" (which may often
3006 be evaluated in arbitrary order). Hence if the operands themselves
3007 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3008 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3009 unset means assuming isochronic (or instantaneous) tree equivalence.
3010 Unless comparing arbitrary expression trees, such as from different
3011 statements, this flag can usually be left unset.
3013 If OEP_PURE_SAME is set, then pure functions with identical arguments
3014 are considered the same. It is used when the caller has other ways
3015 to ensure that global memory is unchanged in between. */
3018 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3020 /* If either is ERROR_MARK, they aren't equal. */
3021 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3022 return 0;
3024 /* Check equality of integer constants before bailing out due to
3025 precision differences. */
3026 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3027 return tree_int_cst_equal (arg0, arg1);
3029 /* If both types don't have the same signedness, then we can't consider
3030 them equal. We must check this before the STRIP_NOPS calls
3031 because they may change the signedness of the arguments. As pointers
3032 strictly don't have a signedness, require either two pointers or
3033 two non-pointers as well. */
3034 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3035 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3036 return 0;
3038 /* If both types don't have the same precision, then it is not safe
3039 to strip NOPs. */
3040 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3041 return 0;
3043 STRIP_NOPS (arg0);
3044 STRIP_NOPS (arg1);
3046 /* In case both args are comparisons but with different comparison
3047 code, try to swap the comparison operands of one arg to produce
3048 a match and compare that variant. */
3049 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3050 && COMPARISON_CLASS_P (arg0)
3051 && COMPARISON_CLASS_P (arg1))
3053 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3055 if (TREE_CODE (arg0) == swap_code)
3056 return operand_equal_p (TREE_OPERAND (arg0, 0),
3057 TREE_OPERAND (arg1, 1), flags)
3058 && operand_equal_p (TREE_OPERAND (arg0, 1),
3059 TREE_OPERAND (arg1, 0), flags);
3062 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3063 /* This is needed for conversions and for COMPONENT_REF.
3064 Might as well play it safe and always test this. */
3065 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3066 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3067 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3068 return 0;
3070 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3071 We don't care about side effects in that case because the SAVE_EXPR
3072 takes care of that for us. In all other cases, two expressions are
3073 equal if they have no side effects. If we have two identical
3074 expressions with side effects that should be treated the same due
3075 to the only side effects being identical SAVE_EXPR's, that will
3076 be detected in the recursive calls below. */
3077 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3078 && (TREE_CODE (arg0) == SAVE_EXPR
3079 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3080 return 1;
3082 /* Next handle constant cases, those for which we can return 1 even
3083 if ONLY_CONST is set. */
3084 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3085 switch (TREE_CODE (arg0))
3087 case INTEGER_CST:
3088 return tree_int_cst_equal (arg0, arg1);
3090 case FIXED_CST:
3091 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3092 TREE_FIXED_CST (arg1));
3094 case REAL_CST:
3095 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3096 TREE_REAL_CST (arg1)))
3097 return 1;
3100 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3102 /* If we do not distinguish between signed and unsigned zero,
3103 consider them equal. */
3104 if (real_zerop (arg0) && real_zerop (arg1))
3105 return 1;
3107 return 0;
3109 case VECTOR_CST:
3111 tree v1, v2;
3113 v1 = TREE_VECTOR_CST_ELTS (arg0);
3114 v2 = TREE_VECTOR_CST_ELTS (arg1);
3115 while (v1 && v2)
3117 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3118 flags))
3119 return 0;
3120 v1 = TREE_CHAIN (v1);
3121 v2 = TREE_CHAIN (v2);
3124 return v1 == v2;
3127 case COMPLEX_CST:
3128 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3129 flags)
3130 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3131 flags));
3133 case STRING_CST:
3134 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3135 && ! memcmp (TREE_STRING_POINTER (arg0),
3136 TREE_STRING_POINTER (arg1),
3137 TREE_STRING_LENGTH (arg0)));
3139 case ADDR_EXPR:
3140 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3142 default:
3143 break;
3146 if (flags & OEP_ONLY_CONST)
3147 return 0;
3149 /* Define macros to test an operand from arg0 and arg1 for equality and a
3150 variant that allows null and views null as being different from any
3151 non-null value. In the latter case, if either is null, the both
3152 must be; otherwise, do the normal comparison. */
3153 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3154 TREE_OPERAND (arg1, N), flags)
3156 #define OP_SAME_WITH_NULL(N) \
3157 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3158 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3160 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3162 case tcc_unary:
3163 /* Two conversions are equal only if signedness and modes match. */
3164 switch (TREE_CODE (arg0))
3166 CASE_CONVERT:
3167 case FIX_TRUNC_EXPR:
3168 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3169 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3170 return 0;
3171 break;
3172 default:
3173 break;
3176 return OP_SAME (0);
3179 case tcc_comparison:
3180 case tcc_binary:
3181 if (OP_SAME (0) && OP_SAME (1))
3182 return 1;
3184 /* For commutative ops, allow the other order. */
3185 return (commutative_tree_code (TREE_CODE (arg0))
3186 && operand_equal_p (TREE_OPERAND (arg0, 0),
3187 TREE_OPERAND (arg1, 1), flags)
3188 && operand_equal_p (TREE_OPERAND (arg0, 1),
3189 TREE_OPERAND (arg1, 0), flags));
3191 case tcc_reference:
3192 /* If either of the pointer (or reference) expressions we are
3193 dereferencing contain a side effect, these cannot be equal. */
3194 if (TREE_SIDE_EFFECTS (arg0)
3195 || TREE_SIDE_EFFECTS (arg1))
3196 return 0;
3198 switch (TREE_CODE (arg0))
3200 case INDIRECT_REF:
3201 case ALIGN_INDIRECT_REF:
3202 case MISALIGNED_INDIRECT_REF:
3203 case REALPART_EXPR:
3204 case IMAGPART_EXPR:
3205 return OP_SAME (0);
3207 case ARRAY_REF:
3208 case ARRAY_RANGE_REF:
3209 /* Operands 2 and 3 may be null.
3210 Compare the array index by value if it is constant first as we
3211 may have different types but same value here. */
3212 return (OP_SAME (0)
3213 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3214 TREE_OPERAND (arg1, 1))
3215 || OP_SAME (1))
3216 && OP_SAME_WITH_NULL (2)
3217 && OP_SAME_WITH_NULL (3));
3219 case COMPONENT_REF:
3220 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3221 may be NULL when we're called to compare MEM_EXPRs. */
3222 return OP_SAME_WITH_NULL (0)
3223 && OP_SAME (1)
3224 && OP_SAME_WITH_NULL (2);
3226 case BIT_FIELD_REF:
3227 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3229 default:
3230 return 0;
3233 case tcc_expression:
3234 switch (TREE_CODE (arg0))
3236 case ADDR_EXPR:
3237 case TRUTH_NOT_EXPR:
3238 return OP_SAME (0);
3240 case TRUTH_ANDIF_EXPR:
3241 case TRUTH_ORIF_EXPR:
3242 return OP_SAME (0) && OP_SAME (1);
3244 case TRUTH_AND_EXPR:
3245 case TRUTH_OR_EXPR:
3246 case TRUTH_XOR_EXPR:
3247 if (OP_SAME (0) && OP_SAME (1))
3248 return 1;
3250 /* Otherwise take into account this is a commutative operation. */
3251 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3252 TREE_OPERAND (arg1, 1), flags)
3253 && operand_equal_p (TREE_OPERAND (arg0, 1),
3254 TREE_OPERAND (arg1, 0), flags));
3256 case COND_EXPR:
3257 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3259 default:
3260 return 0;
3263 case tcc_vl_exp:
3264 switch (TREE_CODE (arg0))
3266 case CALL_EXPR:
3267 /* If the CALL_EXPRs call different functions, then they
3268 clearly can not be equal. */
3269 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3270 flags))
3271 return 0;
3274 unsigned int cef = call_expr_flags (arg0);
3275 if (flags & OEP_PURE_SAME)
3276 cef &= ECF_CONST | ECF_PURE;
3277 else
3278 cef &= ECF_CONST;
3279 if (!cef)
3280 return 0;
3283 /* Now see if all the arguments are the same. */
3285 const_call_expr_arg_iterator iter0, iter1;
3286 const_tree a0, a1;
3287 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3288 a1 = first_const_call_expr_arg (arg1, &iter1);
3289 a0 && a1;
3290 a0 = next_const_call_expr_arg (&iter0),
3291 a1 = next_const_call_expr_arg (&iter1))
3292 if (! operand_equal_p (a0, a1, flags))
3293 return 0;
3295 /* If we get here and both argument lists are exhausted
3296 then the CALL_EXPRs are equal. */
3297 return ! (a0 || a1);
3299 default:
3300 return 0;
3303 case tcc_declaration:
3304 /* Consider __builtin_sqrt equal to sqrt. */
3305 return (TREE_CODE (arg0) == FUNCTION_DECL
3306 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3307 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3308 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3310 default:
3311 return 0;
3314 #undef OP_SAME
3315 #undef OP_SAME_WITH_NULL
3318 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3319 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3321 When in doubt, return 0. */
3323 static int
3324 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3326 int unsignedp1, unsignedpo;
3327 tree primarg0, primarg1, primother;
3328 unsigned int correct_width;
3330 if (operand_equal_p (arg0, arg1, 0))
3331 return 1;
3333 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3334 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3335 return 0;
3337 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3338 and see if the inner values are the same. This removes any
3339 signedness comparison, which doesn't matter here. */
3340 primarg0 = arg0, primarg1 = arg1;
3341 STRIP_NOPS (primarg0);
3342 STRIP_NOPS (primarg1);
3343 if (operand_equal_p (primarg0, primarg1, 0))
3344 return 1;
3346 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3347 actual comparison operand, ARG0.
3349 First throw away any conversions to wider types
3350 already present in the operands. */
3352 primarg1 = get_narrower (arg1, &unsignedp1);
3353 primother = get_narrower (other, &unsignedpo);
3355 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3356 if (unsignedp1 == unsignedpo
3357 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3358 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3360 tree type = TREE_TYPE (arg0);
3362 /* Make sure shorter operand is extended the right way
3363 to match the longer operand. */
3364 primarg1 = fold_convert (signed_or_unsigned_type_for
3365 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3367 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3368 return 1;
3371 return 0;
3374 /* See if ARG is an expression that is either a comparison or is performing
3375 arithmetic on comparisons. The comparisons must only be comparing
3376 two different values, which will be stored in *CVAL1 and *CVAL2; if
3377 they are nonzero it means that some operands have already been found.
3378 No variables may be used anywhere else in the expression except in the
3379 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3380 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3382 If this is true, return 1. Otherwise, return zero. */
3384 static int
3385 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3387 enum tree_code code = TREE_CODE (arg);
3388 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3390 /* We can handle some of the tcc_expression cases here. */
3391 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3392 tclass = tcc_unary;
3393 else if (tclass == tcc_expression
3394 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3395 || code == COMPOUND_EXPR))
3396 tclass = tcc_binary;
3398 else if (tclass == tcc_expression && code == SAVE_EXPR
3399 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3401 /* If we've already found a CVAL1 or CVAL2, this expression is
3402 two complex to handle. */
3403 if (*cval1 || *cval2)
3404 return 0;
3406 tclass = tcc_unary;
3407 *save_p = 1;
3410 switch (tclass)
3412 case tcc_unary:
3413 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3415 case tcc_binary:
3416 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3417 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3418 cval1, cval2, save_p));
3420 case tcc_constant:
3421 return 1;
3423 case tcc_expression:
3424 if (code == COND_EXPR)
3425 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3426 cval1, cval2, save_p)
3427 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3428 cval1, cval2, save_p)
3429 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3430 cval1, cval2, save_p));
3431 return 0;
3433 case tcc_comparison:
3434 /* First see if we can handle the first operand, then the second. For
3435 the second operand, we know *CVAL1 can't be zero. It must be that
3436 one side of the comparison is each of the values; test for the
3437 case where this isn't true by failing if the two operands
3438 are the same. */
3440 if (operand_equal_p (TREE_OPERAND (arg, 0),
3441 TREE_OPERAND (arg, 1), 0))
3442 return 0;
3444 if (*cval1 == 0)
3445 *cval1 = TREE_OPERAND (arg, 0);
3446 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3448 else if (*cval2 == 0)
3449 *cval2 = TREE_OPERAND (arg, 0);
3450 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3452 else
3453 return 0;
3455 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3457 else if (*cval2 == 0)
3458 *cval2 = TREE_OPERAND (arg, 1);
3459 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3461 else
3462 return 0;
3464 return 1;
3466 default:
3467 return 0;
3471 /* ARG is a tree that is known to contain just arithmetic operations and
3472 comparisons. Evaluate the operations in the tree substituting NEW0 for
3473 any occurrence of OLD0 as an operand of a comparison and likewise for
3474 NEW1 and OLD1. */
3476 static tree
3477 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3479 tree type = TREE_TYPE (arg);
3480 enum tree_code code = TREE_CODE (arg);
3481 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3483 /* We can handle some of the tcc_expression cases here. */
3484 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3485 tclass = tcc_unary;
3486 else if (tclass == tcc_expression
3487 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3488 tclass = tcc_binary;
3490 switch (tclass)
3492 case tcc_unary:
3493 return fold_build1 (code, type,
3494 eval_subst (TREE_OPERAND (arg, 0),
3495 old0, new0, old1, new1));
3497 case tcc_binary:
3498 return fold_build2 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1),
3501 eval_subst (TREE_OPERAND (arg, 1),
3502 old0, new0, old1, new1));
3504 case tcc_expression:
3505 switch (code)
3507 case SAVE_EXPR:
3508 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3510 case COMPOUND_EXPR:
3511 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3513 case COND_EXPR:
3514 return fold_build3 (code, type,
3515 eval_subst (TREE_OPERAND (arg, 0),
3516 old0, new0, old1, new1),
3517 eval_subst (TREE_OPERAND (arg, 1),
3518 old0, new0, old1, new1),
3519 eval_subst (TREE_OPERAND (arg, 2),
3520 old0, new0, old1, new1));
3521 default:
3522 break;
3524 /* Fall through - ??? */
3526 case tcc_comparison:
3528 tree arg0 = TREE_OPERAND (arg, 0);
3529 tree arg1 = TREE_OPERAND (arg, 1);
3531 /* We need to check both for exact equality and tree equality. The
3532 former will be true if the operand has a side-effect. In that
3533 case, we know the operand occurred exactly once. */
3535 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3536 arg0 = new0;
3537 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3538 arg0 = new1;
3540 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3541 arg1 = new0;
3542 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3543 arg1 = new1;
3545 return fold_build2 (code, type, arg0, arg1);
3548 default:
3549 return arg;
3553 /* Return a tree for the case when the result of an expression is RESULT
3554 converted to TYPE and OMITTED was previously an operand of the expression
3555 but is now not needed (e.g., we folded OMITTED * 0).
3557 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3558 the conversion of RESULT to TYPE. */
3560 tree
3561 omit_one_operand (tree type, tree result, tree omitted)
3563 tree t = fold_convert (type, result);
3565 /* If the resulting operand is an empty statement, just return the omitted
3566 statement casted to void. */
3567 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3568 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3570 if (TREE_SIDE_EFFECTS (omitted))
3571 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3573 return non_lvalue (t);
3576 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3578 static tree
3579 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3581 tree t = fold_convert (type, result);
3583 /* If the resulting operand is an empty statement, just return the omitted
3584 statement casted to void. */
3585 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3586 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3588 if (TREE_SIDE_EFFECTS (omitted))
3589 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3591 return pedantic_non_lvalue (t);
3594 /* Return a tree for the case when the result of an expression is RESULT
3595 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3596 of the expression but are now not needed.
3598 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3599 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3600 evaluated before OMITTED2. Otherwise, if neither has side effects,
3601 just do the conversion of RESULT to TYPE. */
3603 tree
3604 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3606 tree t = fold_convert (type, result);
3608 if (TREE_SIDE_EFFECTS (omitted2))
3609 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3610 if (TREE_SIDE_EFFECTS (omitted1))
3611 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3613 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3617 /* Return a simplified tree node for the truth-negation of ARG. This
3618 never alters ARG itself. We assume that ARG is an operation that
3619 returns a truth value (0 or 1).
3621 FIXME: one would think we would fold the result, but it causes
3622 problems with the dominator optimizer. */
3624 tree
3625 fold_truth_not_expr (tree arg)
3627 tree type = TREE_TYPE (arg);
3628 enum tree_code code = TREE_CODE (arg);
3630 /* If this is a comparison, we can simply invert it, except for
3631 floating-point non-equality comparisons, in which case we just
3632 enclose a TRUTH_NOT_EXPR around what we have. */
3634 if (TREE_CODE_CLASS (code) == tcc_comparison)
3636 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3637 if (FLOAT_TYPE_P (op_type)
3638 && flag_trapping_math
3639 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3640 && code != NE_EXPR && code != EQ_EXPR)
3641 return NULL_TREE;
3642 else
3644 code = invert_tree_comparison (code,
3645 HONOR_NANS (TYPE_MODE (op_type)));
3646 if (code == ERROR_MARK)
3647 return NULL_TREE;
3648 else
3649 return build2 (code, type,
3650 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3654 switch (code)
3656 case INTEGER_CST:
3657 return constant_boolean_node (integer_zerop (arg), type);
3659 case TRUTH_AND_EXPR:
3660 return build2 (TRUTH_OR_EXPR, type,
3661 invert_truthvalue (TREE_OPERAND (arg, 0)),
3662 invert_truthvalue (TREE_OPERAND (arg, 1)));
3664 case TRUTH_OR_EXPR:
3665 return build2 (TRUTH_AND_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3669 case TRUTH_XOR_EXPR:
3670 /* Here we can invert either operand. We invert the first operand
3671 unless the second operand is a TRUTH_NOT_EXPR in which case our
3672 result is the XOR of the first operand with the inside of the
3673 negation of the second operand. */
3675 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3676 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3677 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3678 else
3679 return build2 (TRUTH_XOR_EXPR, type,
3680 invert_truthvalue (TREE_OPERAND (arg, 0)),
3681 TREE_OPERAND (arg, 1));
3683 case TRUTH_ANDIF_EXPR:
3684 return build2 (TRUTH_ORIF_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 invert_truthvalue (TREE_OPERAND (arg, 1)));
3688 case TRUTH_ORIF_EXPR:
3689 return build2 (TRUTH_ANDIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_NOT_EXPR:
3694 return TREE_OPERAND (arg, 0);
3696 case COND_EXPR:
3698 tree arg1 = TREE_OPERAND (arg, 1);
3699 tree arg2 = TREE_OPERAND (arg, 2);
3700 /* A COND_EXPR may have a throw as one operand, which
3701 then has void type. Just leave void operands
3702 as they are. */
3703 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3704 VOID_TYPE_P (TREE_TYPE (arg1))
3705 ? arg1 : invert_truthvalue (arg1),
3706 VOID_TYPE_P (TREE_TYPE (arg2))
3707 ? arg2 : invert_truthvalue (arg2));
3710 case COMPOUND_EXPR:
3711 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3712 invert_truthvalue (TREE_OPERAND (arg, 1)));
3714 case NON_LVALUE_EXPR:
3715 return invert_truthvalue (TREE_OPERAND (arg, 0));
3717 case NOP_EXPR:
3718 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3719 return build1 (TRUTH_NOT_EXPR, type, arg);
3721 case CONVERT_EXPR:
3722 case FLOAT_EXPR:
3723 return build1 (TREE_CODE (arg), type,
3724 invert_truthvalue (TREE_OPERAND (arg, 0)));
3726 case BIT_AND_EXPR:
3727 if (!integer_onep (TREE_OPERAND (arg, 1)))
3728 break;
3729 return build2 (EQ_EXPR, type, arg,
3730 build_int_cst (type, 0));
3732 case SAVE_EXPR:
3733 return build1 (TRUTH_NOT_EXPR, type, arg);
3735 case CLEANUP_POINT_EXPR:
3736 return build1 (CLEANUP_POINT_EXPR, type,
3737 invert_truthvalue (TREE_OPERAND (arg, 0)));
3739 default:
3740 break;
3743 return NULL_TREE;
3746 /* Return a simplified tree node for the truth-negation of ARG. This
3747 never alters ARG itself. We assume that ARG is an operation that
3748 returns a truth value (0 or 1).
3750 FIXME: one would think we would fold the result, but it causes
3751 problems with the dominator optimizer. */
3753 tree
3754 invert_truthvalue (tree arg)
3756 tree tem;
3758 if (TREE_CODE (arg) == ERROR_MARK)
3759 return arg;
3761 tem = fold_truth_not_expr (arg);
3762 if (!tem)
3763 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3765 return tem;
3768 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3769 operands are another bit-wise operation with a common input. If so,
3770 distribute the bit operations to save an operation and possibly two if
3771 constants are involved. For example, convert
3772 (A | B) & (A | C) into A | (B & C)
3773 Further simplification will occur if B and C are constants.
3775 If this optimization cannot be done, 0 will be returned. */
3777 static tree
3778 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3780 tree common;
3781 tree left, right;
3783 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3784 || TREE_CODE (arg0) == code
3785 || (TREE_CODE (arg0) != BIT_AND_EXPR
3786 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3787 return 0;
3789 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3791 common = TREE_OPERAND (arg0, 0);
3792 left = TREE_OPERAND (arg0, 1);
3793 right = TREE_OPERAND (arg1, 1);
3795 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3797 common = TREE_OPERAND (arg0, 0);
3798 left = TREE_OPERAND (arg0, 1);
3799 right = TREE_OPERAND (arg1, 0);
3801 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3803 common = TREE_OPERAND (arg0, 1);
3804 left = TREE_OPERAND (arg0, 0);
3805 right = TREE_OPERAND (arg1, 1);
3807 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3809 common = TREE_OPERAND (arg0, 1);
3810 left = TREE_OPERAND (arg0, 0);
3811 right = TREE_OPERAND (arg1, 0);
3813 else
3814 return 0;
3816 common = fold_convert (type, common);
3817 left = fold_convert (type, left);
3818 right = fold_convert (type, right);
3819 return fold_build2 (TREE_CODE (arg0), type, common,
3820 fold_build2 (code, type, left, right));
3823 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3824 with code CODE. This optimization is unsafe. */
3825 static tree
3826 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3828 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3829 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3831 /* (A / C) +- (B / C) -> (A +- B) / C. */
3832 if (mul0 == mul1
3833 && operand_equal_p (TREE_OPERAND (arg0, 1),
3834 TREE_OPERAND (arg1, 1), 0))
3835 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3836 fold_build2 (code, type,
3837 TREE_OPERAND (arg0, 0),
3838 TREE_OPERAND (arg1, 0)),
3839 TREE_OPERAND (arg0, 1));
3841 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3842 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3843 TREE_OPERAND (arg1, 0), 0)
3844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3845 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3847 REAL_VALUE_TYPE r0, r1;
3848 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3849 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3850 if (!mul0)
3851 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3852 if (!mul1)
3853 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3854 real_arithmetic (&r0, code, &r0, &r1);
3855 return fold_build2 (MULT_EXPR, type,
3856 TREE_OPERAND (arg0, 0),
3857 build_real (type, r0));
3860 return NULL_TREE;
3863 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3864 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3866 static tree
3867 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3868 HOST_WIDE_INT bitpos, int unsignedp)
3870 tree result, bftype;
3872 if (bitpos == 0)
3874 tree size = TYPE_SIZE (TREE_TYPE (inner));
3875 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3876 || POINTER_TYPE_P (TREE_TYPE (inner)))
3877 && host_integerp (size, 0)
3878 && tree_low_cst (size, 0) == bitsize)
3879 return fold_convert (type, inner);
3882 bftype = type;
3883 if (TYPE_PRECISION (bftype) != bitsize
3884 || TYPE_UNSIGNED (bftype) == !unsignedp)
3885 bftype = build_nonstandard_integer_type (bitsize, 0);
3887 result = build3 (BIT_FIELD_REF, bftype, inner,
3888 size_int (bitsize), bitsize_int (bitpos));
3890 if (bftype != type)
3891 result = fold_convert (type, result);
3893 return result;
3896 /* Optimize a bit-field compare.
3898 There are two cases: First is a compare against a constant and the
3899 second is a comparison of two items where the fields are at the same
3900 bit position relative to the start of a chunk (byte, halfword, word)
3901 large enough to contain it. In these cases we can avoid the shift
3902 implicit in bitfield extractions.
3904 For constants, we emit a compare of the shifted constant with the
3905 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3906 compared. For two fields at the same position, we do the ANDs with the
3907 similar mask and compare the result of the ANDs.
3909 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3910 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3911 are the left and right operands of the comparison, respectively.
3913 If the optimization described above can be done, we return the resulting
3914 tree. Otherwise we return zero. */
3916 static tree
3917 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3918 tree lhs, tree rhs)
3920 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3921 tree type = TREE_TYPE (lhs);
3922 tree signed_type, unsigned_type;
3923 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3924 enum machine_mode lmode, rmode, nmode;
3925 int lunsignedp, runsignedp;
3926 int lvolatilep = 0, rvolatilep = 0;
3927 tree linner, rinner = NULL_TREE;
3928 tree mask;
3929 tree offset;
3931 /* Get all the information about the extractions being done. If the bit size
3932 if the same as the size of the underlying object, we aren't doing an
3933 extraction at all and so can do nothing. We also don't want to
3934 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3935 then will no longer be able to replace it. */
3936 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3937 &lunsignedp, &lvolatilep, false);
3938 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3939 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3940 return 0;
3942 if (!const_p)
3944 /* If this is not a constant, we can only do something if bit positions,
3945 sizes, and signedness are the same. */
3946 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3947 &runsignedp, &rvolatilep, false);
3949 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3950 || lunsignedp != runsignedp || offset != 0
3951 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3952 return 0;
3955 /* See if we can find a mode to refer to this field. We should be able to,
3956 but fail if we can't. */
3957 nmode = get_best_mode (lbitsize, lbitpos,
3958 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3959 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3960 TYPE_ALIGN (TREE_TYPE (rinner))),
3961 word_mode, lvolatilep || rvolatilep);
3962 if (nmode == VOIDmode)
3963 return 0;
3965 /* Set signed and unsigned types of the precision of this mode for the
3966 shifts below. */
3967 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3968 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3970 /* Compute the bit position and size for the new reference and our offset
3971 within it. If the new reference is the same size as the original, we
3972 won't optimize anything, so return zero. */
3973 nbitsize = GET_MODE_BITSIZE (nmode);
3974 nbitpos = lbitpos & ~ (nbitsize - 1);
3975 lbitpos -= nbitpos;
3976 if (nbitsize == lbitsize)
3977 return 0;
3979 if (BYTES_BIG_ENDIAN)
3980 lbitpos = nbitsize - lbitsize - lbitpos;
3982 /* Make the mask to be used against the extracted field. */
3983 mask = build_int_cst_type (unsigned_type, -1);
3984 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3985 mask = const_binop (RSHIFT_EXPR, mask,
3986 size_int (nbitsize - lbitsize - lbitpos), 0);
3988 if (! const_p)
3989 /* If not comparing with constant, just rework the comparison
3990 and return. */
3991 return fold_build2 (code, compare_type,
3992 fold_build2 (BIT_AND_EXPR, unsigned_type,
3993 make_bit_field_ref (linner,
3994 unsigned_type,
3995 nbitsize, nbitpos,
3997 mask),
3998 fold_build2 (BIT_AND_EXPR, unsigned_type,
3999 make_bit_field_ref (rinner,
4000 unsigned_type,
4001 nbitsize, nbitpos,
4003 mask));
4005 /* Otherwise, we are handling the constant case. See if the constant is too
4006 big for the field. Warn and return a tree of for 0 (false) if so. We do
4007 this not only for its own sake, but to avoid having to test for this
4008 error case below. If we didn't, we might generate wrong code.
4010 For unsigned fields, the constant shifted right by the field length should
4011 be all zero. For signed fields, the high-order bits should agree with
4012 the sign bit. */
4014 if (lunsignedp)
4016 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4017 fold_convert (unsigned_type, rhs),
4018 size_int (lbitsize), 0)))
4020 warning (0, "comparison is always %d due to width of bit-field",
4021 code == NE_EXPR);
4022 return constant_boolean_node (code == NE_EXPR, compare_type);
4025 else
4027 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4028 size_int (lbitsize - 1), 0);
4029 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4031 warning (0, "comparison is always %d due to width of bit-field",
4032 code == NE_EXPR);
4033 return constant_boolean_node (code == NE_EXPR, compare_type);
4037 /* Single-bit compares should always be against zero. */
4038 if (lbitsize == 1 && ! integer_zerop (rhs))
4040 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4041 rhs = build_int_cst (type, 0);
4044 /* Make a new bitfield reference, shift the constant over the
4045 appropriate number of bits and mask it with the computed mask
4046 (in case this was a signed field). If we changed it, make a new one. */
4047 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4048 if (lvolatilep)
4050 TREE_SIDE_EFFECTS (lhs) = 1;
4051 TREE_THIS_VOLATILE (lhs) = 1;
4054 rhs = const_binop (BIT_AND_EXPR,
4055 const_binop (LSHIFT_EXPR,
4056 fold_convert (unsigned_type, rhs),
4057 size_int (lbitpos), 0),
4058 mask, 0);
4060 return build2 (code, compare_type,
4061 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4062 rhs);
4065 /* Subroutine for fold_truthop: decode a field reference.
4067 If EXP is a comparison reference, we return the innermost reference.
4069 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4070 set to the starting bit number.
4072 If the innermost field can be completely contained in a mode-sized
4073 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4075 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4076 otherwise it is not changed.
4078 *PUNSIGNEDP is set to the signedness of the field.
4080 *PMASK is set to the mask used. This is either contained in a
4081 BIT_AND_EXPR or derived from the width of the field.
4083 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4085 Return 0 if this is not a component reference or is one that we can't
4086 do anything with. */
4088 static tree
4089 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4090 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4091 int *punsignedp, int *pvolatilep,
4092 tree *pmask, tree *pand_mask)
4094 tree outer_type = 0;
4095 tree and_mask = 0;
4096 tree mask, inner, offset;
4097 tree unsigned_type;
4098 unsigned int precision;
4100 /* All the optimizations using this function assume integer fields.
4101 There are problems with FP fields since the type_for_size call
4102 below can fail for, e.g., XFmode. */
4103 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4104 return 0;
4106 /* We are interested in the bare arrangement of bits, so strip everything
4107 that doesn't affect the machine mode. However, record the type of the
4108 outermost expression if it may matter below. */
4109 if (CONVERT_EXPR_P (exp)
4110 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4111 outer_type = TREE_TYPE (exp);
4112 STRIP_NOPS (exp);
4114 if (TREE_CODE (exp) == BIT_AND_EXPR)
4116 and_mask = TREE_OPERAND (exp, 1);
4117 exp = TREE_OPERAND (exp, 0);
4118 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4119 if (TREE_CODE (and_mask) != INTEGER_CST)
4120 return 0;
4123 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4124 punsignedp, pvolatilep, false);
4125 if ((inner == exp && and_mask == 0)
4126 || *pbitsize < 0 || offset != 0
4127 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4128 return 0;
4130 /* If the number of bits in the reference is the same as the bitsize of
4131 the outer type, then the outer type gives the signedness. Otherwise
4132 (in case of a small bitfield) the signedness is unchanged. */
4133 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4134 *punsignedp = TYPE_UNSIGNED (outer_type);
4136 /* Compute the mask to access the bitfield. */
4137 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4138 precision = TYPE_PRECISION (unsigned_type);
4140 mask = build_int_cst_type (unsigned_type, -1);
4142 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4143 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4145 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4146 if (and_mask != 0)
4147 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4148 fold_convert (unsigned_type, and_mask), mask);
4150 *pmask = mask;
4151 *pand_mask = and_mask;
4152 return inner;
4155 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4156 bit positions. */
4158 static int
4159 all_ones_mask_p (const_tree mask, int size)
4161 tree type = TREE_TYPE (mask);
4162 unsigned int precision = TYPE_PRECISION (type);
4163 tree tmask;
4165 tmask = build_int_cst_type (signed_type_for (type), -1);
4167 return
4168 tree_int_cst_equal (mask,
4169 const_binop (RSHIFT_EXPR,
4170 const_binop (LSHIFT_EXPR, tmask,
4171 size_int (precision - size),
4173 size_int (precision - size), 0));
4176 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4177 represents the sign bit of EXP's type. If EXP represents a sign
4178 or zero extension, also test VAL against the unextended type.
4179 The return value is the (sub)expression whose sign bit is VAL,
4180 or NULL_TREE otherwise. */
4182 static tree
4183 sign_bit_p (tree exp, const_tree val)
4185 unsigned HOST_WIDE_INT mask_lo, lo;
4186 HOST_WIDE_INT mask_hi, hi;
4187 int width;
4188 tree t;
4190 /* Tree EXP must have an integral type. */
4191 t = TREE_TYPE (exp);
4192 if (! INTEGRAL_TYPE_P (t))
4193 return NULL_TREE;
4195 /* Tree VAL must be an integer constant. */
4196 if (TREE_CODE (val) != INTEGER_CST
4197 || TREE_OVERFLOW (val))
4198 return NULL_TREE;
4200 width = TYPE_PRECISION (t);
4201 if (width > HOST_BITS_PER_WIDE_INT)
4203 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4204 lo = 0;
4206 mask_hi = ((unsigned HOST_WIDE_INT) -1
4207 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4208 mask_lo = -1;
4210 else
4212 hi = 0;
4213 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4215 mask_hi = 0;
4216 mask_lo = ((unsigned HOST_WIDE_INT) -1
4217 >> (HOST_BITS_PER_WIDE_INT - width));
4220 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4221 treat VAL as if it were unsigned. */
4222 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4223 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4224 return exp;
4226 /* Handle extension from a narrower type. */
4227 if (TREE_CODE (exp) == NOP_EXPR
4228 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4229 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4231 return NULL_TREE;
4234 /* Subroutine for fold_truthop: determine if an operand is simple enough
4235 to be evaluated unconditionally. */
4237 static int
4238 simple_operand_p (const_tree exp)
4240 /* Strip any conversions that don't change the machine mode. */
4241 STRIP_NOPS (exp);
4243 return (CONSTANT_CLASS_P (exp)
4244 || TREE_CODE (exp) == SSA_NAME
4245 || (DECL_P (exp)
4246 && ! TREE_ADDRESSABLE (exp)
4247 && ! TREE_THIS_VOLATILE (exp)
4248 && ! DECL_NONLOCAL (exp)
4249 /* Don't regard global variables as simple. They may be
4250 allocated in ways unknown to the compiler (shared memory,
4251 #pragma weak, etc). */
4252 && ! TREE_PUBLIC (exp)
4253 && ! DECL_EXTERNAL (exp)
4254 /* Loading a static variable is unduly expensive, but global
4255 registers aren't expensive. */
4256 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4259 /* The following functions are subroutines to fold_range_test and allow it to
4260 try to change a logical combination of comparisons into a range test.
4262 For example, both
4263 X == 2 || X == 3 || X == 4 || X == 5
4265 X >= 2 && X <= 5
4266 are converted to
4267 (unsigned) (X - 2) <= 3
4269 We describe each set of comparisons as being either inside or outside
4270 a range, using a variable named like IN_P, and then describe the
4271 range with a lower and upper bound. If one of the bounds is omitted,
4272 it represents either the highest or lowest value of the type.
4274 In the comments below, we represent a range by two numbers in brackets
4275 preceded by a "+" to designate being inside that range, or a "-" to
4276 designate being outside that range, so the condition can be inverted by
4277 flipping the prefix. An omitted bound is represented by a "-". For
4278 example, "- [-, 10]" means being outside the range starting at the lowest
4279 possible value and ending at 10, in other words, being greater than 10.
4280 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4281 always false.
4283 We set up things so that the missing bounds are handled in a consistent
4284 manner so neither a missing bound nor "true" and "false" need to be
4285 handled using a special case. */
4287 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4288 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4289 and UPPER1_P are nonzero if the respective argument is an upper bound
4290 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4291 must be specified for a comparison. ARG1 will be converted to ARG0's
4292 type if both are specified. */
4294 static tree
4295 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4296 tree arg1, int upper1_p)
4298 tree tem;
4299 int result;
4300 int sgn0, sgn1;
4302 /* If neither arg represents infinity, do the normal operation.
4303 Else, if not a comparison, return infinity. Else handle the special
4304 comparison rules. Note that most of the cases below won't occur, but
4305 are handled for consistency. */
4307 if (arg0 != 0 && arg1 != 0)
4309 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4310 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4311 STRIP_NOPS (tem);
4312 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4315 if (TREE_CODE_CLASS (code) != tcc_comparison)
4316 return 0;
4318 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4319 for neither. In real maths, we cannot assume open ended ranges are
4320 the same. But, this is computer arithmetic, where numbers are finite.
4321 We can therefore make the transformation of any unbounded range with
4322 the value Z, Z being greater than any representable number. This permits
4323 us to treat unbounded ranges as equal. */
4324 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4325 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4326 switch (code)
4328 case EQ_EXPR:
4329 result = sgn0 == sgn1;
4330 break;
4331 case NE_EXPR:
4332 result = sgn0 != sgn1;
4333 break;
4334 case LT_EXPR:
4335 result = sgn0 < sgn1;
4336 break;
4337 case LE_EXPR:
4338 result = sgn0 <= sgn1;
4339 break;
4340 case GT_EXPR:
4341 result = sgn0 > sgn1;
4342 break;
4343 case GE_EXPR:
4344 result = sgn0 >= sgn1;
4345 break;
4346 default:
4347 gcc_unreachable ();
4350 return constant_boolean_node (result, type);
4353 /* Given EXP, a logical expression, set the range it is testing into
4354 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4355 actually being tested. *PLOW and *PHIGH will be made of the same
4356 type as the returned expression. If EXP is not a comparison, we
4357 will most likely not be returning a useful value and range. Set
4358 *STRICT_OVERFLOW_P to true if the return value is only valid
4359 because signed overflow is undefined; otherwise, do not change
4360 *STRICT_OVERFLOW_P. */
4362 static tree
4363 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4364 bool *strict_overflow_p)
4366 enum tree_code code;
4367 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4368 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4369 int in_p, n_in_p;
4370 tree low, high, n_low, n_high;
4372 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4373 and see if we can refine the range. Some of the cases below may not
4374 happen, but it doesn't seem worth worrying about this. We "continue"
4375 the outer loop when we've changed something; otherwise we "break"
4376 the switch, which will "break" the while. */
4378 in_p = 0;
4379 low = high = build_int_cst (TREE_TYPE (exp), 0);
4381 while (1)
4383 code = TREE_CODE (exp);
4384 exp_type = TREE_TYPE (exp);
4386 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4388 if (TREE_OPERAND_LENGTH (exp) > 0)
4389 arg0 = TREE_OPERAND (exp, 0);
4390 if (TREE_CODE_CLASS (code) == tcc_comparison
4391 || TREE_CODE_CLASS (code) == tcc_unary
4392 || TREE_CODE_CLASS (code) == tcc_binary)
4393 arg0_type = TREE_TYPE (arg0);
4394 if (TREE_CODE_CLASS (code) == tcc_binary
4395 || TREE_CODE_CLASS (code) == tcc_comparison
4396 || (TREE_CODE_CLASS (code) == tcc_expression
4397 && TREE_OPERAND_LENGTH (exp) > 1))
4398 arg1 = TREE_OPERAND (exp, 1);
4401 switch (code)
4403 case TRUTH_NOT_EXPR:
4404 in_p = ! in_p, exp = arg0;
4405 continue;
4407 case EQ_EXPR: case NE_EXPR:
4408 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4409 /* We can only do something if the range is testing for zero
4410 and if the second operand is an integer constant. Note that
4411 saying something is "in" the range we make is done by
4412 complementing IN_P since it will set in the initial case of
4413 being not equal to zero; "out" is leaving it alone. */
4414 if (low == 0 || high == 0
4415 || ! integer_zerop (low) || ! integer_zerop (high)
4416 || TREE_CODE (arg1) != INTEGER_CST)
4417 break;
4419 switch (code)
4421 case NE_EXPR: /* - [c, c] */
4422 low = high = arg1;
4423 break;
4424 case EQ_EXPR: /* + [c, c] */
4425 in_p = ! in_p, low = high = arg1;
4426 break;
4427 case GT_EXPR: /* - [-, c] */
4428 low = 0, high = arg1;
4429 break;
4430 case GE_EXPR: /* + [c, -] */
4431 in_p = ! in_p, low = arg1, high = 0;
4432 break;
4433 case LT_EXPR: /* - [c, -] */
4434 low = arg1, high = 0;
4435 break;
4436 case LE_EXPR: /* + [-, c] */
4437 in_p = ! in_p, low = 0, high = arg1;
4438 break;
4439 default:
4440 gcc_unreachable ();
4443 /* If this is an unsigned comparison, we also know that EXP is
4444 greater than or equal to zero. We base the range tests we make
4445 on that fact, so we record it here so we can parse existing
4446 range tests. We test arg0_type since often the return type
4447 of, e.g. EQ_EXPR, is boolean. */
4448 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4450 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4451 in_p, low, high, 1,
4452 build_int_cst (arg0_type, 0),
4453 NULL_TREE))
4454 break;
4456 in_p = n_in_p, low = n_low, high = n_high;
4458 /* If the high bound is missing, but we have a nonzero low
4459 bound, reverse the range so it goes from zero to the low bound
4460 minus 1. */
4461 if (high == 0 && low && ! integer_zerop (low))
4463 in_p = ! in_p;
4464 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4465 integer_one_node, 0);
4466 low = build_int_cst (arg0_type, 0);
4470 exp = arg0;
4471 continue;
4473 case NEGATE_EXPR:
4474 /* (-x) IN [a,b] -> x in [-b, -a] */
4475 n_low = range_binop (MINUS_EXPR, exp_type,
4476 build_int_cst (exp_type, 0),
4477 0, high, 1);
4478 n_high = range_binop (MINUS_EXPR, exp_type,
4479 build_int_cst (exp_type, 0),
4480 0, low, 0);
4481 low = n_low, high = n_high;
4482 exp = arg0;
4483 continue;
4485 case BIT_NOT_EXPR:
4486 /* ~ X -> -X - 1 */
4487 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4488 build_int_cst (exp_type, 1));
4489 continue;
4491 case PLUS_EXPR: case MINUS_EXPR:
4492 if (TREE_CODE (arg1) != INTEGER_CST)
4493 break;
4495 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4496 move a constant to the other side. */
4497 if (!TYPE_UNSIGNED (arg0_type)
4498 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4499 break;
4501 /* If EXP is signed, any overflow in the computation is undefined,
4502 so we don't worry about it so long as our computations on
4503 the bounds don't overflow. For unsigned, overflow is defined
4504 and this is exactly the right thing. */
4505 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4506 arg0_type, low, 0, arg1, 0);
4507 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4508 arg0_type, high, 1, arg1, 0);
4509 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4510 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4511 break;
4513 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4514 *strict_overflow_p = true;
4516 /* Check for an unsigned range which has wrapped around the maximum
4517 value thus making n_high < n_low, and normalize it. */
4518 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4520 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4521 integer_one_node, 0);
4522 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4523 integer_one_node, 0);
4525 /* If the range is of the form +/- [ x+1, x ], we won't
4526 be able to normalize it. But then, it represents the
4527 whole range or the empty set, so make it
4528 +/- [ -, - ]. */
4529 if (tree_int_cst_equal (n_low, low)
4530 && tree_int_cst_equal (n_high, high))
4531 low = high = 0;
4532 else
4533 in_p = ! in_p;
4535 else
4536 low = n_low, high = n_high;
4538 exp = arg0;
4539 continue;
4541 CASE_CONVERT: case NON_LVALUE_EXPR:
4542 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4543 break;
4545 if (! INTEGRAL_TYPE_P (arg0_type)
4546 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4547 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4548 break;
4550 n_low = low, n_high = high;
4552 if (n_low != 0)
4553 n_low = fold_convert (arg0_type, n_low);
4555 if (n_high != 0)
4556 n_high = fold_convert (arg0_type, n_high);
4559 /* If we're converting arg0 from an unsigned type, to exp,
4560 a signed type, we will be doing the comparison as unsigned.
4561 The tests above have already verified that LOW and HIGH
4562 are both positive.
4564 So we have to ensure that we will handle large unsigned
4565 values the same way that the current signed bounds treat
4566 negative values. */
4568 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4570 tree high_positive;
4571 tree equiv_type;
4572 /* For fixed-point modes, we need to pass the saturating flag
4573 as the 2nd parameter. */
4574 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4575 equiv_type = lang_hooks.types.type_for_mode
4576 (TYPE_MODE (arg0_type),
4577 TYPE_SATURATING (arg0_type));
4578 else
4579 equiv_type = lang_hooks.types.type_for_mode
4580 (TYPE_MODE (arg0_type), 1);
4582 /* A range without an upper bound is, naturally, unbounded.
4583 Since convert would have cropped a very large value, use
4584 the max value for the destination type. */
4585 high_positive
4586 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4587 : TYPE_MAX_VALUE (arg0_type);
4589 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4590 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4591 fold_convert (arg0_type,
4592 high_positive),
4593 build_int_cst (arg0_type, 1));
4595 /* If the low bound is specified, "and" the range with the
4596 range for which the original unsigned value will be
4597 positive. */
4598 if (low != 0)
4600 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4601 1, n_low, n_high, 1,
4602 fold_convert (arg0_type,
4603 integer_zero_node),
4604 high_positive))
4605 break;
4607 in_p = (n_in_p == in_p);
4609 else
4611 /* Otherwise, "or" the range with the range of the input
4612 that will be interpreted as negative. */
4613 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4614 0, n_low, n_high, 1,
4615 fold_convert (arg0_type,
4616 integer_zero_node),
4617 high_positive))
4618 break;
4620 in_p = (in_p != n_in_p);
4624 exp = arg0;
4625 low = n_low, high = n_high;
4626 continue;
4628 default:
4629 break;
4632 break;
4635 /* If EXP is a constant, we can evaluate whether this is true or false. */
4636 if (TREE_CODE (exp) == INTEGER_CST)
4638 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4639 exp, 0, low, 0))
4640 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4641 exp, 1, high, 1)));
4642 low = high = 0;
4643 exp = 0;
4646 *pin_p = in_p, *plow = low, *phigh = high;
4647 return exp;
4650 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4651 type, TYPE, return an expression to test if EXP is in (or out of, depending
4652 on IN_P) the range. Return 0 if the test couldn't be created. */
4654 static tree
4655 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4657 tree etype = TREE_TYPE (exp);
4658 tree value;
4660 #ifdef HAVE_canonicalize_funcptr_for_compare
4661 /* Disable this optimization for function pointer expressions
4662 on targets that require function pointer canonicalization. */
4663 if (HAVE_canonicalize_funcptr_for_compare
4664 && TREE_CODE (etype) == POINTER_TYPE
4665 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4666 return NULL_TREE;
4667 #endif
4669 if (! in_p)
4671 value = build_range_check (type, exp, 1, low, high);
4672 if (value != 0)
4673 return invert_truthvalue (value);
4675 return 0;
4678 if (low == 0 && high == 0)
4679 return build_int_cst (type, 1);
4681 if (low == 0)
4682 return fold_build2 (LE_EXPR, type, exp,
4683 fold_convert (etype, high));
4685 if (high == 0)
4686 return fold_build2 (GE_EXPR, type, exp,
4687 fold_convert (etype, low));
4689 if (operand_equal_p (low, high, 0))
4690 return fold_build2 (EQ_EXPR, type, exp,
4691 fold_convert (etype, low));
4693 if (integer_zerop (low))
4695 if (! TYPE_UNSIGNED (etype))
4697 etype = unsigned_type_for (etype);
4698 high = fold_convert (etype, high);
4699 exp = fold_convert (etype, exp);
4701 return build_range_check (type, exp, 1, 0, high);
4704 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4705 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4707 unsigned HOST_WIDE_INT lo;
4708 HOST_WIDE_INT hi;
4709 int prec;
4711 prec = TYPE_PRECISION (etype);
4712 if (prec <= HOST_BITS_PER_WIDE_INT)
4714 hi = 0;
4715 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4717 else
4719 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4720 lo = (unsigned HOST_WIDE_INT) -1;
4723 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4725 if (TYPE_UNSIGNED (etype))
4727 tree signed_etype = signed_type_for (etype);
4728 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4729 etype
4730 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4731 else
4732 etype = signed_etype;
4733 exp = fold_convert (etype, exp);
4735 return fold_build2 (GT_EXPR, type, exp,
4736 build_int_cst (etype, 0));
4740 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4741 This requires wrap-around arithmetics for the type of the expression. */
4742 switch (TREE_CODE (etype))
4744 case INTEGER_TYPE:
4745 /* There is no requirement that LOW be within the range of ETYPE
4746 if the latter is a subtype. It must, however, be within the base
4747 type of ETYPE. So be sure we do the subtraction in that type. */
4748 if (TREE_TYPE (etype))
4749 etype = TREE_TYPE (etype);
4750 break;
4752 case ENUMERAL_TYPE:
4753 case BOOLEAN_TYPE:
4754 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4755 TYPE_UNSIGNED (etype));
4756 break;
4758 default:
4759 break;
4762 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4763 if (TREE_CODE (etype) == INTEGER_TYPE
4764 && !TYPE_OVERFLOW_WRAPS (etype))
4766 tree utype, minv, maxv;
4768 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4769 for the type in question, as we rely on this here. */
4770 utype = unsigned_type_for (etype);
4771 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4772 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4773 integer_one_node, 1);
4774 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4776 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4777 minv, 1, maxv, 1)))
4778 etype = utype;
4779 else
4780 return 0;
4783 high = fold_convert (etype, high);
4784 low = fold_convert (etype, low);
4785 exp = fold_convert (etype, exp);
4787 value = const_binop (MINUS_EXPR, high, low, 0);
4790 if (POINTER_TYPE_P (etype))
4792 if (value != 0 && !TREE_OVERFLOW (value))
4794 low = fold_convert (sizetype, low);
4795 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4796 return build_range_check (type,
4797 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4798 1, build_int_cst (etype, 0), value);
4800 return 0;
4803 if (value != 0 && !TREE_OVERFLOW (value))
4804 return build_range_check (type,
4805 fold_build2 (MINUS_EXPR, etype, exp, low),
4806 1, build_int_cst (etype, 0), value);
4808 return 0;
4811 /* Return the predecessor of VAL in its type, handling the infinite case. */
4813 static tree
4814 range_predecessor (tree val)
4816 tree type = TREE_TYPE (val);
4818 if (INTEGRAL_TYPE_P (type)
4819 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4820 return 0;
4821 else
4822 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4825 /* Return the successor of VAL in its type, handling the infinite case. */
4827 static tree
4828 range_successor (tree val)
4830 tree type = TREE_TYPE (val);
4832 if (INTEGRAL_TYPE_P (type)
4833 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4834 return 0;
4835 else
4836 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4839 /* Given two ranges, see if we can merge them into one. Return 1 if we
4840 can, 0 if we can't. Set the output range into the specified parameters. */
4842 static int
4843 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4844 tree high0, int in1_p, tree low1, tree high1)
4846 int no_overlap;
4847 int subset;
4848 int temp;
4849 tree tem;
4850 int in_p;
4851 tree low, high;
4852 int lowequal = ((low0 == 0 && low1 == 0)
4853 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4854 low0, 0, low1, 0)));
4855 int highequal = ((high0 == 0 && high1 == 0)
4856 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4857 high0, 1, high1, 1)));
4859 /* Make range 0 be the range that starts first, or ends last if they
4860 start at the same value. Swap them if it isn't. */
4861 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4862 low0, 0, low1, 0))
4863 || (lowequal
4864 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4865 high1, 1, high0, 1))))
4867 temp = in0_p, in0_p = in1_p, in1_p = temp;
4868 tem = low0, low0 = low1, low1 = tem;
4869 tem = high0, high0 = high1, high1 = tem;
4872 /* Now flag two cases, whether the ranges are disjoint or whether the
4873 second range is totally subsumed in the first. Note that the tests
4874 below are simplified by the ones above. */
4875 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4876 high0, 1, low1, 0));
4877 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4878 high1, 1, high0, 1));
4880 /* We now have four cases, depending on whether we are including or
4881 excluding the two ranges. */
4882 if (in0_p && in1_p)
4884 /* If they don't overlap, the result is false. If the second range
4885 is a subset it is the result. Otherwise, the range is from the start
4886 of the second to the end of the first. */
4887 if (no_overlap)
4888 in_p = 0, low = high = 0;
4889 else if (subset)
4890 in_p = 1, low = low1, high = high1;
4891 else
4892 in_p = 1, low = low1, high = high0;
4895 else if (in0_p && ! in1_p)
4897 /* If they don't overlap, the result is the first range. If they are
4898 equal, the result is false. If the second range is a subset of the
4899 first, and the ranges begin at the same place, we go from just after
4900 the end of the second range to the end of the first. If the second
4901 range is not a subset of the first, or if it is a subset and both
4902 ranges end at the same place, the range starts at the start of the
4903 first range and ends just before the second range.
4904 Otherwise, we can't describe this as a single range. */
4905 if (no_overlap)
4906 in_p = 1, low = low0, high = high0;
4907 else if (lowequal && highequal)
4908 in_p = 0, low = high = 0;
4909 else if (subset && lowequal)
4911 low = range_successor (high1);
4912 high = high0;
4913 in_p = 1;
4914 if (low == 0)
4916 /* We are in the weird situation where high0 > high1 but
4917 high1 has no successor. Punt. */
4918 return 0;
4921 else if (! subset || highequal)
4923 low = low0;
4924 high = range_predecessor (low1);
4925 in_p = 1;
4926 if (high == 0)
4928 /* low0 < low1 but low1 has no predecessor. Punt. */
4929 return 0;
4932 else
4933 return 0;
4936 else if (! in0_p && in1_p)
4938 /* If they don't overlap, the result is the second range. If the second
4939 is a subset of the first, the result is false. Otherwise,
4940 the range starts just after the first range and ends at the
4941 end of the second. */
4942 if (no_overlap)
4943 in_p = 1, low = low1, high = high1;
4944 else if (subset || highequal)
4945 in_p = 0, low = high = 0;
4946 else
4948 low = range_successor (high0);
4949 high = high1;
4950 in_p = 1;
4951 if (low == 0)
4953 /* high1 > high0 but high0 has no successor. Punt. */
4954 return 0;
4959 else
4961 /* The case where we are excluding both ranges. Here the complex case
4962 is if they don't overlap. In that case, the only time we have a
4963 range is if they are adjacent. If the second is a subset of the
4964 first, the result is the first. Otherwise, the range to exclude
4965 starts at the beginning of the first range and ends at the end of the
4966 second. */
4967 if (no_overlap)
4969 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4970 range_successor (high0),
4971 1, low1, 0)))
4972 in_p = 0, low = low0, high = high1;
4973 else
4975 /* Canonicalize - [min, x] into - [-, x]. */
4976 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4977 switch (TREE_CODE (TREE_TYPE (low0)))
4979 case ENUMERAL_TYPE:
4980 if (TYPE_PRECISION (TREE_TYPE (low0))
4981 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4982 break;
4983 /* FALLTHROUGH */
4984 case INTEGER_TYPE:
4985 if (tree_int_cst_equal (low0,
4986 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4987 low0 = 0;
4988 break;
4989 case POINTER_TYPE:
4990 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4991 && integer_zerop (low0))
4992 low0 = 0;
4993 break;
4994 default:
4995 break;
4998 /* Canonicalize - [x, max] into - [x, -]. */
4999 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5000 switch (TREE_CODE (TREE_TYPE (high1)))
5002 case ENUMERAL_TYPE:
5003 if (TYPE_PRECISION (TREE_TYPE (high1))
5004 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5005 break;
5006 /* FALLTHROUGH */
5007 case INTEGER_TYPE:
5008 if (tree_int_cst_equal (high1,
5009 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5010 high1 = 0;
5011 break;
5012 case POINTER_TYPE:
5013 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5014 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5015 high1, 1,
5016 integer_one_node, 1)))
5017 high1 = 0;
5018 break;
5019 default:
5020 break;
5023 /* The ranges might be also adjacent between the maximum and
5024 minimum values of the given type. For
5025 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5026 return + [x + 1, y - 1]. */
5027 if (low0 == 0 && high1 == 0)
5029 low = range_successor (high0);
5030 high = range_predecessor (low1);
5031 if (low == 0 || high == 0)
5032 return 0;
5034 in_p = 1;
5036 else
5037 return 0;
5040 else if (subset)
5041 in_p = 0, low = low0, high = high0;
5042 else
5043 in_p = 0, low = low0, high = high1;
5046 *pin_p = in_p, *plow = low, *phigh = high;
5047 return 1;
5051 /* Subroutine of fold, looking inside expressions of the form
5052 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5053 of the COND_EXPR. This function is being used also to optimize
5054 A op B ? C : A, by reversing the comparison first.
5056 Return a folded expression whose code is not a COND_EXPR
5057 anymore, or NULL_TREE if no folding opportunity is found. */
5059 static tree
5060 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5062 enum tree_code comp_code = TREE_CODE (arg0);
5063 tree arg00 = TREE_OPERAND (arg0, 0);
5064 tree arg01 = TREE_OPERAND (arg0, 1);
5065 tree arg1_type = TREE_TYPE (arg1);
5066 tree tem;
5068 STRIP_NOPS (arg1);
5069 STRIP_NOPS (arg2);
5071 /* If we have A op 0 ? A : -A, consider applying the following
5072 transformations:
5074 A == 0? A : -A same as -A
5075 A != 0? A : -A same as A
5076 A >= 0? A : -A same as abs (A)
5077 A > 0? A : -A same as abs (A)
5078 A <= 0? A : -A same as -abs (A)
5079 A < 0? A : -A same as -abs (A)
5081 None of these transformations work for modes with signed
5082 zeros. If A is +/-0, the first two transformations will
5083 change the sign of the result (from +0 to -0, or vice
5084 versa). The last four will fix the sign of the result,
5085 even though the original expressions could be positive or
5086 negative, depending on the sign of A.
5088 Note that all these transformations are correct if A is
5089 NaN, since the two alternatives (A and -A) are also NaNs. */
5090 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5091 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5092 ? real_zerop (arg01)
5093 : integer_zerop (arg01))
5094 && ((TREE_CODE (arg2) == NEGATE_EXPR
5095 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5096 /* In the case that A is of the form X-Y, '-A' (arg2) may
5097 have already been folded to Y-X, check for that. */
5098 || (TREE_CODE (arg1) == MINUS_EXPR
5099 && TREE_CODE (arg2) == MINUS_EXPR
5100 && operand_equal_p (TREE_OPERAND (arg1, 0),
5101 TREE_OPERAND (arg2, 1), 0)
5102 && operand_equal_p (TREE_OPERAND (arg1, 1),
5103 TREE_OPERAND (arg2, 0), 0))))
5104 switch (comp_code)
5106 case EQ_EXPR:
5107 case UNEQ_EXPR:
5108 tem = fold_convert (arg1_type, arg1);
5109 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5110 case NE_EXPR:
5111 case LTGT_EXPR:
5112 return pedantic_non_lvalue (fold_convert (type, arg1));
5113 case UNGE_EXPR:
5114 case UNGT_EXPR:
5115 if (flag_trapping_math)
5116 break;
5117 /* Fall through. */
5118 case GE_EXPR:
5119 case GT_EXPR:
5120 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5121 arg1 = fold_convert (signed_type_for
5122 (TREE_TYPE (arg1)), arg1);
5123 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5124 return pedantic_non_lvalue (fold_convert (type, tem));
5125 case UNLE_EXPR:
5126 case UNLT_EXPR:
5127 if (flag_trapping_math)
5128 break;
5129 case LE_EXPR:
5130 case LT_EXPR:
5131 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5132 arg1 = fold_convert (signed_type_for
5133 (TREE_TYPE (arg1)), arg1);
5134 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5135 return negate_expr (fold_convert (type, tem));
5136 default:
5137 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5138 break;
5141 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5142 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5143 both transformations are correct when A is NaN: A != 0
5144 is then true, and A == 0 is false. */
5146 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5147 && integer_zerop (arg01) && integer_zerop (arg2))
5149 if (comp_code == NE_EXPR)
5150 return pedantic_non_lvalue (fold_convert (type, arg1));
5151 else if (comp_code == EQ_EXPR)
5152 return build_int_cst (type, 0);
5155 /* Try some transformations of A op B ? A : B.
5157 A == B? A : B same as B
5158 A != B? A : B same as A
5159 A >= B? A : B same as max (A, B)
5160 A > B? A : B same as max (B, A)
5161 A <= B? A : B same as min (A, B)
5162 A < B? A : B same as min (B, A)
5164 As above, these transformations don't work in the presence
5165 of signed zeros. For example, if A and B are zeros of
5166 opposite sign, the first two transformations will change
5167 the sign of the result. In the last four, the original
5168 expressions give different results for (A=+0, B=-0) and
5169 (A=-0, B=+0), but the transformed expressions do not.
5171 The first two transformations are correct if either A or B
5172 is a NaN. In the first transformation, the condition will
5173 be false, and B will indeed be chosen. In the case of the
5174 second transformation, the condition A != B will be true,
5175 and A will be chosen.
5177 The conversions to max() and min() are not correct if B is
5178 a number and A is not. The conditions in the original
5179 expressions will be false, so all four give B. The min()
5180 and max() versions would give a NaN instead. */
5181 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5182 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5183 /* Avoid these transformations if the COND_EXPR may be used
5184 as an lvalue in the C++ front-end. PR c++/19199. */
5185 && (in_gimple_form
5186 || (strcmp (lang_hooks.name, "GNU C++") != 0
5187 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5188 || ! maybe_lvalue_p (arg1)
5189 || ! maybe_lvalue_p (arg2)))
5191 tree comp_op0 = arg00;
5192 tree comp_op1 = arg01;
5193 tree comp_type = TREE_TYPE (comp_op0);
5195 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5196 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5198 comp_type = type;
5199 comp_op0 = arg1;
5200 comp_op1 = arg2;
5203 switch (comp_code)
5205 case EQ_EXPR:
5206 return pedantic_non_lvalue (fold_convert (type, arg2));
5207 case NE_EXPR:
5208 return pedantic_non_lvalue (fold_convert (type, arg1));
5209 case LE_EXPR:
5210 case LT_EXPR:
5211 case UNLE_EXPR:
5212 case UNLT_EXPR:
5213 /* In C++ a ?: expression can be an lvalue, so put the
5214 operand which will be used if they are equal first
5215 so that we can convert this back to the
5216 corresponding COND_EXPR. */
5217 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5219 comp_op0 = fold_convert (comp_type, comp_op0);
5220 comp_op1 = fold_convert (comp_type, comp_op1);
5221 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5222 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5223 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5224 return pedantic_non_lvalue (fold_convert (type, tem));
5226 break;
5227 case GE_EXPR:
5228 case GT_EXPR:
5229 case UNGE_EXPR:
5230 case UNGT_EXPR:
5231 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5233 comp_op0 = fold_convert (comp_type, comp_op0);
5234 comp_op1 = fold_convert (comp_type, comp_op1);
5235 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5236 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5237 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5238 return pedantic_non_lvalue (fold_convert (type, tem));
5240 break;
5241 case UNEQ_EXPR:
5242 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5243 return pedantic_non_lvalue (fold_convert (type, arg2));
5244 break;
5245 case LTGT_EXPR:
5246 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5247 return pedantic_non_lvalue (fold_convert (type, arg1));
5248 break;
5249 default:
5250 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5251 break;
5255 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5256 we might still be able to simplify this. For example,
5257 if C1 is one less or one more than C2, this might have started
5258 out as a MIN or MAX and been transformed by this function.
5259 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5261 if (INTEGRAL_TYPE_P (type)
5262 && TREE_CODE (arg01) == INTEGER_CST
5263 && TREE_CODE (arg2) == INTEGER_CST)
5264 switch (comp_code)
5266 case EQ_EXPR:
5267 /* We can replace A with C1 in this case. */
5268 arg1 = fold_convert (type, arg01);
5269 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5271 case LT_EXPR:
5272 /* If C1 is C2 + 1, this is min(A, C2). */
5273 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5274 OEP_ONLY_CONST)
5275 && operand_equal_p (arg01,
5276 const_binop (PLUS_EXPR, arg2,
5277 build_int_cst (type, 1), 0),
5278 OEP_ONLY_CONST))
5279 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5280 type,
5281 fold_convert (type, arg1),
5282 arg2));
5283 break;
5285 case LE_EXPR:
5286 /* If C1 is C2 - 1, this is min(A, C2). */
5287 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5288 OEP_ONLY_CONST)
5289 && operand_equal_p (arg01,
5290 const_binop (MINUS_EXPR, arg2,
5291 build_int_cst (type, 1), 0),
5292 OEP_ONLY_CONST))
5293 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5294 type,
5295 fold_convert (type, arg1),
5296 arg2));
5297 break;
5299 case GT_EXPR:
5300 /* If C1 is C2 - 1, this is max(A, C2). */
5301 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5302 OEP_ONLY_CONST)
5303 && operand_equal_p (arg01,
5304 const_binop (MINUS_EXPR, arg2,
5305 build_int_cst (type, 1), 0),
5306 OEP_ONLY_CONST))
5307 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5308 type,
5309 fold_convert (type, arg1),
5310 arg2));
5311 break;
5313 case GE_EXPR:
5314 /* If C1 is C2 + 1, this is max(A, C2). */
5315 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5316 OEP_ONLY_CONST)
5317 && operand_equal_p (arg01,
5318 const_binop (PLUS_EXPR, arg2,
5319 build_int_cst (type, 1), 0),
5320 OEP_ONLY_CONST))
5321 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5322 type,
5323 fold_convert (type, arg1),
5324 arg2));
5325 break;
5326 case NE_EXPR:
5327 break;
5328 default:
5329 gcc_unreachable ();
5332 return NULL_TREE;
5337 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5338 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5339 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5340 false) >= 2)
5341 #endif
5343 /* EXP is some logical combination of boolean tests. See if we can
5344 merge it into some range test. Return the new tree if so. */
5346 static tree
5347 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5349 int or_op = (code == TRUTH_ORIF_EXPR
5350 || code == TRUTH_OR_EXPR);
5351 int in0_p, in1_p, in_p;
5352 tree low0, low1, low, high0, high1, high;
5353 bool strict_overflow_p = false;
5354 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5355 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5356 tree tem;
5357 const char * const warnmsg = G_("assuming signed overflow does not occur "
5358 "when simplifying range test");
5360 /* If this is an OR operation, invert both sides; we will invert
5361 again at the end. */
5362 if (or_op)
5363 in0_p = ! in0_p, in1_p = ! in1_p;
5365 /* If both expressions are the same, if we can merge the ranges, and we
5366 can build the range test, return it or it inverted. If one of the
5367 ranges is always true or always false, consider it to be the same
5368 expression as the other. */
5369 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5370 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5371 in1_p, low1, high1)
5372 && 0 != (tem = (build_range_check (type,
5373 lhs != 0 ? lhs
5374 : rhs != 0 ? rhs : integer_zero_node,
5375 in_p, low, high))))
5377 if (strict_overflow_p)
5378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5379 return or_op ? invert_truthvalue (tem) : tem;
5382 /* On machines where the branch cost is expensive, if this is a
5383 short-circuited branch and the underlying object on both sides
5384 is the same, make a non-short-circuit operation. */
5385 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5386 && lhs != 0 && rhs != 0
5387 && (code == TRUTH_ANDIF_EXPR
5388 || code == TRUTH_ORIF_EXPR)
5389 && operand_equal_p (lhs, rhs, 0))
5391 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5392 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5393 which cases we can't do this. */
5394 if (simple_operand_p (lhs))
5395 return build2 (code == TRUTH_ANDIF_EXPR
5396 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5397 type, op0, op1);
5399 else if (lang_hooks.decls.global_bindings_p () == 0
5400 && ! CONTAINS_PLACEHOLDER_P (lhs))
5402 tree common = save_expr (lhs);
5404 if (0 != (lhs = build_range_check (type, common,
5405 or_op ? ! in0_p : in0_p,
5406 low0, high0))
5407 && (0 != (rhs = build_range_check (type, common,
5408 or_op ? ! in1_p : in1_p,
5409 low1, high1))))
5411 if (strict_overflow_p)
5412 fold_overflow_warning (warnmsg,
5413 WARN_STRICT_OVERFLOW_COMPARISON);
5414 return build2 (code == TRUTH_ANDIF_EXPR
5415 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5416 type, lhs, rhs);
5421 return 0;
5424 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5425 bit value. Arrange things so the extra bits will be set to zero if and
5426 only if C is signed-extended to its full width. If MASK is nonzero,
5427 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5429 static tree
5430 unextend (tree c, int p, int unsignedp, tree mask)
5432 tree type = TREE_TYPE (c);
5433 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5434 tree temp;
5436 if (p == modesize || unsignedp)
5437 return c;
5439 /* We work by getting just the sign bit into the low-order bit, then
5440 into the high-order bit, then sign-extend. We then XOR that value
5441 with C. */
5442 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5443 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5445 /* We must use a signed type in order to get an arithmetic right shift.
5446 However, we must also avoid introducing accidental overflows, so that
5447 a subsequent call to integer_zerop will work. Hence we must
5448 do the type conversion here. At this point, the constant is either
5449 zero or one, and the conversion to a signed type can never overflow.
5450 We could get an overflow if this conversion is done anywhere else. */
5451 if (TYPE_UNSIGNED (type))
5452 temp = fold_convert (signed_type_for (type), temp);
5454 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5455 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5456 if (mask != 0)
5457 temp = const_binop (BIT_AND_EXPR, temp,
5458 fold_convert (TREE_TYPE (c), mask), 0);
5459 /* If necessary, convert the type back to match the type of C. */
5460 if (TYPE_UNSIGNED (type))
5461 temp = fold_convert (type, temp);
5463 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5466 /* Find ways of folding logical expressions of LHS and RHS:
5467 Try to merge two comparisons to the same innermost item.
5468 Look for range tests like "ch >= '0' && ch <= '9'".
5469 Look for combinations of simple terms on machines with expensive branches
5470 and evaluate the RHS unconditionally.
5472 For example, if we have p->a == 2 && p->b == 4 and we can make an
5473 object large enough to span both A and B, we can do this with a comparison
5474 against the object ANDed with the a mask.
5476 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5477 operations to do this with one comparison.
5479 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5480 function and the one above.
5482 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5483 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5485 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5486 two operands.
5488 We return the simplified tree or 0 if no optimization is possible. */
5490 static tree
5491 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5493 /* If this is the "or" of two comparisons, we can do something if
5494 the comparisons are NE_EXPR. If this is the "and", we can do something
5495 if the comparisons are EQ_EXPR. I.e.,
5496 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5498 WANTED_CODE is this operation code. For single bit fields, we can
5499 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5500 comparison for one-bit fields. */
5502 enum tree_code wanted_code;
5503 enum tree_code lcode, rcode;
5504 tree ll_arg, lr_arg, rl_arg, rr_arg;
5505 tree ll_inner, lr_inner, rl_inner, rr_inner;
5506 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5507 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5508 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5509 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5510 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5511 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5512 enum machine_mode lnmode, rnmode;
5513 tree ll_mask, lr_mask, rl_mask, rr_mask;
5514 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5515 tree l_const, r_const;
5516 tree lntype, rntype, result;
5517 HOST_WIDE_INT first_bit, end_bit;
5518 int volatilep;
5519 tree orig_lhs = lhs, orig_rhs = rhs;
5520 enum tree_code orig_code = code;
5522 /* Start by getting the comparison codes. Fail if anything is volatile.
5523 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5524 it were surrounded with a NE_EXPR. */
5526 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5527 return 0;
5529 lcode = TREE_CODE (lhs);
5530 rcode = TREE_CODE (rhs);
5532 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5534 lhs = build2 (NE_EXPR, truth_type, lhs,
5535 build_int_cst (TREE_TYPE (lhs), 0));
5536 lcode = NE_EXPR;
5539 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5541 rhs = build2 (NE_EXPR, truth_type, rhs,
5542 build_int_cst (TREE_TYPE (rhs), 0));
5543 rcode = NE_EXPR;
5546 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5547 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5548 return 0;
5550 ll_arg = TREE_OPERAND (lhs, 0);
5551 lr_arg = TREE_OPERAND (lhs, 1);
5552 rl_arg = TREE_OPERAND (rhs, 0);
5553 rr_arg = TREE_OPERAND (rhs, 1);
5555 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5556 if (simple_operand_p (ll_arg)
5557 && simple_operand_p (lr_arg))
5559 tree result;
5560 if (operand_equal_p (ll_arg, rl_arg, 0)
5561 && operand_equal_p (lr_arg, rr_arg, 0))
5563 result = combine_comparisons (code, lcode, rcode,
5564 truth_type, ll_arg, lr_arg);
5565 if (result)
5566 return result;
5568 else if (operand_equal_p (ll_arg, rr_arg, 0)
5569 && operand_equal_p (lr_arg, rl_arg, 0))
5571 result = combine_comparisons (code, lcode,
5572 swap_tree_comparison (rcode),
5573 truth_type, ll_arg, lr_arg);
5574 if (result)
5575 return result;
5579 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5580 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5582 /* If the RHS can be evaluated unconditionally and its operands are
5583 simple, it wins to evaluate the RHS unconditionally on machines
5584 with expensive branches. In this case, this isn't a comparison
5585 that can be merged. Avoid doing this if the RHS is a floating-point
5586 comparison since those can trap. */
5588 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5589 false) >= 2
5590 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5591 && simple_operand_p (rl_arg)
5592 && simple_operand_p (rr_arg))
5594 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5595 if (code == TRUTH_OR_EXPR
5596 && lcode == NE_EXPR && integer_zerop (lr_arg)
5597 && rcode == NE_EXPR && integer_zerop (rr_arg)
5598 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5599 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5600 return build2 (NE_EXPR, truth_type,
5601 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5602 ll_arg, rl_arg),
5603 build_int_cst (TREE_TYPE (ll_arg), 0));
5605 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5606 if (code == TRUTH_AND_EXPR
5607 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5608 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5609 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5610 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5611 return build2 (EQ_EXPR, truth_type,
5612 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5613 ll_arg, rl_arg),
5614 build_int_cst (TREE_TYPE (ll_arg), 0));
5616 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5618 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5619 return build2 (code, truth_type, lhs, rhs);
5620 return NULL_TREE;
5624 /* See if the comparisons can be merged. Then get all the parameters for
5625 each side. */
5627 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5628 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5629 return 0;
5631 volatilep = 0;
5632 ll_inner = decode_field_reference (ll_arg,
5633 &ll_bitsize, &ll_bitpos, &ll_mode,
5634 &ll_unsignedp, &volatilep, &ll_mask,
5635 &ll_and_mask);
5636 lr_inner = decode_field_reference (lr_arg,
5637 &lr_bitsize, &lr_bitpos, &lr_mode,
5638 &lr_unsignedp, &volatilep, &lr_mask,
5639 &lr_and_mask);
5640 rl_inner = decode_field_reference (rl_arg,
5641 &rl_bitsize, &rl_bitpos, &rl_mode,
5642 &rl_unsignedp, &volatilep, &rl_mask,
5643 &rl_and_mask);
5644 rr_inner = decode_field_reference (rr_arg,
5645 &rr_bitsize, &rr_bitpos, &rr_mode,
5646 &rr_unsignedp, &volatilep, &rr_mask,
5647 &rr_and_mask);
5649 /* It must be true that the inner operation on the lhs of each
5650 comparison must be the same if we are to be able to do anything.
5651 Then see if we have constants. If not, the same must be true for
5652 the rhs's. */
5653 if (volatilep || ll_inner == 0 || rl_inner == 0
5654 || ! operand_equal_p (ll_inner, rl_inner, 0))
5655 return 0;
5657 if (TREE_CODE (lr_arg) == INTEGER_CST
5658 && TREE_CODE (rr_arg) == INTEGER_CST)
5659 l_const = lr_arg, r_const = rr_arg;
5660 else if (lr_inner == 0 || rr_inner == 0
5661 || ! operand_equal_p (lr_inner, rr_inner, 0))
5662 return 0;
5663 else
5664 l_const = r_const = 0;
5666 /* If either comparison code is not correct for our logical operation,
5667 fail. However, we can convert a one-bit comparison against zero into
5668 the opposite comparison against that bit being set in the field. */
5670 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5671 if (lcode != wanted_code)
5673 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5675 /* Make the left operand unsigned, since we are only interested
5676 in the value of one bit. Otherwise we are doing the wrong
5677 thing below. */
5678 ll_unsignedp = 1;
5679 l_const = ll_mask;
5681 else
5682 return 0;
5685 /* This is analogous to the code for l_const above. */
5686 if (rcode != wanted_code)
5688 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5690 rl_unsignedp = 1;
5691 r_const = rl_mask;
5693 else
5694 return 0;
5697 /* See if we can find a mode that contains both fields being compared on
5698 the left. If we can't, fail. Otherwise, update all constants and masks
5699 to be relative to a field of that size. */
5700 first_bit = MIN (ll_bitpos, rl_bitpos);
5701 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5702 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5703 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5704 volatilep);
5705 if (lnmode == VOIDmode)
5706 return 0;
5708 lnbitsize = GET_MODE_BITSIZE (lnmode);
5709 lnbitpos = first_bit & ~ (lnbitsize - 1);
5710 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5711 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5713 if (BYTES_BIG_ENDIAN)
5715 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5716 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5719 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5720 size_int (xll_bitpos), 0);
5721 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5722 size_int (xrl_bitpos), 0);
5724 if (l_const)
5726 l_const = fold_convert (lntype, l_const);
5727 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5728 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5729 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5730 fold_build1 (BIT_NOT_EXPR,
5731 lntype, ll_mask),
5732 0)))
5734 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5736 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5739 if (r_const)
5741 r_const = fold_convert (lntype, r_const);
5742 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5743 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5744 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5745 fold_build1 (BIT_NOT_EXPR,
5746 lntype, rl_mask),
5747 0)))
5749 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5751 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5755 /* If the right sides are not constant, do the same for it. Also,
5756 disallow this optimization if a size or signedness mismatch occurs
5757 between the left and right sides. */
5758 if (l_const == 0)
5760 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5761 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5762 /* Make sure the two fields on the right
5763 correspond to the left without being swapped. */
5764 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5765 return 0;
5767 first_bit = MIN (lr_bitpos, rr_bitpos);
5768 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5769 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5770 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5771 volatilep);
5772 if (rnmode == VOIDmode)
5773 return 0;
5775 rnbitsize = GET_MODE_BITSIZE (rnmode);
5776 rnbitpos = first_bit & ~ (rnbitsize - 1);
5777 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5778 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5780 if (BYTES_BIG_ENDIAN)
5782 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5783 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5786 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5787 size_int (xlr_bitpos), 0);
5788 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5789 size_int (xrr_bitpos), 0);
5791 /* Make a mask that corresponds to both fields being compared.
5792 Do this for both items being compared. If the operands are the
5793 same size and the bits being compared are in the same position
5794 then we can do this by masking both and comparing the masked
5795 results. */
5796 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5797 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5798 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5800 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5801 ll_unsignedp || rl_unsignedp);
5802 if (! all_ones_mask_p (ll_mask, lnbitsize))
5803 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5805 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5806 lr_unsignedp || rr_unsignedp);
5807 if (! all_ones_mask_p (lr_mask, rnbitsize))
5808 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5810 return build2 (wanted_code, truth_type, lhs, rhs);
5813 /* There is still another way we can do something: If both pairs of
5814 fields being compared are adjacent, we may be able to make a wider
5815 field containing them both.
5817 Note that we still must mask the lhs/rhs expressions. Furthermore,
5818 the mask must be shifted to account for the shift done by
5819 make_bit_field_ref. */
5820 if ((ll_bitsize + ll_bitpos == rl_bitpos
5821 && lr_bitsize + lr_bitpos == rr_bitpos)
5822 || (ll_bitpos == rl_bitpos + rl_bitsize
5823 && lr_bitpos == rr_bitpos + rr_bitsize))
5825 tree type;
5827 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5828 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5829 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5830 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5832 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5833 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5834 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5835 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5837 /* Convert to the smaller type before masking out unwanted bits. */
5838 type = lntype;
5839 if (lntype != rntype)
5841 if (lnbitsize > rnbitsize)
5843 lhs = fold_convert (rntype, lhs);
5844 ll_mask = fold_convert (rntype, ll_mask);
5845 type = rntype;
5847 else if (lnbitsize < rnbitsize)
5849 rhs = fold_convert (lntype, rhs);
5850 lr_mask = fold_convert (lntype, lr_mask);
5851 type = lntype;
5855 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5856 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5858 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5859 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5861 return build2 (wanted_code, truth_type, lhs, rhs);
5864 return 0;
5867 /* Handle the case of comparisons with constants. If there is something in
5868 common between the masks, those bits of the constants must be the same.
5869 If not, the condition is always false. Test for this to avoid generating
5870 incorrect code below. */
5871 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5872 if (! integer_zerop (result)
5873 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5874 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5876 if (wanted_code == NE_EXPR)
5878 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5879 return constant_boolean_node (true, truth_type);
5881 else
5883 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5884 return constant_boolean_node (false, truth_type);
5888 /* Construct the expression we will return. First get the component
5889 reference we will make. Unless the mask is all ones the width of
5890 that field, perform the mask operation. Then compare with the
5891 merged constant. */
5892 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5893 ll_unsignedp || rl_unsignedp);
5895 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5896 if (! all_ones_mask_p (ll_mask, lnbitsize))
5897 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5899 return build2 (wanted_code, truth_type, result,
5900 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5903 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5904 constant. */
5906 static tree
5907 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5909 tree arg0 = op0;
5910 enum tree_code op_code;
5911 tree comp_const;
5912 tree minmax_const;
5913 int consts_equal, consts_lt;
5914 tree inner;
5916 STRIP_SIGN_NOPS (arg0);
5918 op_code = TREE_CODE (arg0);
5919 minmax_const = TREE_OPERAND (arg0, 1);
5920 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5921 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5922 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5923 inner = TREE_OPERAND (arg0, 0);
5925 /* If something does not permit us to optimize, return the original tree. */
5926 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5927 || TREE_CODE (comp_const) != INTEGER_CST
5928 || TREE_OVERFLOW (comp_const)
5929 || TREE_CODE (minmax_const) != INTEGER_CST
5930 || TREE_OVERFLOW (minmax_const))
5931 return NULL_TREE;
5933 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5934 and GT_EXPR, doing the rest with recursive calls using logical
5935 simplifications. */
5936 switch (code)
5938 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5940 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5941 type, op0, op1);
5942 if (tem)
5943 return invert_truthvalue (tem);
5944 return NULL_TREE;
5947 case GE_EXPR:
5948 return
5949 fold_build2 (TRUTH_ORIF_EXPR, type,
5950 optimize_minmax_comparison
5951 (EQ_EXPR, type, arg0, comp_const),
5952 optimize_minmax_comparison
5953 (GT_EXPR, type, arg0, comp_const));
5955 case EQ_EXPR:
5956 if (op_code == MAX_EXPR && consts_equal)
5957 /* MAX (X, 0) == 0 -> X <= 0 */
5958 return fold_build2 (LE_EXPR, type, inner, comp_const);
5960 else if (op_code == MAX_EXPR && consts_lt)
5961 /* MAX (X, 0) == 5 -> X == 5 */
5962 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5964 else if (op_code == MAX_EXPR)
5965 /* MAX (X, 0) == -1 -> false */
5966 return omit_one_operand (type, integer_zero_node, inner);
5968 else if (consts_equal)
5969 /* MIN (X, 0) == 0 -> X >= 0 */
5970 return fold_build2 (GE_EXPR, type, inner, comp_const);
5972 else if (consts_lt)
5973 /* MIN (X, 0) == 5 -> false */
5974 return omit_one_operand (type, integer_zero_node, inner);
5976 else
5977 /* MIN (X, 0) == -1 -> X == -1 */
5978 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5980 case GT_EXPR:
5981 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5982 /* MAX (X, 0) > 0 -> X > 0
5983 MAX (X, 0) > 5 -> X > 5 */
5984 return fold_build2 (GT_EXPR, type, inner, comp_const);
5986 else if (op_code == MAX_EXPR)
5987 /* MAX (X, 0) > -1 -> true */
5988 return omit_one_operand (type, integer_one_node, inner);
5990 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5991 /* MIN (X, 0) > 0 -> false
5992 MIN (X, 0) > 5 -> false */
5993 return omit_one_operand (type, integer_zero_node, inner);
5995 else
5996 /* MIN (X, 0) > -1 -> X > -1 */
5997 return fold_build2 (GT_EXPR, type, inner, comp_const);
5999 default:
6000 return NULL_TREE;
6004 /* T is an integer expression that is being multiplied, divided, or taken a
6005 modulus (CODE says which and what kind of divide or modulus) by a
6006 constant C. See if we can eliminate that operation by folding it with
6007 other operations already in T. WIDE_TYPE, if non-null, is a type that
6008 should be used for the computation if wider than our type.
6010 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6011 (X * 2) + (Y * 4). We must, however, be assured that either the original
6012 expression would not overflow or that overflow is undefined for the type
6013 in the language in question.
6015 If we return a non-null expression, it is an equivalent form of the
6016 original computation, but need not be in the original type.
6018 We set *STRICT_OVERFLOW_P to true if the return values depends on
6019 signed overflow being undefined. Otherwise we do not change
6020 *STRICT_OVERFLOW_P. */
6022 static tree
6023 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6024 bool *strict_overflow_p)
6026 /* To avoid exponential search depth, refuse to allow recursion past
6027 three levels. Beyond that (1) it's highly unlikely that we'll find
6028 something interesting and (2) we've probably processed it before
6029 when we built the inner expression. */
6031 static int depth;
6032 tree ret;
6034 if (depth > 3)
6035 return NULL;
6037 depth++;
6038 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6039 depth--;
6041 return ret;
6044 static tree
6045 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6046 bool *strict_overflow_p)
6048 tree type = TREE_TYPE (t);
6049 enum tree_code tcode = TREE_CODE (t);
6050 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6051 > GET_MODE_SIZE (TYPE_MODE (type)))
6052 ? wide_type : type);
6053 tree t1, t2;
6054 int same_p = tcode == code;
6055 tree op0 = NULL_TREE, op1 = NULL_TREE;
6056 bool sub_strict_overflow_p;
6058 /* Don't deal with constants of zero here; they confuse the code below. */
6059 if (integer_zerop (c))
6060 return NULL_TREE;
6062 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6063 op0 = TREE_OPERAND (t, 0);
6065 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6066 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6068 /* Note that we need not handle conditional operations here since fold
6069 already handles those cases. So just do arithmetic here. */
6070 switch (tcode)
6072 case INTEGER_CST:
6073 /* For a constant, we can always simplify if we are a multiply
6074 or (for divide and modulus) if it is a multiple of our constant. */
6075 if (code == MULT_EXPR
6076 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6077 return const_binop (code, fold_convert (ctype, t),
6078 fold_convert (ctype, c), 0);
6079 break;
6081 CASE_CONVERT: case NON_LVALUE_EXPR:
6082 /* If op0 is an expression ... */
6083 if ((COMPARISON_CLASS_P (op0)
6084 || UNARY_CLASS_P (op0)
6085 || BINARY_CLASS_P (op0)
6086 || VL_EXP_CLASS_P (op0)
6087 || EXPRESSION_CLASS_P (op0))
6088 /* ... and has wrapping overflow, and its type is smaller
6089 than ctype, then we cannot pass through as widening. */
6090 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6091 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6092 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6093 && (TYPE_PRECISION (ctype)
6094 > TYPE_PRECISION (TREE_TYPE (op0))))
6095 /* ... or this is a truncation (t is narrower than op0),
6096 then we cannot pass through this narrowing. */
6097 || (TYPE_PRECISION (type)
6098 < TYPE_PRECISION (TREE_TYPE (op0)))
6099 /* ... or signedness changes for division or modulus,
6100 then we cannot pass through this conversion. */
6101 || (code != MULT_EXPR
6102 && (TYPE_UNSIGNED (ctype)
6103 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6104 /* ... or has undefined overflow while the converted to
6105 type has not, we cannot do the operation in the inner type
6106 as that would introduce undefined overflow. */
6107 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6108 && !TYPE_OVERFLOW_UNDEFINED (type))))
6109 break;
6111 /* Pass the constant down and see if we can make a simplification. If
6112 we can, replace this expression with the inner simplification for
6113 possible later conversion to our or some other type. */
6114 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6115 && TREE_CODE (t2) == INTEGER_CST
6116 && !TREE_OVERFLOW (t2)
6117 && (0 != (t1 = extract_muldiv (op0, t2, code,
6118 code == MULT_EXPR
6119 ? ctype : NULL_TREE,
6120 strict_overflow_p))))
6121 return t1;
6122 break;
6124 case ABS_EXPR:
6125 /* If widening the type changes it from signed to unsigned, then we
6126 must avoid building ABS_EXPR itself as unsigned. */
6127 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6129 tree cstype = (*signed_type_for) (ctype);
6130 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6131 != 0)
6133 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6134 return fold_convert (ctype, t1);
6136 break;
6138 /* If the constant is negative, we cannot simplify this. */
6139 if (tree_int_cst_sgn (c) == -1)
6140 break;
6141 /* FALLTHROUGH */
6142 case NEGATE_EXPR:
6143 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6144 != 0)
6145 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6146 break;
6148 case MIN_EXPR: case MAX_EXPR:
6149 /* If widening the type changes the signedness, then we can't perform
6150 this optimization as that changes the result. */
6151 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6152 break;
6154 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6155 sub_strict_overflow_p = false;
6156 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6157 &sub_strict_overflow_p)) != 0
6158 && (t2 = extract_muldiv (op1, c, code, wide_type,
6159 &sub_strict_overflow_p)) != 0)
6161 if (tree_int_cst_sgn (c) < 0)
6162 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6163 if (sub_strict_overflow_p)
6164 *strict_overflow_p = true;
6165 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6166 fold_convert (ctype, t2));
6168 break;
6170 case LSHIFT_EXPR: case RSHIFT_EXPR:
6171 /* If the second operand is constant, this is a multiplication
6172 or floor division, by a power of two, so we can treat it that
6173 way unless the multiplier or divisor overflows. Signed
6174 left-shift overflow is implementation-defined rather than
6175 undefined in C90, so do not convert signed left shift into
6176 multiplication. */
6177 if (TREE_CODE (op1) == INTEGER_CST
6178 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6179 /* const_binop may not detect overflow correctly,
6180 so check for it explicitly here. */
6181 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6182 && TREE_INT_CST_HIGH (op1) == 0
6183 && 0 != (t1 = fold_convert (ctype,
6184 const_binop (LSHIFT_EXPR,
6185 size_one_node,
6186 op1, 0)))
6187 && !TREE_OVERFLOW (t1))
6188 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6189 ? MULT_EXPR : FLOOR_DIV_EXPR,
6190 ctype, fold_convert (ctype, op0), t1),
6191 c, code, wide_type, strict_overflow_p);
6192 break;
6194 case PLUS_EXPR: case MINUS_EXPR:
6195 /* See if we can eliminate the operation on both sides. If we can, we
6196 can return a new PLUS or MINUS. If we can't, the only remaining
6197 cases where we can do anything are if the second operand is a
6198 constant. */
6199 sub_strict_overflow_p = false;
6200 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6201 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6202 if (t1 != 0 && t2 != 0
6203 && (code == MULT_EXPR
6204 /* If not multiplication, we can only do this if both operands
6205 are divisible by c. */
6206 || (multiple_of_p (ctype, op0, c)
6207 && multiple_of_p (ctype, op1, c))))
6209 if (sub_strict_overflow_p)
6210 *strict_overflow_p = true;
6211 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6212 fold_convert (ctype, t2));
6215 /* If this was a subtraction, negate OP1 and set it to be an addition.
6216 This simplifies the logic below. */
6217 if (tcode == MINUS_EXPR)
6218 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6220 if (TREE_CODE (op1) != INTEGER_CST)
6221 break;
6223 /* If either OP1 or C are negative, this optimization is not safe for
6224 some of the division and remainder types while for others we need
6225 to change the code. */
6226 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6228 if (code == CEIL_DIV_EXPR)
6229 code = FLOOR_DIV_EXPR;
6230 else if (code == FLOOR_DIV_EXPR)
6231 code = CEIL_DIV_EXPR;
6232 else if (code != MULT_EXPR
6233 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6234 break;
6237 /* If it's a multiply or a division/modulus operation of a multiple
6238 of our constant, do the operation and verify it doesn't overflow. */
6239 if (code == MULT_EXPR
6240 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6242 op1 = const_binop (code, fold_convert (ctype, op1),
6243 fold_convert (ctype, c), 0);
6244 /* We allow the constant to overflow with wrapping semantics. */
6245 if (op1 == 0
6246 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6247 break;
6249 else
6250 break;
6252 /* If we have an unsigned type is not a sizetype, we cannot widen
6253 the operation since it will change the result if the original
6254 computation overflowed. */
6255 if (TYPE_UNSIGNED (ctype)
6256 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6257 && ctype != type)
6258 break;
6260 /* If we were able to eliminate our operation from the first side,
6261 apply our operation to the second side and reform the PLUS. */
6262 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6263 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6265 /* The last case is if we are a multiply. In that case, we can
6266 apply the distributive law to commute the multiply and addition
6267 if the multiplication of the constants doesn't overflow. */
6268 if (code == MULT_EXPR)
6269 return fold_build2 (tcode, ctype,
6270 fold_build2 (code, ctype,
6271 fold_convert (ctype, op0),
6272 fold_convert (ctype, c)),
6273 op1);
6275 break;
6277 case MULT_EXPR:
6278 /* We have a special case here if we are doing something like
6279 (C * 8) % 4 since we know that's zero. */
6280 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6281 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6282 /* If the multiplication can overflow we cannot optimize this.
6283 ??? Until we can properly mark individual operations as
6284 not overflowing we need to treat sizetype special here as
6285 stor-layout relies on this opimization to make
6286 DECL_FIELD_BIT_OFFSET always a constant. */
6287 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6288 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6289 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6290 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6291 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6293 *strict_overflow_p = true;
6294 return omit_one_operand (type, integer_zero_node, op0);
6297 /* ... fall through ... */
6299 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6300 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6301 /* If we can extract our operation from the LHS, do so and return a
6302 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6303 do something only if the second operand is a constant. */
6304 if (same_p
6305 && (t1 = extract_muldiv (op0, c, code, wide_type,
6306 strict_overflow_p)) != 0)
6307 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6308 fold_convert (ctype, op1));
6309 else if (tcode == MULT_EXPR && code == MULT_EXPR
6310 && (t1 = extract_muldiv (op1, c, code, wide_type,
6311 strict_overflow_p)) != 0)
6312 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6313 fold_convert (ctype, t1));
6314 else if (TREE_CODE (op1) != INTEGER_CST)
6315 return 0;
6317 /* If these are the same operation types, we can associate them
6318 assuming no overflow. */
6319 if (tcode == code
6320 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6321 fold_convert (ctype, c), 1))
6322 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6323 TREE_INT_CST_HIGH (t1),
6324 (TYPE_UNSIGNED (ctype)
6325 && tcode != MULT_EXPR) ? -1 : 1,
6326 TREE_OVERFLOW (t1)))
6327 && !TREE_OVERFLOW (t1))
6328 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6330 /* If these operations "cancel" each other, we have the main
6331 optimizations of this pass, which occur when either constant is a
6332 multiple of the other, in which case we replace this with either an
6333 operation or CODE or TCODE.
6335 If we have an unsigned type that is not a sizetype, we cannot do
6336 this since it will change the result if the original computation
6337 overflowed. */
6338 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6339 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6340 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6341 || (tcode == MULT_EXPR
6342 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6343 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6344 && code != MULT_EXPR)))
6346 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6348 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6349 *strict_overflow_p = true;
6350 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6351 fold_convert (ctype,
6352 const_binop (TRUNC_DIV_EXPR,
6353 op1, c, 0)));
6355 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6357 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6358 *strict_overflow_p = true;
6359 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6360 fold_convert (ctype,
6361 const_binop (TRUNC_DIV_EXPR,
6362 c, op1, 0)));
6365 break;
6367 default:
6368 break;
6371 return 0;
6374 /* Return a node which has the indicated constant VALUE (either 0 or
6375 1), and is of the indicated TYPE. */
6377 tree
6378 constant_boolean_node (int value, tree type)
6380 if (type == integer_type_node)
6381 return value ? integer_one_node : integer_zero_node;
6382 else if (type == boolean_type_node)
6383 return value ? boolean_true_node : boolean_false_node;
6384 else
6385 return build_int_cst (type, value);
6389 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6390 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6391 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6392 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6393 COND is the first argument to CODE; otherwise (as in the example
6394 given here), it is the second argument. TYPE is the type of the
6395 original expression. Return NULL_TREE if no simplification is
6396 possible. */
6398 static tree
6399 fold_binary_op_with_conditional_arg (enum tree_code code,
6400 tree type, tree op0, tree op1,
6401 tree cond, tree arg, int cond_first_p)
6403 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6404 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6405 tree test, true_value, false_value;
6406 tree lhs = NULL_TREE;
6407 tree rhs = NULL_TREE;
6409 /* This transformation is only worthwhile if we don't have to wrap
6410 arg in a SAVE_EXPR, and the operation can be simplified on at least
6411 one of the branches once its pushed inside the COND_EXPR. */
6412 if (!TREE_CONSTANT (arg))
6413 return NULL_TREE;
6415 if (TREE_CODE (cond) == COND_EXPR)
6417 test = TREE_OPERAND (cond, 0);
6418 true_value = TREE_OPERAND (cond, 1);
6419 false_value = TREE_OPERAND (cond, 2);
6420 /* If this operand throws an expression, then it does not make
6421 sense to try to perform a logical or arithmetic operation
6422 involving it. */
6423 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6424 lhs = true_value;
6425 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6426 rhs = false_value;
6428 else
6430 tree testtype = TREE_TYPE (cond);
6431 test = cond;
6432 true_value = constant_boolean_node (true, testtype);
6433 false_value = constant_boolean_node (false, testtype);
6436 arg = fold_convert (arg_type, arg);
6437 if (lhs == 0)
6439 true_value = fold_convert (cond_type, true_value);
6440 if (cond_first_p)
6441 lhs = fold_build2 (code, type, true_value, arg);
6442 else
6443 lhs = fold_build2 (code, type, arg, true_value);
6445 if (rhs == 0)
6447 false_value = fold_convert (cond_type, false_value);
6448 if (cond_first_p)
6449 rhs = fold_build2 (code, type, false_value, arg);
6450 else
6451 rhs = fold_build2 (code, type, arg, false_value);
6454 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6455 return fold_convert (type, test);
6459 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6461 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6462 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6463 ADDEND is the same as X.
6465 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6466 and finite. The problematic cases are when X is zero, and its mode
6467 has signed zeros. In the case of rounding towards -infinity,
6468 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6469 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6471 bool
6472 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6474 if (!real_zerop (addend))
6475 return false;
6477 /* Don't allow the fold with -fsignaling-nans. */
6478 if (HONOR_SNANS (TYPE_MODE (type)))
6479 return false;
6481 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6482 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6483 return true;
6485 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6486 if (TREE_CODE (addend) == REAL_CST
6487 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6488 negate = !negate;
6490 /* The mode has signed zeros, and we have to honor their sign.
6491 In this situation, there is only one case we can return true for.
6492 X - 0 is the same as X unless rounding towards -infinity is
6493 supported. */
6494 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6497 /* Subroutine of fold() that checks comparisons of built-in math
6498 functions against real constants.
6500 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6501 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6502 is the type of the result and ARG0 and ARG1 are the operands of the
6503 comparison. ARG1 must be a TREE_REAL_CST.
6505 The function returns the constant folded tree if a simplification
6506 can be made, and NULL_TREE otherwise. */
6508 static tree
6509 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6510 tree type, tree arg0, tree arg1)
6512 REAL_VALUE_TYPE c;
6514 if (BUILTIN_SQRT_P (fcode))
6516 tree arg = CALL_EXPR_ARG (arg0, 0);
6517 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6519 c = TREE_REAL_CST (arg1);
6520 if (REAL_VALUE_NEGATIVE (c))
6522 /* sqrt(x) < y is always false, if y is negative. */
6523 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6524 return omit_one_operand (type, integer_zero_node, arg);
6526 /* sqrt(x) > y is always true, if y is negative and we
6527 don't care about NaNs, i.e. negative values of x. */
6528 if (code == NE_EXPR || !HONOR_NANS (mode))
6529 return omit_one_operand (type, integer_one_node, arg);
6531 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6532 return fold_build2 (GE_EXPR, type, arg,
6533 build_real (TREE_TYPE (arg), dconst0));
6535 else if (code == GT_EXPR || code == GE_EXPR)
6537 REAL_VALUE_TYPE c2;
6539 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6540 real_convert (&c2, mode, &c2);
6542 if (REAL_VALUE_ISINF (c2))
6544 /* sqrt(x) > y is x == +Inf, when y is very large. */
6545 if (HONOR_INFINITIES (mode))
6546 return fold_build2 (EQ_EXPR, type, arg,
6547 build_real (TREE_TYPE (arg), c2));
6549 /* sqrt(x) > y is always false, when y is very large
6550 and we don't care about infinities. */
6551 return omit_one_operand (type, integer_zero_node, arg);
6554 /* sqrt(x) > c is the same as x > c*c. */
6555 return fold_build2 (code, type, arg,
6556 build_real (TREE_TYPE (arg), c2));
6558 else if (code == LT_EXPR || code == LE_EXPR)
6560 REAL_VALUE_TYPE c2;
6562 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6563 real_convert (&c2, mode, &c2);
6565 if (REAL_VALUE_ISINF (c2))
6567 /* sqrt(x) < y is always true, when y is a very large
6568 value and we don't care about NaNs or Infinities. */
6569 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6570 return omit_one_operand (type, integer_one_node, arg);
6572 /* sqrt(x) < y is x != +Inf when y is very large and we
6573 don't care about NaNs. */
6574 if (! HONOR_NANS (mode))
6575 return fold_build2 (NE_EXPR, type, arg,
6576 build_real (TREE_TYPE (arg), c2));
6578 /* sqrt(x) < y is x >= 0 when y is very large and we
6579 don't care about Infinities. */
6580 if (! HONOR_INFINITIES (mode))
6581 return fold_build2 (GE_EXPR, type, arg,
6582 build_real (TREE_TYPE (arg), dconst0));
6584 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6585 if (lang_hooks.decls.global_bindings_p () != 0
6586 || CONTAINS_PLACEHOLDER_P (arg))
6587 return NULL_TREE;
6589 arg = save_expr (arg);
6590 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6591 fold_build2 (GE_EXPR, type, arg,
6592 build_real (TREE_TYPE (arg),
6593 dconst0)),
6594 fold_build2 (NE_EXPR, type, arg,
6595 build_real (TREE_TYPE (arg),
6596 c2)));
6599 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6600 if (! HONOR_NANS (mode))
6601 return fold_build2 (code, type, arg,
6602 build_real (TREE_TYPE (arg), c2));
6604 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6605 if (lang_hooks.decls.global_bindings_p () == 0
6606 && ! CONTAINS_PLACEHOLDER_P (arg))
6608 arg = save_expr (arg);
6609 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6610 fold_build2 (GE_EXPR, type, arg,
6611 build_real (TREE_TYPE (arg),
6612 dconst0)),
6613 fold_build2 (code, type, arg,
6614 build_real (TREE_TYPE (arg),
6615 c2)));
6620 return NULL_TREE;
6623 /* Subroutine of fold() that optimizes comparisons against Infinities,
6624 either +Inf or -Inf.
6626 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6627 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6628 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6630 The function returns the constant folded tree if a simplification
6631 can be made, and NULL_TREE otherwise. */
6633 static tree
6634 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6636 enum machine_mode mode;
6637 REAL_VALUE_TYPE max;
6638 tree temp;
6639 bool neg;
6641 mode = TYPE_MODE (TREE_TYPE (arg0));
6643 /* For negative infinity swap the sense of the comparison. */
6644 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6645 if (neg)
6646 code = swap_tree_comparison (code);
6648 switch (code)
6650 case GT_EXPR:
6651 /* x > +Inf is always false, if with ignore sNANs. */
6652 if (HONOR_SNANS (mode))
6653 return NULL_TREE;
6654 return omit_one_operand (type, integer_zero_node, arg0);
6656 case LE_EXPR:
6657 /* x <= +Inf is always true, if we don't case about NaNs. */
6658 if (! HONOR_NANS (mode))
6659 return omit_one_operand (type, integer_one_node, arg0);
6661 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6662 if (lang_hooks.decls.global_bindings_p () == 0
6663 && ! CONTAINS_PLACEHOLDER_P (arg0))
6665 arg0 = save_expr (arg0);
6666 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6668 break;
6670 case EQ_EXPR:
6671 case GE_EXPR:
6672 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6673 real_maxval (&max, neg, mode);
6674 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6675 arg0, build_real (TREE_TYPE (arg0), max));
6677 case LT_EXPR:
6678 /* x < +Inf is always equal to x <= DBL_MAX. */
6679 real_maxval (&max, neg, mode);
6680 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6681 arg0, build_real (TREE_TYPE (arg0), max));
6683 case NE_EXPR:
6684 /* x != +Inf is always equal to !(x > DBL_MAX). */
6685 real_maxval (&max, neg, mode);
6686 if (! HONOR_NANS (mode))
6687 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6688 arg0, build_real (TREE_TYPE (arg0), max));
6690 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6691 arg0, build_real (TREE_TYPE (arg0), max));
6692 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6694 default:
6695 break;
6698 return NULL_TREE;
6701 /* Subroutine of fold() that optimizes comparisons of a division by
6702 a nonzero integer constant against an integer constant, i.e.
6703 X/C1 op C2.
6705 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6706 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6707 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6709 The function returns the constant folded tree if a simplification
6710 can be made, and NULL_TREE otherwise. */
6712 static tree
6713 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6715 tree prod, tmp, hi, lo;
6716 tree arg00 = TREE_OPERAND (arg0, 0);
6717 tree arg01 = TREE_OPERAND (arg0, 1);
6718 unsigned HOST_WIDE_INT lpart;
6719 HOST_WIDE_INT hpart;
6720 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6721 bool neg_overflow;
6722 int overflow;
6724 /* We have to do this the hard way to detect unsigned overflow.
6725 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6726 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6727 TREE_INT_CST_HIGH (arg01),
6728 TREE_INT_CST_LOW (arg1),
6729 TREE_INT_CST_HIGH (arg1),
6730 &lpart, &hpart, unsigned_p);
6731 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6732 -1, overflow);
6733 neg_overflow = false;
6735 if (unsigned_p)
6737 tmp = int_const_binop (MINUS_EXPR, arg01,
6738 build_int_cst (TREE_TYPE (arg01), 1), 0);
6739 lo = prod;
6741 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6742 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6743 TREE_INT_CST_HIGH (prod),
6744 TREE_INT_CST_LOW (tmp),
6745 TREE_INT_CST_HIGH (tmp),
6746 &lpart, &hpart, unsigned_p);
6747 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6748 -1, overflow | TREE_OVERFLOW (prod));
6750 else if (tree_int_cst_sgn (arg01) >= 0)
6752 tmp = int_const_binop (MINUS_EXPR, arg01,
6753 build_int_cst (TREE_TYPE (arg01), 1), 0);
6754 switch (tree_int_cst_sgn (arg1))
6756 case -1:
6757 neg_overflow = true;
6758 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6759 hi = prod;
6760 break;
6762 case 0:
6763 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6764 hi = tmp;
6765 break;
6767 case 1:
6768 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6769 lo = prod;
6770 break;
6772 default:
6773 gcc_unreachable ();
6776 else
6778 /* A negative divisor reverses the relational operators. */
6779 code = swap_tree_comparison (code);
6781 tmp = int_const_binop (PLUS_EXPR, arg01,
6782 build_int_cst (TREE_TYPE (arg01), 1), 0);
6783 switch (tree_int_cst_sgn (arg1))
6785 case -1:
6786 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6787 lo = prod;
6788 break;
6790 case 0:
6791 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6792 lo = tmp;
6793 break;
6795 case 1:
6796 neg_overflow = true;
6797 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6798 hi = prod;
6799 break;
6801 default:
6802 gcc_unreachable ();
6806 switch (code)
6808 case EQ_EXPR:
6809 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6810 return omit_one_operand (type, integer_zero_node, arg00);
6811 if (TREE_OVERFLOW (hi))
6812 return fold_build2 (GE_EXPR, type, arg00, lo);
6813 if (TREE_OVERFLOW (lo))
6814 return fold_build2 (LE_EXPR, type, arg00, hi);
6815 return build_range_check (type, arg00, 1, lo, hi);
6817 case NE_EXPR:
6818 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6819 return omit_one_operand (type, integer_one_node, arg00);
6820 if (TREE_OVERFLOW (hi))
6821 return fold_build2 (LT_EXPR, type, arg00, lo);
6822 if (TREE_OVERFLOW (lo))
6823 return fold_build2 (GT_EXPR, type, arg00, hi);
6824 return build_range_check (type, arg00, 0, lo, hi);
6826 case LT_EXPR:
6827 if (TREE_OVERFLOW (lo))
6829 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6830 return omit_one_operand (type, tmp, arg00);
6832 return fold_build2 (LT_EXPR, type, arg00, lo);
6834 case LE_EXPR:
6835 if (TREE_OVERFLOW (hi))
6837 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6838 return omit_one_operand (type, tmp, arg00);
6840 return fold_build2 (LE_EXPR, type, arg00, hi);
6842 case GT_EXPR:
6843 if (TREE_OVERFLOW (hi))
6845 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6846 return omit_one_operand (type, tmp, arg00);
6848 return fold_build2 (GT_EXPR, type, arg00, hi);
6850 case GE_EXPR:
6851 if (TREE_OVERFLOW (lo))
6853 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6854 return omit_one_operand (type, tmp, arg00);
6856 return fold_build2 (GE_EXPR, type, arg00, lo);
6858 default:
6859 break;
6862 return NULL_TREE;
6866 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6867 equality/inequality test, then return a simplified form of the test
6868 using a sign testing. Otherwise return NULL. TYPE is the desired
6869 result type. */
6871 static tree
6872 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6873 tree result_type)
6875 /* If this is testing a single bit, we can optimize the test. */
6876 if ((code == NE_EXPR || code == EQ_EXPR)
6877 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6878 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6880 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6881 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6882 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6884 if (arg00 != NULL_TREE
6885 /* This is only a win if casting to a signed type is cheap,
6886 i.e. when arg00's type is not a partial mode. */
6887 && TYPE_PRECISION (TREE_TYPE (arg00))
6888 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6890 tree stype = signed_type_for (TREE_TYPE (arg00));
6891 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6892 result_type, fold_convert (stype, arg00),
6893 build_int_cst (stype, 0));
6897 return NULL_TREE;
6900 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6901 equality/inequality test, then return a simplified form of
6902 the test using shifts and logical operations. Otherwise return
6903 NULL. TYPE is the desired result type. */
6905 tree
6906 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6907 tree result_type)
6909 /* If this is testing a single bit, we can optimize the test. */
6910 if ((code == NE_EXPR || code == EQ_EXPR)
6911 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6912 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6914 tree inner = TREE_OPERAND (arg0, 0);
6915 tree type = TREE_TYPE (arg0);
6916 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6917 enum machine_mode operand_mode = TYPE_MODE (type);
6918 int ops_unsigned;
6919 tree signed_type, unsigned_type, intermediate_type;
6920 tree tem, one;
6922 /* First, see if we can fold the single bit test into a sign-bit
6923 test. */
6924 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6925 result_type);
6926 if (tem)
6927 return tem;
6929 /* Otherwise we have (A & C) != 0 where C is a single bit,
6930 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6931 Similarly for (A & C) == 0. */
6933 /* If INNER is a right shift of a constant and it plus BITNUM does
6934 not overflow, adjust BITNUM and INNER. */
6935 if (TREE_CODE (inner) == RSHIFT_EXPR
6936 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6937 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6938 && bitnum < TYPE_PRECISION (type)
6939 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6940 bitnum - TYPE_PRECISION (type)))
6942 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6943 inner = TREE_OPERAND (inner, 0);
6946 /* If we are going to be able to omit the AND below, we must do our
6947 operations as unsigned. If we must use the AND, we have a choice.
6948 Normally unsigned is faster, but for some machines signed is. */
6949 #ifdef LOAD_EXTEND_OP
6950 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6951 && !flag_syntax_only) ? 0 : 1;
6952 #else
6953 ops_unsigned = 1;
6954 #endif
6956 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6957 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6958 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6959 inner = fold_convert (intermediate_type, inner);
6961 if (bitnum != 0)
6962 inner = build2 (RSHIFT_EXPR, intermediate_type,
6963 inner, size_int (bitnum));
6965 one = build_int_cst (intermediate_type, 1);
6967 if (code == EQ_EXPR)
6968 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6970 /* Put the AND last so it can combine with more things. */
6971 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6973 /* Make sure to return the proper type. */
6974 inner = fold_convert (result_type, inner);
6976 return inner;
6978 return NULL_TREE;
6981 /* Check whether we are allowed to reorder operands arg0 and arg1,
6982 such that the evaluation of arg1 occurs before arg0. */
6984 static bool
6985 reorder_operands_p (const_tree arg0, const_tree arg1)
6987 if (! flag_evaluation_order)
6988 return true;
6989 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6990 return true;
6991 return ! TREE_SIDE_EFFECTS (arg0)
6992 && ! TREE_SIDE_EFFECTS (arg1);
6995 /* Test whether it is preferable two swap two operands, ARG0 and
6996 ARG1, for example because ARG0 is an integer constant and ARG1
6997 isn't. If REORDER is true, only recommend swapping if we can
6998 evaluate the operands in reverse order. */
7000 bool
7001 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7003 STRIP_SIGN_NOPS (arg0);
7004 STRIP_SIGN_NOPS (arg1);
7006 if (TREE_CODE (arg1) == INTEGER_CST)
7007 return 0;
7008 if (TREE_CODE (arg0) == INTEGER_CST)
7009 return 1;
7011 if (TREE_CODE (arg1) == REAL_CST)
7012 return 0;
7013 if (TREE_CODE (arg0) == REAL_CST)
7014 return 1;
7016 if (TREE_CODE (arg1) == FIXED_CST)
7017 return 0;
7018 if (TREE_CODE (arg0) == FIXED_CST)
7019 return 1;
7021 if (TREE_CODE (arg1) == COMPLEX_CST)
7022 return 0;
7023 if (TREE_CODE (arg0) == COMPLEX_CST)
7024 return 1;
7026 if (TREE_CONSTANT (arg1))
7027 return 0;
7028 if (TREE_CONSTANT (arg0))
7029 return 1;
7031 if (optimize_function_for_size_p (cfun))
7032 return 0;
7034 if (reorder && flag_evaluation_order
7035 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7036 return 0;
7038 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7039 for commutative and comparison operators. Ensuring a canonical
7040 form allows the optimizers to find additional redundancies without
7041 having to explicitly check for both orderings. */
7042 if (TREE_CODE (arg0) == SSA_NAME
7043 && TREE_CODE (arg1) == SSA_NAME
7044 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7045 return 1;
7047 /* Put SSA_NAMEs last. */
7048 if (TREE_CODE (arg1) == SSA_NAME)
7049 return 0;
7050 if (TREE_CODE (arg0) == SSA_NAME)
7051 return 1;
7053 /* Put variables last. */
7054 if (DECL_P (arg1))
7055 return 0;
7056 if (DECL_P (arg0))
7057 return 1;
7059 return 0;
7062 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7063 ARG0 is extended to a wider type. */
7065 static tree
7066 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7068 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7069 tree arg1_unw;
7070 tree shorter_type, outer_type;
7071 tree min, max;
7072 bool above, below;
7074 if (arg0_unw == arg0)
7075 return NULL_TREE;
7076 shorter_type = TREE_TYPE (arg0_unw);
7078 #ifdef HAVE_canonicalize_funcptr_for_compare
7079 /* Disable this optimization if we're casting a function pointer
7080 type on targets that require function pointer canonicalization. */
7081 if (HAVE_canonicalize_funcptr_for_compare
7082 && TREE_CODE (shorter_type) == POINTER_TYPE
7083 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7084 return NULL_TREE;
7085 #endif
7087 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7088 return NULL_TREE;
7090 arg1_unw = get_unwidened (arg1, NULL_TREE);
7092 /* If possible, express the comparison in the shorter mode. */
7093 if ((code == EQ_EXPR || code == NE_EXPR
7094 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7095 && (TREE_TYPE (arg1_unw) == shorter_type
7096 || ((TYPE_PRECISION (shorter_type)
7097 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7098 && (TYPE_UNSIGNED (shorter_type)
7099 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7100 || (TREE_CODE (arg1_unw) == INTEGER_CST
7101 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7102 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7103 && int_fits_type_p (arg1_unw, shorter_type))))
7104 return fold_build2 (code, type, arg0_unw,
7105 fold_convert (shorter_type, arg1_unw));
7107 if (TREE_CODE (arg1_unw) != INTEGER_CST
7108 || TREE_CODE (shorter_type) != INTEGER_TYPE
7109 || !int_fits_type_p (arg1_unw, shorter_type))
7110 return NULL_TREE;
7112 /* If we are comparing with the integer that does not fit into the range
7113 of the shorter type, the result is known. */
7114 outer_type = TREE_TYPE (arg1_unw);
7115 min = lower_bound_in_type (outer_type, shorter_type);
7116 max = upper_bound_in_type (outer_type, shorter_type);
7118 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7119 max, arg1_unw));
7120 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7121 arg1_unw, min));
7123 switch (code)
7125 case EQ_EXPR:
7126 if (above || below)
7127 return omit_one_operand (type, integer_zero_node, arg0);
7128 break;
7130 case NE_EXPR:
7131 if (above || below)
7132 return omit_one_operand (type, integer_one_node, arg0);
7133 break;
7135 case LT_EXPR:
7136 case LE_EXPR:
7137 if (above)
7138 return omit_one_operand (type, integer_one_node, arg0);
7139 else if (below)
7140 return omit_one_operand (type, integer_zero_node, arg0);
7142 case GT_EXPR:
7143 case GE_EXPR:
7144 if (above)
7145 return omit_one_operand (type, integer_zero_node, arg0);
7146 else if (below)
7147 return omit_one_operand (type, integer_one_node, arg0);
7149 default:
7150 break;
7153 return NULL_TREE;
7156 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7157 ARG0 just the signedness is changed. */
7159 static tree
7160 fold_sign_changed_comparison (enum tree_code code, tree type,
7161 tree arg0, tree arg1)
7163 tree arg0_inner;
7164 tree inner_type, outer_type;
7166 if (!CONVERT_EXPR_P (arg0))
7167 return NULL_TREE;
7169 outer_type = TREE_TYPE (arg0);
7170 arg0_inner = TREE_OPERAND (arg0, 0);
7171 inner_type = TREE_TYPE (arg0_inner);
7173 #ifdef HAVE_canonicalize_funcptr_for_compare
7174 /* Disable this optimization if we're casting a function pointer
7175 type on targets that require function pointer canonicalization. */
7176 if (HAVE_canonicalize_funcptr_for_compare
7177 && TREE_CODE (inner_type) == POINTER_TYPE
7178 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7179 return NULL_TREE;
7180 #endif
7182 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7183 return NULL_TREE;
7185 /* If the conversion is from an integral subtype to its basetype
7186 leave it alone. */
7187 if (TREE_TYPE (inner_type) == outer_type)
7188 return NULL_TREE;
7190 if (TREE_CODE (arg1) != INTEGER_CST
7191 && !(CONVERT_EXPR_P (arg1)
7192 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7193 return NULL_TREE;
7195 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7196 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7197 && code != NE_EXPR
7198 && code != EQ_EXPR)
7199 return NULL_TREE;
7201 if (TREE_CODE (arg1) == INTEGER_CST)
7202 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7203 TREE_INT_CST_HIGH (arg1), 0,
7204 TREE_OVERFLOW (arg1));
7205 else
7206 arg1 = fold_convert (inner_type, arg1);
7208 return fold_build2 (code, type, arg0_inner, arg1);
7211 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7212 step of the array. Reconstructs s and delta in the case of s * delta
7213 being an integer constant (and thus already folded).
7214 ADDR is the address. MULT is the multiplicative expression.
7215 If the function succeeds, the new address expression is returned. Otherwise
7216 NULL_TREE is returned. */
7218 static tree
7219 try_move_mult_to_index (tree addr, tree op1)
7221 tree s, delta, step;
7222 tree ref = TREE_OPERAND (addr, 0), pref;
7223 tree ret, pos;
7224 tree itype;
7225 bool mdim = false;
7227 /* Strip the nops that might be added when converting op1 to sizetype. */
7228 STRIP_NOPS (op1);
7230 /* Canonicalize op1 into a possibly non-constant delta
7231 and an INTEGER_CST s. */
7232 if (TREE_CODE (op1) == MULT_EXPR)
7234 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7236 STRIP_NOPS (arg0);
7237 STRIP_NOPS (arg1);
7239 if (TREE_CODE (arg0) == INTEGER_CST)
7241 s = arg0;
7242 delta = arg1;
7244 else if (TREE_CODE (arg1) == INTEGER_CST)
7246 s = arg1;
7247 delta = arg0;
7249 else
7250 return NULL_TREE;
7252 else if (TREE_CODE (op1) == INTEGER_CST)
7254 delta = op1;
7255 s = NULL_TREE;
7257 else
7259 /* Simulate we are delta * 1. */
7260 delta = op1;
7261 s = integer_one_node;
7264 for (;; ref = TREE_OPERAND (ref, 0))
7266 if (TREE_CODE (ref) == ARRAY_REF)
7268 /* Remember if this was a multi-dimensional array. */
7269 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7270 mdim = true;
7272 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7273 if (! itype)
7274 continue;
7276 step = array_ref_element_size (ref);
7277 if (TREE_CODE (step) != INTEGER_CST)
7278 continue;
7280 if (s)
7282 if (! tree_int_cst_equal (step, s))
7283 continue;
7285 else
7287 /* Try if delta is a multiple of step. */
7288 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7289 if (! tmp)
7290 continue;
7291 delta = tmp;
7294 /* Only fold here if we can verify we do not overflow one
7295 dimension of a multi-dimensional array. */
7296 if (mdim)
7298 tree tmp;
7300 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7301 || !INTEGRAL_TYPE_P (itype)
7302 || !TYPE_MAX_VALUE (itype)
7303 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7304 continue;
7306 tmp = fold_binary (PLUS_EXPR, itype,
7307 fold_convert (itype,
7308 TREE_OPERAND (ref, 1)),
7309 fold_convert (itype, delta));
7310 if (!tmp
7311 || TREE_CODE (tmp) != INTEGER_CST
7312 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7313 continue;
7316 break;
7318 else
7319 mdim = false;
7321 if (!handled_component_p (ref))
7322 return NULL_TREE;
7325 /* We found the suitable array reference. So copy everything up to it,
7326 and replace the index. */
7328 pref = TREE_OPERAND (addr, 0);
7329 ret = copy_node (pref);
7330 pos = ret;
7332 while (pref != ref)
7334 pref = TREE_OPERAND (pref, 0);
7335 TREE_OPERAND (pos, 0) = copy_node (pref);
7336 pos = TREE_OPERAND (pos, 0);
7339 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7340 fold_convert (itype,
7341 TREE_OPERAND (pos, 1)),
7342 fold_convert (itype, delta));
7344 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7348 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7349 means A >= Y && A != MAX, but in this case we know that
7350 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7352 static tree
7353 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7355 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7357 if (TREE_CODE (bound) == LT_EXPR)
7358 a = TREE_OPERAND (bound, 0);
7359 else if (TREE_CODE (bound) == GT_EXPR)
7360 a = TREE_OPERAND (bound, 1);
7361 else
7362 return NULL_TREE;
7364 typea = TREE_TYPE (a);
7365 if (!INTEGRAL_TYPE_P (typea)
7366 && !POINTER_TYPE_P (typea))
7367 return NULL_TREE;
7369 if (TREE_CODE (ineq) == LT_EXPR)
7371 a1 = TREE_OPERAND (ineq, 1);
7372 y = TREE_OPERAND (ineq, 0);
7374 else if (TREE_CODE (ineq) == GT_EXPR)
7376 a1 = TREE_OPERAND (ineq, 0);
7377 y = TREE_OPERAND (ineq, 1);
7379 else
7380 return NULL_TREE;
7382 if (TREE_TYPE (a1) != typea)
7383 return NULL_TREE;
7385 if (POINTER_TYPE_P (typea))
7387 /* Convert the pointer types into integer before taking the difference. */
7388 tree ta = fold_convert (ssizetype, a);
7389 tree ta1 = fold_convert (ssizetype, a1);
7390 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7392 else
7393 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7395 if (!diff || !integer_onep (diff))
7396 return NULL_TREE;
7398 return fold_build2 (GE_EXPR, type, a, y);
7401 /* Fold a sum or difference of at least one multiplication.
7402 Returns the folded tree or NULL if no simplification could be made. */
7404 static tree
7405 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7407 tree arg00, arg01, arg10, arg11;
7408 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7410 /* (A * C) +- (B * C) -> (A+-B) * C.
7411 (A * C) +- A -> A * (C+-1).
7412 We are most concerned about the case where C is a constant,
7413 but other combinations show up during loop reduction. Since
7414 it is not difficult, try all four possibilities. */
7416 if (TREE_CODE (arg0) == MULT_EXPR)
7418 arg00 = TREE_OPERAND (arg0, 0);
7419 arg01 = TREE_OPERAND (arg0, 1);
7421 else if (TREE_CODE (arg0) == INTEGER_CST)
7423 arg00 = build_one_cst (type);
7424 arg01 = arg0;
7426 else
7428 /* We cannot generate constant 1 for fract. */
7429 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7430 return NULL_TREE;
7431 arg00 = arg0;
7432 arg01 = build_one_cst (type);
7434 if (TREE_CODE (arg1) == MULT_EXPR)
7436 arg10 = TREE_OPERAND (arg1, 0);
7437 arg11 = TREE_OPERAND (arg1, 1);
7439 else if (TREE_CODE (arg1) == INTEGER_CST)
7441 arg10 = build_one_cst (type);
7442 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7443 the purpose of this canonicalization. */
7444 if (TREE_INT_CST_HIGH (arg1) == -1
7445 && negate_expr_p (arg1)
7446 && code == PLUS_EXPR)
7448 arg11 = negate_expr (arg1);
7449 code = MINUS_EXPR;
7451 else
7452 arg11 = arg1;
7454 else
7456 /* We cannot generate constant 1 for fract. */
7457 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7458 return NULL_TREE;
7459 arg10 = arg1;
7460 arg11 = build_one_cst (type);
7462 same = NULL_TREE;
7464 if (operand_equal_p (arg01, arg11, 0))
7465 same = arg01, alt0 = arg00, alt1 = arg10;
7466 else if (operand_equal_p (arg00, arg10, 0))
7467 same = arg00, alt0 = arg01, alt1 = arg11;
7468 else if (operand_equal_p (arg00, arg11, 0))
7469 same = arg00, alt0 = arg01, alt1 = arg10;
7470 else if (operand_equal_p (arg01, arg10, 0))
7471 same = arg01, alt0 = arg00, alt1 = arg11;
7473 /* No identical multiplicands; see if we can find a common
7474 power-of-two factor in non-power-of-two multiplies. This
7475 can help in multi-dimensional array access. */
7476 else if (host_integerp (arg01, 0)
7477 && host_integerp (arg11, 0))
7479 HOST_WIDE_INT int01, int11, tmp;
7480 bool swap = false;
7481 tree maybe_same;
7482 int01 = TREE_INT_CST_LOW (arg01);
7483 int11 = TREE_INT_CST_LOW (arg11);
7485 /* Move min of absolute values to int11. */
7486 if ((int01 >= 0 ? int01 : -int01)
7487 < (int11 >= 0 ? int11 : -int11))
7489 tmp = int01, int01 = int11, int11 = tmp;
7490 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7491 maybe_same = arg01;
7492 swap = true;
7494 else
7495 maybe_same = arg11;
7497 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7499 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7500 build_int_cst (TREE_TYPE (arg00),
7501 int01 / int11));
7502 alt1 = arg10;
7503 same = maybe_same;
7504 if (swap)
7505 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7509 if (same)
7510 return fold_build2 (MULT_EXPR, type,
7511 fold_build2 (code, type,
7512 fold_convert (type, alt0),
7513 fold_convert (type, alt1)),
7514 fold_convert (type, same));
7516 return NULL_TREE;
7519 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7520 specified by EXPR into the buffer PTR of length LEN bytes.
7521 Return the number of bytes placed in the buffer, or zero
7522 upon failure. */
7524 static int
7525 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7527 tree type = TREE_TYPE (expr);
7528 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7529 int byte, offset, word, words;
7530 unsigned char value;
7532 if (total_bytes > len)
7533 return 0;
7534 words = total_bytes / UNITS_PER_WORD;
7536 for (byte = 0; byte < total_bytes; byte++)
7538 int bitpos = byte * BITS_PER_UNIT;
7539 if (bitpos < HOST_BITS_PER_WIDE_INT)
7540 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7541 else
7542 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7543 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7545 if (total_bytes > UNITS_PER_WORD)
7547 word = byte / UNITS_PER_WORD;
7548 if (WORDS_BIG_ENDIAN)
7549 word = (words - 1) - word;
7550 offset = word * UNITS_PER_WORD;
7551 if (BYTES_BIG_ENDIAN)
7552 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7553 else
7554 offset += byte % UNITS_PER_WORD;
7556 else
7557 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7558 ptr[offset] = value;
7560 return total_bytes;
7564 /* Subroutine of native_encode_expr. Encode the REAL_CST
7565 specified by EXPR into the buffer PTR of length LEN bytes.
7566 Return the number of bytes placed in the buffer, or zero
7567 upon failure. */
7569 static int
7570 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7572 tree type = TREE_TYPE (expr);
7573 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7574 int byte, offset, word, words, bitpos;
7575 unsigned char value;
7577 /* There are always 32 bits in each long, no matter the size of
7578 the hosts long. We handle floating point representations with
7579 up to 192 bits. */
7580 long tmp[6];
7582 if (total_bytes > len)
7583 return 0;
7584 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7586 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7588 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7589 bitpos += BITS_PER_UNIT)
7591 byte = (bitpos / BITS_PER_UNIT) & 3;
7592 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7594 if (UNITS_PER_WORD < 4)
7596 word = byte / UNITS_PER_WORD;
7597 if (WORDS_BIG_ENDIAN)
7598 word = (words - 1) - word;
7599 offset = word * UNITS_PER_WORD;
7600 if (BYTES_BIG_ENDIAN)
7601 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7602 else
7603 offset += byte % UNITS_PER_WORD;
7605 else
7606 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7607 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7609 return total_bytes;
7612 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7613 specified by EXPR into the buffer PTR of length LEN bytes.
7614 Return the number of bytes placed in the buffer, or zero
7615 upon failure. */
7617 static int
7618 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7620 int rsize, isize;
7621 tree part;
7623 part = TREE_REALPART (expr);
7624 rsize = native_encode_expr (part, ptr, len);
7625 if (rsize == 0)
7626 return 0;
7627 part = TREE_IMAGPART (expr);
7628 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7629 if (isize != rsize)
7630 return 0;
7631 return rsize + isize;
7635 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7636 specified by EXPR into the buffer PTR of length LEN bytes.
7637 Return the number of bytes placed in the buffer, or zero
7638 upon failure. */
7640 static int
7641 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7643 int i, size, offset, count;
7644 tree itype, elem, elements;
7646 offset = 0;
7647 elements = TREE_VECTOR_CST_ELTS (expr);
7648 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7649 itype = TREE_TYPE (TREE_TYPE (expr));
7650 size = GET_MODE_SIZE (TYPE_MODE (itype));
7651 for (i = 0; i < count; i++)
7653 if (elements)
7655 elem = TREE_VALUE (elements);
7656 elements = TREE_CHAIN (elements);
7658 else
7659 elem = NULL_TREE;
7661 if (elem)
7663 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7664 return 0;
7666 else
7668 if (offset + size > len)
7669 return 0;
7670 memset (ptr+offset, 0, size);
7672 offset += size;
7674 return offset;
7678 /* Subroutine of native_encode_expr. Encode the STRING_CST
7679 specified by EXPR into the buffer PTR of length LEN bytes.
7680 Return the number of bytes placed in the buffer, or zero
7681 upon failure. */
7683 static int
7684 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7686 tree type = TREE_TYPE (expr);
7687 HOST_WIDE_INT total_bytes;
7689 if (TREE_CODE (type) != ARRAY_TYPE
7690 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7691 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7692 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7693 return 0;
7694 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7695 if (total_bytes > len)
7696 return 0;
7697 if (TREE_STRING_LENGTH (expr) < total_bytes)
7699 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7700 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7701 total_bytes - TREE_STRING_LENGTH (expr));
7703 else
7704 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7705 return total_bytes;
7709 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7710 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7711 buffer PTR of length LEN bytes. Return the number of bytes
7712 placed in the buffer, or zero upon failure. */
7715 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7717 switch (TREE_CODE (expr))
7719 case INTEGER_CST:
7720 return native_encode_int (expr, ptr, len);
7722 case REAL_CST:
7723 return native_encode_real (expr, ptr, len);
7725 case COMPLEX_CST:
7726 return native_encode_complex (expr, ptr, len);
7728 case VECTOR_CST:
7729 return native_encode_vector (expr, ptr, len);
7731 case STRING_CST:
7732 return native_encode_string (expr, ptr, len);
7734 default:
7735 return 0;
7740 /* Subroutine of native_interpret_expr. Interpret the contents of
7741 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7742 If the buffer cannot be interpreted, return NULL_TREE. */
7744 static tree
7745 native_interpret_int (tree type, const unsigned char *ptr, int len)
7747 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7748 int byte, offset, word, words;
7749 unsigned char value;
7750 unsigned int HOST_WIDE_INT lo = 0;
7751 HOST_WIDE_INT hi = 0;
7753 if (total_bytes > len)
7754 return NULL_TREE;
7755 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7756 return NULL_TREE;
7757 words = total_bytes / UNITS_PER_WORD;
7759 for (byte = 0; byte < total_bytes; byte++)
7761 int bitpos = byte * BITS_PER_UNIT;
7762 if (total_bytes > UNITS_PER_WORD)
7764 word = byte / UNITS_PER_WORD;
7765 if (WORDS_BIG_ENDIAN)
7766 word = (words - 1) - word;
7767 offset = word * UNITS_PER_WORD;
7768 if (BYTES_BIG_ENDIAN)
7769 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7770 else
7771 offset += byte % UNITS_PER_WORD;
7773 else
7774 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7775 value = ptr[offset];
7777 if (bitpos < HOST_BITS_PER_WIDE_INT)
7778 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7779 else
7780 hi |= (unsigned HOST_WIDE_INT) value
7781 << (bitpos - HOST_BITS_PER_WIDE_INT);
7784 return build_int_cst_wide_type (type, lo, hi);
7788 /* Subroutine of native_interpret_expr. Interpret the contents of
7789 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7790 If the buffer cannot be interpreted, return NULL_TREE. */
7792 static tree
7793 native_interpret_real (tree type, const unsigned char *ptr, int len)
7795 enum machine_mode mode = TYPE_MODE (type);
7796 int total_bytes = GET_MODE_SIZE (mode);
7797 int byte, offset, word, words, bitpos;
7798 unsigned char value;
7799 /* There are always 32 bits in each long, no matter the size of
7800 the hosts long. We handle floating point representations with
7801 up to 192 bits. */
7802 REAL_VALUE_TYPE r;
7803 long tmp[6];
7805 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7806 if (total_bytes > len || total_bytes > 24)
7807 return NULL_TREE;
7808 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7810 memset (tmp, 0, sizeof (tmp));
7811 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7812 bitpos += BITS_PER_UNIT)
7814 byte = (bitpos / BITS_PER_UNIT) & 3;
7815 if (UNITS_PER_WORD < 4)
7817 word = byte / UNITS_PER_WORD;
7818 if (WORDS_BIG_ENDIAN)
7819 word = (words - 1) - word;
7820 offset = word * UNITS_PER_WORD;
7821 if (BYTES_BIG_ENDIAN)
7822 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7823 else
7824 offset += byte % UNITS_PER_WORD;
7826 else
7827 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7828 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7830 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7833 real_from_target (&r, tmp, mode);
7834 return build_real (type, r);
7838 /* Subroutine of native_interpret_expr. Interpret the contents of
7839 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7840 If the buffer cannot be interpreted, return NULL_TREE. */
7842 static tree
7843 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7845 tree etype, rpart, ipart;
7846 int size;
7848 etype = TREE_TYPE (type);
7849 size = GET_MODE_SIZE (TYPE_MODE (etype));
7850 if (size * 2 > len)
7851 return NULL_TREE;
7852 rpart = native_interpret_expr (etype, ptr, size);
7853 if (!rpart)
7854 return NULL_TREE;
7855 ipart = native_interpret_expr (etype, ptr+size, size);
7856 if (!ipart)
7857 return NULL_TREE;
7858 return build_complex (type, rpart, ipart);
7862 /* Subroutine of native_interpret_expr. Interpret the contents of
7863 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7864 If the buffer cannot be interpreted, return NULL_TREE. */
7866 static tree
7867 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7869 tree etype, elem, elements;
7870 int i, size, count;
7872 etype = TREE_TYPE (type);
7873 size = GET_MODE_SIZE (TYPE_MODE (etype));
7874 count = TYPE_VECTOR_SUBPARTS (type);
7875 if (size * count > len)
7876 return NULL_TREE;
7878 elements = NULL_TREE;
7879 for (i = count - 1; i >= 0; i--)
7881 elem = native_interpret_expr (etype, ptr+(i*size), size);
7882 if (!elem)
7883 return NULL_TREE;
7884 elements = tree_cons (NULL_TREE, elem, elements);
7886 return build_vector (type, elements);
7890 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7891 the buffer PTR of length LEN as a constant of type TYPE. For
7892 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7893 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7894 return NULL_TREE. */
7896 tree
7897 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7899 switch (TREE_CODE (type))
7901 case INTEGER_TYPE:
7902 case ENUMERAL_TYPE:
7903 case BOOLEAN_TYPE:
7904 return native_interpret_int (type, ptr, len);
7906 case REAL_TYPE:
7907 return native_interpret_real (type, ptr, len);
7909 case COMPLEX_TYPE:
7910 return native_interpret_complex (type, ptr, len);
7912 case VECTOR_TYPE:
7913 return native_interpret_vector (type, ptr, len);
7915 default:
7916 return NULL_TREE;
7921 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7922 TYPE at compile-time. If we're unable to perform the conversion
7923 return NULL_TREE. */
7925 static tree
7926 fold_view_convert_expr (tree type, tree expr)
7928 /* We support up to 512-bit values (for V8DFmode). */
7929 unsigned char buffer[64];
7930 int len;
7932 /* Check that the host and target are sane. */
7933 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7934 return NULL_TREE;
7936 len = native_encode_expr (expr, buffer, sizeof (buffer));
7937 if (len == 0)
7938 return NULL_TREE;
7940 return native_interpret_expr (type, buffer, len);
7943 /* Build an expression for the address of T. Folds away INDIRECT_REF
7944 to avoid confusing the gimplify process. When IN_FOLD is true
7945 avoid modifications of T. */
7947 static tree
7948 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7950 /* The size of the object is not relevant when talking about its address. */
7951 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7952 t = TREE_OPERAND (t, 0);
7954 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7955 if (TREE_CODE (t) == INDIRECT_REF
7956 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7958 t = TREE_OPERAND (t, 0);
7960 if (TREE_TYPE (t) != ptrtype)
7961 t = build1 (NOP_EXPR, ptrtype, t);
7963 else if (!in_fold)
7965 tree base = t;
7967 while (handled_component_p (base))
7968 base = TREE_OPERAND (base, 0);
7970 if (DECL_P (base))
7971 TREE_ADDRESSABLE (base) = 1;
7973 t = build1 (ADDR_EXPR, ptrtype, t);
7975 else
7976 t = build1 (ADDR_EXPR, ptrtype, t);
7978 return t;
7981 /* Build an expression for the address of T with type PTRTYPE. This
7982 function modifies the input parameter 'T' by sometimes setting the
7983 TREE_ADDRESSABLE flag. */
7985 tree
7986 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7988 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7991 /* Build an expression for the address of T. This function modifies
7992 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7993 flag. When called from fold functions, use fold_addr_expr instead. */
7995 tree
7996 build_fold_addr_expr (tree t)
7998 return build_fold_addr_expr_with_type_1 (t,
7999 build_pointer_type (TREE_TYPE (t)),
8000 false);
8003 /* Same as build_fold_addr_expr, builds an expression for the address
8004 of T, but avoids touching the input node 't'. Fold functions
8005 should use this version. */
8007 static tree
8008 fold_addr_expr (tree t)
8010 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8012 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
8015 /* Fold a unary expression of code CODE and type TYPE with operand
8016 OP0. Return the folded expression if folding is successful.
8017 Otherwise, return NULL_TREE. */
8019 tree
8020 fold_unary (enum tree_code code, tree type, tree op0)
8022 tree tem;
8023 tree arg0;
8024 enum tree_code_class kind = TREE_CODE_CLASS (code);
8026 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8027 && TREE_CODE_LENGTH (code) == 1);
8029 arg0 = op0;
8030 if (arg0)
8032 if (CONVERT_EXPR_CODE_P (code)
8033 || code == FLOAT_EXPR || code == ABS_EXPR)
8035 /* Don't use STRIP_NOPS, because signedness of argument type
8036 matters. */
8037 STRIP_SIGN_NOPS (arg0);
8039 else
8041 /* Strip any conversions that don't change the mode. This
8042 is safe for every expression, except for a comparison
8043 expression because its signedness is derived from its
8044 operands.
8046 Note that this is done as an internal manipulation within
8047 the constant folder, in order to find the simplest
8048 representation of the arguments so that their form can be
8049 studied. In any cases, the appropriate type conversions
8050 should be put back in the tree that will get out of the
8051 constant folder. */
8052 STRIP_NOPS (arg0);
8056 if (TREE_CODE_CLASS (code) == tcc_unary)
8058 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8059 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8060 fold_build1 (code, type,
8061 fold_convert (TREE_TYPE (op0),
8062 TREE_OPERAND (arg0, 1))));
8063 else if (TREE_CODE (arg0) == COND_EXPR)
8065 tree arg01 = TREE_OPERAND (arg0, 1);
8066 tree arg02 = TREE_OPERAND (arg0, 2);
8067 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8068 arg01 = fold_build1 (code, type,
8069 fold_convert (TREE_TYPE (op0), arg01));
8070 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8071 arg02 = fold_build1 (code, type,
8072 fold_convert (TREE_TYPE (op0), arg02));
8073 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8074 arg01, arg02);
8076 /* If this was a conversion, and all we did was to move into
8077 inside the COND_EXPR, bring it back out. But leave it if
8078 it is a conversion from integer to integer and the
8079 result precision is no wider than a word since such a
8080 conversion is cheap and may be optimized away by combine,
8081 while it couldn't if it were outside the COND_EXPR. Then return
8082 so we don't get into an infinite recursion loop taking the
8083 conversion out and then back in. */
8085 if ((CONVERT_EXPR_CODE_P (code)
8086 || code == NON_LVALUE_EXPR)
8087 && TREE_CODE (tem) == COND_EXPR
8088 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8089 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8090 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8091 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8092 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8093 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8094 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8095 && (INTEGRAL_TYPE_P
8096 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8097 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8098 || flag_syntax_only))
8099 tem = build1 (code, type,
8100 build3 (COND_EXPR,
8101 TREE_TYPE (TREE_OPERAND
8102 (TREE_OPERAND (tem, 1), 0)),
8103 TREE_OPERAND (tem, 0),
8104 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8105 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8106 return tem;
8108 else if (COMPARISON_CLASS_P (arg0))
8110 if (TREE_CODE (type) == BOOLEAN_TYPE)
8112 arg0 = copy_node (arg0);
8113 TREE_TYPE (arg0) = type;
8114 return arg0;
8116 else if (TREE_CODE (type) != INTEGER_TYPE)
8117 return fold_build3 (COND_EXPR, type, arg0,
8118 fold_build1 (code, type,
8119 integer_one_node),
8120 fold_build1 (code, type,
8121 integer_zero_node));
8125 switch (code)
8127 case PAREN_EXPR:
8128 /* Re-association barriers around constants and other re-association
8129 barriers can be removed. */
8130 if (CONSTANT_CLASS_P (op0)
8131 || TREE_CODE (op0) == PAREN_EXPR)
8132 return fold_convert (type, op0);
8133 return NULL_TREE;
8135 CASE_CONVERT:
8136 case FLOAT_EXPR:
8137 case FIX_TRUNC_EXPR:
8138 if (TREE_TYPE (op0) == type)
8139 return op0;
8141 /* If we have (type) (a CMP b) and type is an integral type, return
8142 new expression involving the new type. */
8143 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8144 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8145 TREE_OPERAND (op0, 1));
8147 /* Handle cases of two conversions in a row. */
8148 if (CONVERT_EXPR_P (op0))
8150 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8151 tree inter_type = TREE_TYPE (op0);
8152 int inside_int = INTEGRAL_TYPE_P (inside_type);
8153 int inside_ptr = POINTER_TYPE_P (inside_type);
8154 int inside_float = FLOAT_TYPE_P (inside_type);
8155 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8156 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8157 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8158 int inter_int = INTEGRAL_TYPE_P (inter_type);
8159 int inter_ptr = POINTER_TYPE_P (inter_type);
8160 int inter_float = FLOAT_TYPE_P (inter_type);
8161 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8162 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8163 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8164 int final_int = INTEGRAL_TYPE_P (type);
8165 int final_ptr = POINTER_TYPE_P (type);
8166 int final_float = FLOAT_TYPE_P (type);
8167 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8168 unsigned int final_prec = TYPE_PRECISION (type);
8169 int final_unsignedp = TYPE_UNSIGNED (type);
8171 /* In addition to the cases of two conversions in a row
8172 handled below, if we are converting something to its own
8173 type via an object of identical or wider precision, neither
8174 conversion is needed. */
8175 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8176 && (((inter_int || inter_ptr) && final_int)
8177 || (inter_float && final_float))
8178 && inter_prec >= final_prec)
8179 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8181 /* Likewise, if the intermediate and final types are either both
8182 float or both integer, we don't need the middle conversion if
8183 it is wider than the final type and doesn't change the signedness
8184 (for integers). Avoid this if the final type is a pointer
8185 since then we sometimes need the inner conversion. Likewise if
8186 the outer has a precision not equal to the size of its mode. */
8187 if (((inter_int && inside_int)
8188 || (inter_float && inside_float)
8189 || (inter_vec && inside_vec))
8190 && inter_prec >= inside_prec
8191 && (inter_float || inter_vec
8192 || inter_unsignedp == inside_unsignedp)
8193 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8194 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8195 && ! final_ptr
8196 && (! final_vec || inter_prec == inside_prec))
8197 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8199 /* If we have a sign-extension of a zero-extended value, we can
8200 replace that by a single zero-extension. */
8201 if (inside_int && inter_int && final_int
8202 && inside_prec < inter_prec && inter_prec < final_prec
8203 && inside_unsignedp && !inter_unsignedp)
8204 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8206 /* Two conversions in a row are not needed unless:
8207 - some conversion is floating-point (overstrict for now), or
8208 - some conversion is a vector (overstrict for now), or
8209 - the intermediate type is narrower than both initial and
8210 final, or
8211 - the intermediate type and innermost type differ in signedness,
8212 and the outermost type is wider than the intermediate, or
8213 - the initial type is a pointer type and the precisions of the
8214 intermediate and final types differ, or
8215 - the final type is a pointer type and the precisions of the
8216 initial and intermediate types differ. */
8217 if (! inside_float && ! inter_float && ! final_float
8218 && ! inside_vec && ! inter_vec && ! final_vec
8219 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8220 && ! (inside_int && inter_int
8221 && inter_unsignedp != inside_unsignedp
8222 && inter_prec < final_prec)
8223 && ((inter_unsignedp && inter_prec > inside_prec)
8224 == (final_unsignedp && final_prec > inter_prec))
8225 && ! (inside_ptr && inter_prec != final_prec)
8226 && ! (final_ptr && inside_prec != inter_prec)
8227 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8228 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8229 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8232 /* Handle (T *)&A.B.C for A being of type T and B and C
8233 living at offset zero. This occurs frequently in
8234 C++ upcasting and then accessing the base. */
8235 if (TREE_CODE (op0) == ADDR_EXPR
8236 && POINTER_TYPE_P (type)
8237 && handled_component_p (TREE_OPERAND (op0, 0)))
8239 HOST_WIDE_INT bitsize, bitpos;
8240 tree offset;
8241 enum machine_mode mode;
8242 int unsignedp, volatilep;
8243 tree base = TREE_OPERAND (op0, 0);
8244 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8245 &mode, &unsignedp, &volatilep, false);
8246 /* If the reference was to a (constant) zero offset, we can use
8247 the address of the base if it has the same base type
8248 as the result type. */
8249 if (! offset && bitpos == 0
8250 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8251 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8252 return fold_convert (type, fold_addr_expr (base));
8255 if (TREE_CODE (op0) == MODIFY_EXPR
8256 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8257 /* Detect assigning a bitfield. */
8258 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8259 && DECL_BIT_FIELD
8260 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8262 /* Don't leave an assignment inside a conversion
8263 unless assigning a bitfield. */
8264 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8265 /* First do the assignment, then return converted constant. */
8266 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8267 TREE_NO_WARNING (tem) = 1;
8268 TREE_USED (tem) = 1;
8269 return tem;
8272 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8273 constants (if x has signed type, the sign bit cannot be set
8274 in c). This folds extension into the BIT_AND_EXPR.
8275 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8276 very likely don't have maximal range for their precision and this
8277 transformation effectively doesn't preserve non-maximal ranges. */
8278 if (TREE_CODE (type) == INTEGER_TYPE
8279 && TREE_CODE (op0) == BIT_AND_EXPR
8280 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8281 /* Not if the conversion is to the sub-type. */
8282 && TREE_TYPE (type) != TREE_TYPE (op0))
8284 tree and = op0;
8285 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8286 int change = 0;
8288 if (TYPE_UNSIGNED (TREE_TYPE (and))
8289 || (TYPE_PRECISION (type)
8290 <= TYPE_PRECISION (TREE_TYPE (and))))
8291 change = 1;
8292 else if (TYPE_PRECISION (TREE_TYPE (and1))
8293 <= HOST_BITS_PER_WIDE_INT
8294 && host_integerp (and1, 1))
8296 unsigned HOST_WIDE_INT cst;
8298 cst = tree_low_cst (and1, 1);
8299 cst &= (HOST_WIDE_INT) -1
8300 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8301 change = (cst == 0);
8302 #ifdef LOAD_EXTEND_OP
8303 if (change
8304 && !flag_syntax_only
8305 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8306 == ZERO_EXTEND))
8308 tree uns = unsigned_type_for (TREE_TYPE (and0));
8309 and0 = fold_convert (uns, and0);
8310 and1 = fold_convert (uns, and1);
8312 #endif
8314 if (change)
8316 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8317 TREE_INT_CST_HIGH (and1), 0,
8318 TREE_OVERFLOW (and1));
8319 return fold_build2 (BIT_AND_EXPR, type,
8320 fold_convert (type, and0), tem);
8324 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8325 when one of the new casts will fold away. Conservatively we assume
8326 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8327 if (POINTER_TYPE_P (type)
8328 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8329 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8330 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8331 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8333 tree arg00 = TREE_OPERAND (arg0, 0);
8334 tree arg01 = TREE_OPERAND (arg0, 1);
8336 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8337 fold_convert (sizetype, arg01));
8340 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8341 of the same precision, and X is an integer type not narrower than
8342 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8343 if (INTEGRAL_TYPE_P (type)
8344 && TREE_CODE (op0) == BIT_NOT_EXPR
8345 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8346 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8347 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8349 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8350 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8351 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8352 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8355 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8356 type of X and Y (integer types only). */
8357 if (INTEGRAL_TYPE_P (type)
8358 && TREE_CODE (op0) == MULT_EXPR
8359 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8360 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8362 /* Be careful not to introduce new overflows. */
8363 tree mult_type;
8364 if (TYPE_OVERFLOW_WRAPS (type))
8365 mult_type = type;
8366 else
8367 mult_type = unsigned_type_for (type);
8369 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8371 tem = fold_build2 (MULT_EXPR, mult_type,
8372 fold_convert (mult_type,
8373 TREE_OPERAND (op0, 0)),
8374 fold_convert (mult_type,
8375 TREE_OPERAND (op0, 1)));
8376 return fold_convert (type, tem);
8380 tem = fold_convert_const (code, type, op0);
8381 return tem ? tem : NULL_TREE;
8383 case FIXED_CONVERT_EXPR:
8384 tem = fold_convert_const (code, type, arg0);
8385 return tem ? tem : NULL_TREE;
8387 case VIEW_CONVERT_EXPR:
8388 if (TREE_TYPE (op0) == type)
8389 return op0;
8390 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8391 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8393 /* For integral conversions with the same precision or pointer
8394 conversions use a NOP_EXPR instead. */
8395 if ((INTEGRAL_TYPE_P (type)
8396 || POINTER_TYPE_P (type))
8397 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8398 || POINTER_TYPE_P (TREE_TYPE (op0)))
8399 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8400 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8401 a sub-type to its base type as generated by the Ada FE. */
8402 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8403 && TREE_TYPE (TREE_TYPE (op0))))
8404 return fold_convert (type, op0);
8406 /* Strip inner integral conversions that do not change the precision. */
8407 if (CONVERT_EXPR_P (op0)
8408 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8409 || POINTER_TYPE_P (TREE_TYPE (op0)))
8410 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8411 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8412 && (TYPE_PRECISION (TREE_TYPE (op0))
8413 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8414 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8416 return fold_view_convert_expr (type, op0);
8418 case NEGATE_EXPR:
8419 case NEGATENV_EXPR:
8420 tem = fold_negate_expr (arg0);
8421 if (tem)
8422 return fold_convert (type, tem);
8423 return NULL_TREE;
8425 case ABS_EXPR:
8426 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8427 return fold_abs_const (arg0, type);
8428 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8429 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8430 /* Convert fabs((double)float) into (double)fabsf(float). */
8431 else if (TREE_CODE (arg0) == NOP_EXPR
8432 && TREE_CODE (type) == REAL_TYPE)
8434 tree targ0 = strip_float_extensions (arg0);
8435 if (targ0 != arg0)
8436 return fold_convert (type, fold_build1 (ABS_EXPR,
8437 TREE_TYPE (targ0),
8438 targ0));
8440 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8441 else if (TREE_CODE (arg0) == ABS_EXPR)
8442 return arg0;
8443 else if (tree_expr_nonnegative_p (arg0))
8444 return arg0;
8446 /* Strip sign ops from argument. */
8447 if (TREE_CODE (type) == REAL_TYPE)
8449 tem = fold_strip_sign_ops (arg0);
8450 if (tem)
8451 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8453 return NULL_TREE;
8455 case CONJ_EXPR:
8456 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8457 return fold_convert (type, arg0);
8458 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8460 tree itype = TREE_TYPE (type);
8461 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8462 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8463 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8465 if (TREE_CODE (arg0) == COMPLEX_CST)
8467 tree itype = TREE_TYPE (type);
8468 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8469 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8470 return build_complex (type, rpart, negate_expr (ipart));
8472 if (TREE_CODE (arg0) == CONJ_EXPR)
8473 return fold_convert (type, TREE_OPERAND (arg0, 0));
8474 return NULL_TREE;
8476 case BIT_NOT_EXPR:
8477 if (TREE_CODE (arg0) == INTEGER_CST)
8478 return fold_not_const (arg0, type);
8479 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8480 return fold_convert (type, TREE_OPERAND (arg0, 0));
8481 /* Convert ~ (-A) to A - 1. */
8482 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8483 return fold_build2 (MINUS_EXPR, type,
8484 fold_convert (type, TREE_OPERAND (arg0, 0)),
8485 build_int_cst (type, 1));
8486 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8487 else if (INTEGRAL_TYPE_P (type)
8488 && ((TREE_CODE (arg0) == MINUS_EXPR
8489 && integer_onep (TREE_OPERAND (arg0, 1)))
8490 || (TREE_CODE (arg0) == PLUS_EXPR
8491 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8492 return fold_build1 (NEGATE_EXPR, type,
8493 fold_convert (type, TREE_OPERAND (arg0, 0)));
8494 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8495 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8496 && (tem = fold_unary (BIT_NOT_EXPR, type,
8497 fold_convert (type,
8498 TREE_OPERAND (arg0, 0)))))
8499 return fold_build2 (BIT_XOR_EXPR, type, tem,
8500 fold_convert (type, TREE_OPERAND (arg0, 1)));
8501 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8502 && (tem = fold_unary (BIT_NOT_EXPR, type,
8503 fold_convert (type,
8504 TREE_OPERAND (arg0, 1)))))
8505 return fold_build2 (BIT_XOR_EXPR, type,
8506 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8507 /* Perform BIT_NOT_EXPR on each element individually. */
8508 else if (TREE_CODE (arg0) == VECTOR_CST)
8510 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8511 int count = TYPE_VECTOR_SUBPARTS (type), i;
8513 for (i = 0; i < count; i++)
8515 if (elements)
8517 elem = TREE_VALUE (elements);
8518 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8519 if (elem == NULL_TREE)
8520 break;
8521 elements = TREE_CHAIN (elements);
8523 else
8524 elem = build_int_cst (TREE_TYPE (type), -1);
8525 list = tree_cons (NULL_TREE, elem, list);
8527 if (i == count)
8528 return build_vector (type, nreverse (list));
8531 return NULL_TREE;
8533 case TRUTH_NOT_EXPR:
8534 /* The argument to invert_truthvalue must have Boolean type. */
8535 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8536 arg0 = fold_convert (boolean_type_node, arg0);
8538 /* Note that the operand of this must be an int
8539 and its values must be 0 or 1.
8540 ("true" is a fixed value perhaps depending on the language,
8541 but we don't handle values other than 1 correctly yet.) */
8542 tem = fold_truth_not_expr (arg0);
8543 if (!tem)
8544 return NULL_TREE;
8545 return fold_convert (type, tem);
8547 case REALPART_EXPR:
8548 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8549 return fold_convert (type, arg0);
8550 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8551 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8552 TREE_OPERAND (arg0, 1));
8553 if (TREE_CODE (arg0) == COMPLEX_CST)
8554 return fold_convert (type, TREE_REALPART (arg0));
8555 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8557 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8558 tem = fold_build2 (TREE_CODE (arg0), itype,
8559 fold_build1 (REALPART_EXPR, itype,
8560 TREE_OPERAND (arg0, 0)),
8561 fold_build1 (REALPART_EXPR, itype,
8562 TREE_OPERAND (arg0, 1)));
8563 return fold_convert (type, tem);
8565 if (TREE_CODE (arg0) == CONJ_EXPR)
8567 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8568 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8569 return fold_convert (type, tem);
8571 if (TREE_CODE (arg0) == CALL_EXPR)
8573 tree fn = get_callee_fndecl (arg0);
8574 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8575 switch (DECL_FUNCTION_CODE (fn))
8577 CASE_FLT_FN (BUILT_IN_CEXPI):
8578 fn = mathfn_built_in (type, BUILT_IN_COS);
8579 if (fn)
8580 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8581 break;
8583 default:
8584 break;
8587 return NULL_TREE;
8589 case IMAGPART_EXPR:
8590 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8591 return fold_convert (type, integer_zero_node);
8592 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8593 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8594 TREE_OPERAND (arg0, 0));
8595 if (TREE_CODE (arg0) == COMPLEX_CST)
8596 return fold_convert (type, TREE_IMAGPART (arg0));
8597 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8599 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8600 tem = fold_build2 (TREE_CODE (arg0), itype,
8601 fold_build1 (IMAGPART_EXPR, itype,
8602 TREE_OPERAND (arg0, 0)),
8603 fold_build1 (IMAGPART_EXPR, itype,
8604 TREE_OPERAND (arg0, 1)));
8605 return fold_convert (type, tem);
8607 if (TREE_CODE (arg0) == CONJ_EXPR)
8609 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8610 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8611 return fold_convert (type, negate_expr (tem));
8613 if (TREE_CODE (arg0) == CALL_EXPR)
8615 tree fn = get_callee_fndecl (arg0);
8616 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8617 switch (DECL_FUNCTION_CODE (fn))
8619 CASE_FLT_FN (BUILT_IN_CEXPI):
8620 fn = mathfn_built_in (type, BUILT_IN_SIN);
8621 if (fn)
8622 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8623 break;
8625 default:
8626 break;
8629 return NULL_TREE;
8631 default:
8632 return NULL_TREE;
8633 } /* switch (code) */
8637 /* If the operation was a conversion do _not_ mark a resulting constant
8638 with TREE_OVERFLOW if the original constant was not. These conversions
8639 have implementation defined behavior and retaining the TREE_OVERFLOW
8640 flag here would confuse later passes such as VRP. */
8641 tree
8642 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8644 tree res = fold_unary (code, type, op0);
8645 if (res
8646 && TREE_CODE (res) == INTEGER_CST
8647 && TREE_CODE (op0) == INTEGER_CST
8648 && CONVERT_EXPR_CODE_P (code))
8649 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8651 return res;
8654 /* Fold a binary expression of code CODE and type TYPE with operands
8655 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8656 Return the folded expression if folding is successful. Otherwise,
8657 return NULL_TREE. */
8659 static tree
8660 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8662 enum tree_code compl_code;
8664 if (code == MIN_EXPR)
8665 compl_code = MAX_EXPR;
8666 else if (code == MAX_EXPR)
8667 compl_code = MIN_EXPR;
8668 else
8669 gcc_unreachable ();
8671 /* MIN (MAX (a, b), b) == b. */
8672 if (TREE_CODE (op0) == compl_code
8673 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8674 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8676 /* MIN (MAX (b, a), b) == b. */
8677 if (TREE_CODE (op0) == compl_code
8678 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8679 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8680 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8682 /* MIN (a, MAX (a, b)) == a. */
8683 if (TREE_CODE (op1) == compl_code
8684 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8685 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8686 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8688 /* MIN (a, MAX (b, a)) == a. */
8689 if (TREE_CODE (op1) == compl_code
8690 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8691 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8692 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8694 return NULL_TREE;
8697 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8698 by changing CODE to reduce the magnitude of constants involved in
8699 ARG0 of the comparison.
8700 Returns a canonicalized comparison tree if a simplification was
8701 possible, otherwise returns NULL_TREE.
8702 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8703 valid if signed overflow is undefined. */
8705 static tree
8706 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8707 tree arg0, tree arg1,
8708 bool *strict_overflow_p)
8710 enum tree_code code0 = TREE_CODE (arg0);
8711 tree t, cst0 = NULL_TREE;
8712 int sgn0;
8713 bool swap = false;
8715 /* Match A +- CST code arg1 and CST code arg1. We can change the
8716 first form only if overflow is undefined. */
8717 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8718 /* In principle pointers also have undefined overflow behavior,
8719 but that causes problems elsewhere. */
8720 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8721 && (code0 == MINUS_EXPR
8722 || code0 == PLUS_EXPR)
8723 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8724 || code0 == INTEGER_CST))
8725 return NULL_TREE;
8727 /* Identify the constant in arg0 and its sign. */
8728 if (code0 == INTEGER_CST)
8729 cst0 = arg0;
8730 else
8731 cst0 = TREE_OPERAND (arg0, 1);
8732 sgn0 = tree_int_cst_sgn (cst0);
8734 /* Overflowed constants and zero will cause problems. */
8735 if (integer_zerop (cst0)
8736 || TREE_OVERFLOW (cst0))
8737 return NULL_TREE;
8739 /* See if we can reduce the magnitude of the constant in
8740 arg0 by changing the comparison code. */
8741 if (code0 == INTEGER_CST)
8743 /* CST <= arg1 -> CST-1 < arg1. */
8744 if (code == LE_EXPR && sgn0 == 1)
8745 code = LT_EXPR;
8746 /* -CST < arg1 -> -CST-1 <= arg1. */
8747 else if (code == LT_EXPR && sgn0 == -1)
8748 code = LE_EXPR;
8749 /* CST > arg1 -> CST-1 >= arg1. */
8750 else if (code == GT_EXPR && sgn0 == 1)
8751 code = GE_EXPR;
8752 /* -CST >= arg1 -> -CST-1 > arg1. */
8753 else if (code == GE_EXPR && sgn0 == -1)
8754 code = GT_EXPR;
8755 else
8756 return NULL_TREE;
8757 /* arg1 code' CST' might be more canonical. */
8758 swap = true;
8760 else
8762 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8763 if (code == LT_EXPR
8764 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8765 code = LE_EXPR;
8766 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8767 else if (code == GT_EXPR
8768 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8769 code = GE_EXPR;
8770 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8771 else if (code == LE_EXPR
8772 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8773 code = LT_EXPR;
8774 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8775 else if (code == GE_EXPR
8776 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8777 code = GT_EXPR;
8778 else
8779 return NULL_TREE;
8780 *strict_overflow_p = true;
8783 /* Now build the constant reduced in magnitude. But not if that
8784 would produce one outside of its types range. */
8785 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8786 && ((sgn0 == 1
8787 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8788 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8789 || (sgn0 == -1
8790 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8791 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8792 /* We cannot swap the comparison here as that would cause us to
8793 endlessly recurse. */
8794 return NULL_TREE;
8796 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8797 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8798 if (code0 != INTEGER_CST)
8799 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8801 /* If swapping might yield to a more canonical form, do so. */
8802 if (swap)
8803 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8804 else
8805 return fold_build2 (code, type, t, arg1);
8808 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8809 overflow further. Try to decrease the magnitude of constants involved
8810 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8811 and put sole constants at the second argument position.
8812 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8814 static tree
8815 maybe_canonicalize_comparison (enum tree_code code, tree type,
8816 tree arg0, tree arg1)
8818 tree t;
8819 bool strict_overflow_p;
8820 const char * const warnmsg = G_("assuming signed overflow does not occur "
8821 "when reducing constant in comparison");
8823 /* Try canonicalization by simplifying arg0. */
8824 strict_overflow_p = false;
8825 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8826 &strict_overflow_p);
8827 if (t)
8829 if (strict_overflow_p)
8830 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8831 return t;
8834 /* Try canonicalization by simplifying arg1 using the swapped
8835 comparison. */
8836 code = swap_tree_comparison (code);
8837 strict_overflow_p = false;
8838 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8839 &strict_overflow_p);
8840 if (t && strict_overflow_p)
8841 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8842 return t;
8845 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8846 space. This is used to avoid issuing overflow warnings for
8847 expressions like &p->x which can not wrap. */
8849 static bool
8850 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8852 unsigned HOST_WIDE_INT offset_low, total_low;
8853 HOST_WIDE_INT size, offset_high, total_high;
8855 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8856 return true;
8858 if (bitpos < 0)
8859 return true;
8861 if (offset == NULL_TREE)
8863 offset_low = 0;
8864 offset_high = 0;
8866 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8867 return true;
8868 else
8870 offset_low = TREE_INT_CST_LOW (offset);
8871 offset_high = TREE_INT_CST_HIGH (offset);
8874 if (add_double_with_sign (offset_low, offset_high,
8875 bitpos / BITS_PER_UNIT, 0,
8876 &total_low, &total_high,
8877 true))
8878 return true;
8880 if (total_high != 0)
8881 return true;
8883 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8884 if (size <= 0)
8885 return true;
8887 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8888 array. */
8889 if (TREE_CODE (base) == ADDR_EXPR)
8891 HOST_WIDE_INT base_size;
8893 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8894 if (base_size > 0 && size < base_size)
8895 size = base_size;
8898 return total_low > (unsigned HOST_WIDE_INT) size;
8901 /* Subroutine of fold_binary. This routine performs all of the
8902 transformations that are common to the equality/inequality
8903 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8904 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8905 fold_binary should call fold_binary. Fold a comparison with
8906 tree code CODE and type TYPE with operands OP0 and OP1. Return
8907 the folded comparison or NULL_TREE. */
8909 static tree
8910 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8912 tree arg0, arg1, tem;
8914 arg0 = op0;
8915 arg1 = op1;
8917 STRIP_SIGN_NOPS (arg0);
8918 STRIP_SIGN_NOPS (arg1);
8920 tem = fold_relational_const (code, type, arg0, arg1);
8921 if (tem != NULL_TREE)
8922 return tem;
8924 /* If one arg is a real or integer constant, put it last. */
8925 if (tree_swap_operands_p (arg0, arg1, true))
8926 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8928 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8929 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8930 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8931 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8932 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8933 && (TREE_CODE (arg1) == INTEGER_CST
8934 && !TREE_OVERFLOW (arg1)))
8936 tree const1 = TREE_OPERAND (arg0, 1);
8937 tree const2 = arg1;
8938 tree variable = TREE_OPERAND (arg0, 0);
8939 tree lhs;
8940 int lhs_add;
8941 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8943 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8944 TREE_TYPE (arg1), const2, const1);
8946 /* If the constant operation overflowed this can be
8947 simplified as a comparison against INT_MAX/INT_MIN. */
8948 if (TREE_CODE (lhs) == INTEGER_CST
8949 && TREE_OVERFLOW (lhs))
8951 int const1_sgn = tree_int_cst_sgn (const1);
8952 enum tree_code code2 = code;
8954 /* Get the sign of the constant on the lhs if the
8955 operation were VARIABLE + CONST1. */
8956 if (TREE_CODE (arg0) == MINUS_EXPR)
8957 const1_sgn = -const1_sgn;
8959 /* The sign of the constant determines if we overflowed
8960 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8961 Canonicalize to the INT_MIN overflow by swapping the comparison
8962 if necessary. */
8963 if (const1_sgn == -1)
8964 code2 = swap_tree_comparison (code);
8966 /* We now can look at the canonicalized case
8967 VARIABLE + 1 CODE2 INT_MIN
8968 and decide on the result. */
8969 if (code2 == LT_EXPR
8970 || code2 == LE_EXPR
8971 || code2 == EQ_EXPR)
8972 return omit_one_operand (type, boolean_false_node, variable);
8973 else if (code2 == NE_EXPR
8974 || code2 == GE_EXPR
8975 || code2 == GT_EXPR)
8976 return omit_one_operand (type, boolean_true_node, variable);
8979 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8980 && (TREE_CODE (lhs) != INTEGER_CST
8981 || !TREE_OVERFLOW (lhs)))
8983 fold_overflow_warning (("assuming signed overflow does not occur "
8984 "when changing X +- C1 cmp C2 to "
8985 "X cmp C1 +- C2"),
8986 WARN_STRICT_OVERFLOW_COMPARISON);
8987 return fold_build2 (code, type, variable, lhs);
8991 /* For comparisons of pointers we can decompose it to a compile time
8992 comparison of the base objects and the offsets into the object.
8993 This requires at least one operand being an ADDR_EXPR or a
8994 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8995 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8996 && (TREE_CODE (arg0) == ADDR_EXPR
8997 || TREE_CODE (arg1) == ADDR_EXPR
8998 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8999 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9001 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9002 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9003 enum machine_mode mode;
9004 int volatilep, unsignedp;
9005 bool indirect_base0 = false, indirect_base1 = false;
9007 /* Get base and offset for the access. Strip ADDR_EXPR for
9008 get_inner_reference, but put it back by stripping INDIRECT_REF
9009 off the base object if possible. indirect_baseN will be true
9010 if baseN is not an address but refers to the object itself. */
9011 base0 = arg0;
9012 if (TREE_CODE (arg0) == ADDR_EXPR)
9014 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9015 &bitsize, &bitpos0, &offset0, &mode,
9016 &unsignedp, &volatilep, false);
9017 if (TREE_CODE (base0) == INDIRECT_REF)
9018 base0 = TREE_OPERAND (base0, 0);
9019 else
9020 indirect_base0 = true;
9022 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9024 base0 = TREE_OPERAND (arg0, 0);
9025 offset0 = TREE_OPERAND (arg0, 1);
9028 base1 = arg1;
9029 if (TREE_CODE (arg1) == ADDR_EXPR)
9031 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9032 &bitsize, &bitpos1, &offset1, &mode,
9033 &unsignedp, &volatilep, false);
9034 if (TREE_CODE (base1) == INDIRECT_REF)
9035 base1 = TREE_OPERAND (base1, 0);
9036 else
9037 indirect_base1 = true;
9039 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9041 base1 = TREE_OPERAND (arg1, 0);
9042 offset1 = TREE_OPERAND (arg1, 1);
9045 /* If we have equivalent bases we might be able to simplify. */
9046 if (indirect_base0 == indirect_base1
9047 && operand_equal_p (base0, base1, 0))
9049 /* We can fold this expression to a constant if the non-constant
9050 offset parts are equal. */
9051 if ((offset0 == offset1
9052 || (offset0 && offset1
9053 && operand_equal_p (offset0, offset1, 0)))
9054 && (code == EQ_EXPR
9055 || code == NE_EXPR
9056 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9059 if (code != EQ_EXPR
9060 && code != NE_EXPR
9061 && bitpos0 != bitpos1
9062 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9063 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9064 fold_overflow_warning (("assuming pointer wraparound does not "
9065 "occur when comparing P +- C1 with "
9066 "P +- C2"),
9067 WARN_STRICT_OVERFLOW_CONDITIONAL);
9069 switch (code)
9071 case EQ_EXPR:
9072 return constant_boolean_node (bitpos0 == bitpos1, type);
9073 case NE_EXPR:
9074 return constant_boolean_node (bitpos0 != bitpos1, type);
9075 case LT_EXPR:
9076 return constant_boolean_node (bitpos0 < bitpos1, type);
9077 case LE_EXPR:
9078 return constant_boolean_node (bitpos0 <= bitpos1, type);
9079 case GE_EXPR:
9080 return constant_boolean_node (bitpos0 >= bitpos1, type);
9081 case GT_EXPR:
9082 return constant_boolean_node (bitpos0 > bitpos1, type);
9083 default:;
9086 /* We can simplify the comparison to a comparison of the variable
9087 offset parts if the constant offset parts are equal.
9088 Be careful to use signed size type here because otherwise we
9089 mess with array offsets in the wrong way. This is possible
9090 because pointer arithmetic is restricted to retain within an
9091 object and overflow on pointer differences is undefined as of
9092 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9093 else if (bitpos0 == bitpos1
9094 && ((code == EQ_EXPR || code == NE_EXPR)
9095 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9097 tree signed_size_type_node;
9098 signed_size_type_node = signed_type_for (size_type_node);
9100 /* By converting to signed size type we cover middle-end pointer
9101 arithmetic which operates on unsigned pointer types of size
9102 type size and ARRAY_REF offsets which are properly sign or
9103 zero extended from their type in case it is narrower than
9104 size type. */
9105 if (offset0 == NULL_TREE)
9106 offset0 = build_int_cst (signed_size_type_node, 0);
9107 else
9108 offset0 = fold_convert (signed_size_type_node, offset0);
9109 if (offset1 == NULL_TREE)
9110 offset1 = build_int_cst (signed_size_type_node, 0);
9111 else
9112 offset1 = fold_convert (signed_size_type_node, offset1);
9114 if (code != EQ_EXPR
9115 && code != NE_EXPR
9116 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9117 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9118 fold_overflow_warning (("assuming pointer wraparound does not "
9119 "occur when comparing P +- C1 with "
9120 "P +- C2"),
9121 WARN_STRICT_OVERFLOW_COMPARISON);
9123 return fold_build2 (code, type, offset0, offset1);
9126 /* For non-equal bases we can simplify if they are addresses
9127 of local binding decls or constants. */
9128 else if (indirect_base0 && indirect_base1
9129 /* We know that !operand_equal_p (base0, base1, 0)
9130 because the if condition was false. But make
9131 sure two decls are not the same. */
9132 && base0 != base1
9133 && TREE_CODE (arg0) == ADDR_EXPR
9134 && TREE_CODE (arg1) == ADDR_EXPR
9135 && (((TREE_CODE (base0) == VAR_DECL
9136 || TREE_CODE (base0) == PARM_DECL)
9137 && (targetm.binds_local_p (base0)
9138 || CONSTANT_CLASS_P (base1)))
9139 || CONSTANT_CLASS_P (base0))
9140 && (((TREE_CODE (base1) == VAR_DECL
9141 || TREE_CODE (base1) == PARM_DECL)
9142 && (targetm.binds_local_p (base1)
9143 || CONSTANT_CLASS_P (base0)))
9144 || CONSTANT_CLASS_P (base1)))
9146 if (code == EQ_EXPR)
9147 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9148 else if (code == NE_EXPR)
9149 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9151 /* For equal offsets we can simplify to a comparison of the
9152 base addresses. */
9153 else if (bitpos0 == bitpos1
9154 && (indirect_base0
9155 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9156 && (indirect_base1
9157 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9158 && ((offset0 == offset1)
9159 || (offset0 && offset1
9160 && operand_equal_p (offset0, offset1, 0))))
9162 if (indirect_base0)
9163 base0 = fold_addr_expr (base0);
9164 if (indirect_base1)
9165 base1 = fold_addr_expr (base1);
9166 return fold_build2 (code, type, base0, base1);
9170 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9171 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9172 the resulting offset is smaller in absolute value than the
9173 original one. */
9174 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9175 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9176 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9177 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9178 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9179 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9180 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9182 tree const1 = TREE_OPERAND (arg0, 1);
9183 tree const2 = TREE_OPERAND (arg1, 1);
9184 tree variable1 = TREE_OPERAND (arg0, 0);
9185 tree variable2 = TREE_OPERAND (arg1, 0);
9186 tree cst;
9187 const char * const warnmsg = G_("assuming signed overflow does not "
9188 "occur when combining constants around "
9189 "a comparison");
9191 /* Put the constant on the side where it doesn't overflow and is
9192 of lower absolute value than before. */
9193 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9194 ? MINUS_EXPR : PLUS_EXPR,
9195 const2, const1, 0);
9196 if (!TREE_OVERFLOW (cst)
9197 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9199 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9200 return fold_build2 (code, type,
9201 variable1,
9202 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9203 variable2, cst));
9206 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9207 ? MINUS_EXPR : PLUS_EXPR,
9208 const1, const2, 0);
9209 if (!TREE_OVERFLOW (cst)
9210 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9212 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9213 return fold_build2 (code, type,
9214 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9215 variable1, cst),
9216 variable2);
9220 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9221 signed arithmetic case. That form is created by the compiler
9222 often enough for folding it to be of value. One example is in
9223 computing loop trip counts after Operator Strength Reduction. */
9224 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9225 && TREE_CODE (arg0) == MULT_EXPR
9226 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9227 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9228 && integer_zerop (arg1))
9230 tree const1 = TREE_OPERAND (arg0, 1);
9231 tree const2 = arg1; /* zero */
9232 tree variable1 = TREE_OPERAND (arg0, 0);
9233 enum tree_code cmp_code = code;
9235 gcc_assert (!integer_zerop (const1));
9237 fold_overflow_warning (("assuming signed overflow does not occur when "
9238 "eliminating multiplication in comparison "
9239 "with zero"),
9240 WARN_STRICT_OVERFLOW_COMPARISON);
9242 /* If const1 is negative we swap the sense of the comparison. */
9243 if (tree_int_cst_sgn (const1) < 0)
9244 cmp_code = swap_tree_comparison (cmp_code);
9246 return fold_build2 (cmp_code, type, variable1, const2);
9249 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9250 if (tem)
9251 return tem;
9253 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9255 tree targ0 = strip_float_extensions (arg0);
9256 tree targ1 = strip_float_extensions (arg1);
9257 tree newtype = TREE_TYPE (targ0);
9259 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9260 newtype = TREE_TYPE (targ1);
9262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9263 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9264 return fold_build2 (code, type, fold_convert (newtype, targ0),
9265 fold_convert (newtype, targ1));
9267 /* (-a) CMP (-b) -> b CMP a */
9268 if (TREE_CODE (arg0) == NEGATE_EXPR
9269 && TREE_CODE (arg1) == NEGATE_EXPR)
9270 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9271 TREE_OPERAND (arg0, 0));
9273 if (TREE_CODE (arg1) == REAL_CST)
9275 REAL_VALUE_TYPE cst;
9276 cst = TREE_REAL_CST (arg1);
9278 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9279 if (TREE_CODE (arg0) == NEGATE_EXPR)
9280 return fold_build2 (swap_tree_comparison (code), type,
9281 TREE_OPERAND (arg0, 0),
9282 build_real (TREE_TYPE (arg1),
9283 REAL_VALUE_NEGATE (cst)));
9285 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9286 /* a CMP (-0) -> a CMP 0 */
9287 if (REAL_VALUE_MINUS_ZERO (cst))
9288 return fold_build2 (code, type, arg0,
9289 build_real (TREE_TYPE (arg1), dconst0));
9291 /* x != NaN is always true, other ops are always false. */
9292 if (REAL_VALUE_ISNAN (cst)
9293 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9295 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9296 return omit_one_operand (type, tem, arg0);
9299 /* Fold comparisons against infinity. */
9300 if (REAL_VALUE_ISINF (cst))
9302 tem = fold_inf_compare (code, type, arg0, arg1);
9303 if (tem != NULL_TREE)
9304 return tem;
9308 /* If this is a comparison of a real constant with a PLUS_EXPR
9309 or a MINUS_EXPR of a real constant, we can convert it into a
9310 comparison with a revised real constant as long as no overflow
9311 occurs when unsafe_math_optimizations are enabled. */
9312 if (flag_unsafe_math_optimizations
9313 && TREE_CODE (arg1) == REAL_CST
9314 && (TREE_CODE (arg0) == PLUS_EXPR
9315 || TREE_CODE (arg0) == MINUS_EXPR)
9316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9317 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9318 ? MINUS_EXPR : PLUS_EXPR,
9319 arg1, TREE_OPERAND (arg0, 1), 0))
9320 && !TREE_OVERFLOW (tem))
9321 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9323 /* Likewise, we can simplify a comparison of a real constant with
9324 a MINUS_EXPR whose first operand is also a real constant, i.e.
9325 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9326 floating-point types only if -fassociative-math is set. */
9327 if (flag_associative_math
9328 && TREE_CODE (arg1) == REAL_CST
9329 && TREE_CODE (arg0) == MINUS_EXPR
9330 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9331 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9332 arg1, 0))
9333 && !TREE_OVERFLOW (tem))
9334 return fold_build2 (swap_tree_comparison (code), type,
9335 TREE_OPERAND (arg0, 1), tem);
9337 /* Fold comparisons against built-in math functions. */
9338 if (TREE_CODE (arg1) == REAL_CST
9339 && flag_unsafe_math_optimizations
9340 && ! flag_errno_math)
9342 enum built_in_function fcode = builtin_mathfn_code (arg0);
9344 if (fcode != END_BUILTINS)
9346 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9347 if (tem != NULL_TREE)
9348 return tem;
9353 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9354 && CONVERT_EXPR_P (arg0))
9356 /* If we are widening one operand of an integer comparison,
9357 see if the other operand is similarly being widened. Perhaps we
9358 can do the comparison in the narrower type. */
9359 tem = fold_widened_comparison (code, type, arg0, arg1);
9360 if (tem)
9361 return tem;
9363 /* Or if we are changing signedness. */
9364 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9365 if (tem)
9366 return tem;
9369 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9370 constant, we can simplify it. */
9371 if (TREE_CODE (arg1) == INTEGER_CST
9372 && (TREE_CODE (arg0) == MIN_EXPR
9373 || TREE_CODE (arg0) == MAX_EXPR)
9374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9376 tem = optimize_minmax_comparison (code, type, op0, op1);
9377 if (tem)
9378 return tem;
9381 /* Simplify comparison of something with itself. (For IEEE
9382 floating-point, we can only do some of these simplifications.) */
9383 if (operand_equal_p (arg0, arg1, 0))
9385 switch (code)
9387 case EQ_EXPR:
9388 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9389 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9390 return constant_boolean_node (1, type);
9391 break;
9393 case GE_EXPR:
9394 case LE_EXPR:
9395 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9396 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9397 return constant_boolean_node (1, type);
9398 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9400 case NE_EXPR:
9401 /* For NE, we can only do this simplification if integer
9402 or we don't honor IEEE floating point NaNs. */
9403 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9404 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9405 break;
9406 /* ... fall through ... */
9407 case GT_EXPR:
9408 case LT_EXPR:
9409 return constant_boolean_node (0, type);
9410 default:
9411 gcc_unreachable ();
9415 /* If we are comparing an expression that just has comparisons
9416 of two integer values, arithmetic expressions of those comparisons,
9417 and constants, we can simplify it. There are only three cases
9418 to check: the two values can either be equal, the first can be
9419 greater, or the second can be greater. Fold the expression for
9420 those three values. Since each value must be 0 or 1, we have
9421 eight possibilities, each of which corresponds to the constant 0
9422 or 1 or one of the six possible comparisons.
9424 This handles common cases like (a > b) == 0 but also handles
9425 expressions like ((x > y) - (y > x)) > 0, which supposedly
9426 occur in macroized code. */
9428 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9430 tree cval1 = 0, cval2 = 0;
9431 int save_p = 0;
9433 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9434 /* Don't handle degenerate cases here; they should already
9435 have been handled anyway. */
9436 && cval1 != 0 && cval2 != 0
9437 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9438 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9439 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9440 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9441 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9442 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9443 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9445 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9446 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9448 /* We can't just pass T to eval_subst in case cval1 or cval2
9449 was the same as ARG1. */
9451 tree high_result
9452 = fold_build2 (code, type,
9453 eval_subst (arg0, cval1, maxval,
9454 cval2, minval),
9455 arg1);
9456 tree equal_result
9457 = fold_build2 (code, type,
9458 eval_subst (arg0, cval1, maxval,
9459 cval2, maxval),
9460 arg1);
9461 tree low_result
9462 = fold_build2 (code, type,
9463 eval_subst (arg0, cval1, minval,
9464 cval2, maxval),
9465 arg1);
9467 /* All three of these results should be 0 or 1. Confirm they are.
9468 Then use those values to select the proper code to use. */
9470 if (TREE_CODE (high_result) == INTEGER_CST
9471 && TREE_CODE (equal_result) == INTEGER_CST
9472 && TREE_CODE (low_result) == INTEGER_CST)
9474 /* Make a 3-bit mask with the high-order bit being the
9475 value for `>', the next for '=', and the low for '<'. */
9476 switch ((integer_onep (high_result) * 4)
9477 + (integer_onep (equal_result) * 2)
9478 + integer_onep (low_result))
9480 case 0:
9481 /* Always false. */
9482 return omit_one_operand (type, integer_zero_node, arg0);
9483 case 1:
9484 code = LT_EXPR;
9485 break;
9486 case 2:
9487 code = EQ_EXPR;
9488 break;
9489 case 3:
9490 code = LE_EXPR;
9491 break;
9492 case 4:
9493 code = GT_EXPR;
9494 break;
9495 case 5:
9496 code = NE_EXPR;
9497 break;
9498 case 6:
9499 code = GE_EXPR;
9500 break;
9501 case 7:
9502 /* Always true. */
9503 return omit_one_operand (type, integer_one_node, arg0);
9506 if (save_p)
9507 return save_expr (build2 (code, type, cval1, cval2));
9508 return fold_build2 (code, type, cval1, cval2);
9513 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9514 into a single range test. */
9515 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9516 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9517 && TREE_CODE (arg1) == INTEGER_CST
9518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9519 && !integer_zerop (TREE_OPERAND (arg0, 1))
9520 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9521 && !TREE_OVERFLOW (arg1))
9523 tem = fold_div_compare (code, type, arg0, arg1);
9524 if (tem != NULL_TREE)
9525 return tem;
9528 /* Fold ~X op ~Y as Y op X. */
9529 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9530 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9532 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9533 return fold_build2 (code, type,
9534 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9535 TREE_OPERAND (arg0, 0));
9538 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9539 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9540 && TREE_CODE (arg1) == INTEGER_CST)
9542 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9543 return fold_build2 (swap_tree_comparison (code), type,
9544 TREE_OPERAND (arg0, 0),
9545 fold_build1 (BIT_NOT_EXPR, cmp_type,
9546 fold_convert (cmp_type, arg1)));
9549 return NULL_TREE;
9553 /* Subroutine of fold_binary. Optimize complex multiplications of the
9554 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9555 argument EXPR represents the expression "z" of type TYPE. */
9557 static tree
9558 fold_mult_zconjz (tree type, tree expr)
9560 tree itype = TREE_TYPE (type);
9561 tree rpart, ipart, tem;
9563 if (TREE_CODE (expr) == COMPLEX_EXPR)
9565 rpart = TREE_OPERAND (expr, 0);
9566 ipart = TREE_OPERAND (expr, 1);
9568 else if (TREE_CODE (expr) == COMPLEX_CST)
9570 rpart = TREE_REALPART (expr);
9571 ipart = TREE_IMAGPART (expr);
9573 else
9575 expr = save_expr (expr);
9576 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9577 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9580 rpart = save_expr (rpart);
9581 ipart = save_expr (ipart);
9582 tem = fold_build2 (PLUS_EXPR, itype,
9583 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9584 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9585 return fold_build2 (COMPLEX_EXPR, type, tem,
9586 fold_convert (itype, integer_zero_node));
9590 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9591 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9592 guarantees that P and N have the same least significant log2(M) bits.
9593 N is not otherwise constrained. In particular, N is not normalized to
9594 0 <= N < M as is common. In general, the precise value of P is unknown.
9595 M is chosen as large as possible such that constant N can be determined.
9597 Returns M and sets *RESIDUE to N. */
9599 static unsigned HOST_WIDE_INT
9600 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9602 enum tree_code code;
9604 *residue = 0;
9606 code = TREE_CODE (expr);
9607 if (code == ADDR_EXPR)
9609 expr = TREE_OPERAND (expr, 0);
9610 if (handled_component_p (expr))
9612 HOST_WIDE_INT bitsize, bitpos;
9613 tree offset;
9614 enum machine_mode mode;
9615 int unsignedp, volatilep;
9617 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9618 &mode, &unsignedp, &volatilep, false);
9619 *residue = bitpos / BITS_PER_UNIT;
9620 if (offset)
9622 if (TREE_CODE (offset) == INTEGER_CST)
9623 *residue += TREE_INT_CST_LOW (offset);
9624 else
9625 /* We don't handle more complicated offset expressions. */
9626 return 1;
9630 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9631 return DECL_ALIGN_UNIT (expr);
9633 else if (code == POINTER_PLUS_EXPR)
9635 tree op0, op1;
9636 unsigned HOST_WIDE_INT modulus;
9637 enum tree_code inner_code;
9639 op0 = TREE_OPERAND (expr, 0);
9640 STRIP_NOPS (op0);
9641 modulus = get_pointer_modulus_and_residue (op0, residue);
9643 op1 = TREE_OPERAND (expr, 1);
9644 STRIP_NOPS (op1);
9645 inner_code = TREE_CODE (op1);
9646 if (inner_code == INTEGER_CST)
9648 *residue += TREE_INT_CST_LOW (op1);
9649 return modulus;
9651 else if (inner_code == MULT_EXPR)
9653 op1 = TREE_OPERAND (op1, 1);
9654 if (TREE_CODE (op1) == INTEGER_CST)
9656 unsigned HOST_WIDE_INT align;
9658 /* Compute the greatest power-of-2 divisor of op1. */
9659 align = TREE_INT_CST_LOW (op1);
9660 align &= -align;
9662 /* If align is non-zero and less than *modulus, replace
9663 *modulus with align., If align is 0, then either op1 is 0
9664 or the greatest power-of-2 divisor of op1 doesn't fit in an
9665 unsigned HOST_WIDE_INT. In either case, no additional
9666 constraint is imposed. */
9667 if (align)
9668 modulus = MIN (modulus, align);
9670 return modulus;
9675 /* If we get here, we were unable to determine anything useful about the
9676 expression. */
9677 return 1;
9681 /* Fold a binary expression of code CODE and type TYPE with operands
9682 OP0 and OP1. Return the folded expression if folding is
9683 successful. Otherwise, return NULL_TREE. */
9685 tree
9686 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9688 enum tree_code_class kind = TREE_CODE_CLASS (code);
9689 tree arg0, arg1, tem;
9690 tree t1 = NULL_TREE;
9691 bool strict_overflow_p;
9693 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9694 && TREE_CODE_LENGTH (code) == 2
9695 && op0 != NULL_TREE
9696 && op1 != NULL_TREE);
9698 arg0 = op0;
9699 arg1 = op1;
9701 /* Strip any conversions that don't change the mode. This is
9702 safe for every expression, except for a comparison expression
9703 because its signedness is derived from its operands. So, in
9704 the latter case, only strip conversions that don't change the
9705 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9706 preserved.
9708 Note that this is done as an internal manipulation within the
9709 constant folder, in order to find the simplest representation
9710 of the arguments so that their form can be studied. In any
9711 cases, the appropriate type conversions should be put back in
9712 the tree that will get out of the constant folder. */
9714 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9716 STRIP_SIGN_NOPS (arg0);
9717 STRIP_SIGN_NOPS (arg1);
9719 else
9721 STRIP_NOPS (arg0);
9722 STRIP_NOPS (arg1);
9725 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9726 constant but we can't do arithmetic on them. */
9727 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9728 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9729 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9730 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9731 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9732 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9734 if (kind == tcc_binary)
9736 /* Make sure type and arg0 have the same saturating flag. */
9737 gcc_assert (TYPE_SATURATING (type)
9738 == TYPE_SATURATING (TREE_TYPE (arg0)));
9739 tem = const_binop (code, arg0, arg1, 0);
9741 else if (kind == tcc_comparison)
9742 tem = fold_relational_const (code, type, arg0, arg1);
9743 else
9744 tem = NULL_TREE;
9746 if (tem != NULL_TREE)
9748 if (TREE_TYPE (tem) != type)
9749 tem = fold_convert (type, tem);
9750 return tem;
9754 /* If this is a commutative operation, and ARG0 is a constant, move it
9755 to ARG1 to reduce the number of tests below. */
9756 if (commutative_tree_code (code)
9757 && tree_swap_operands_p (arg0, arg1, true))
9758 return fold_build2 (code, type, op1, op0);
9760 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9762 First check for cases where an arithmetic operation is applied to a
9763 compound, conditional, or comparison operation. Push the arithmetic
9764 operation inside the compound or conditional to see if any folding
9765 can then be done. Convert comparison to conditional for this purpose.
9766 The also optimizes non-constant cases that used to be done in
9767 expand_expr.
9769 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9770 one of the operands is a comparison and the other is a comparison, a
9771 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9772 code below would make the expression more complex. Change it to a
9773 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9774 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9776 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9777 || code == EQ_EXPR || code == NE_EXPR)
9778 && ((truth_value_p (TREE_CODE (arg0))
9779 && (truth_value_p (TREE_CODE (arg1))
9780 || (TREE_CODE (arg1) == BIT_AND_EXPR
9781 && integer_onep (TREE_OPERAND (arg1, 1)))))
9782 || (truth_value_p (TREE_CODE (arg1))
9783 && (truth_value_p (TREE_CODE (arg0))
9784 || (TREE_CODE (arg0) == BIT_AND_EXPR
9785 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9787 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9788 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9789 : TRUTH_XOR_EXPR,
9790 boolean_type_node,
9791 fold_convert (boolean_type_node, arg0),
9792 fold_convert (boolean_type_node, arg1));
9794 if (code == EQ_EXPR)
9795 tem = invert_truthvalue (tem);
9797 return fold_convert (type, tem);
9800 if (TREE_CODE_CLASS (code) == tcc_binary
9801 || TREE_CODE_CLASS (code) == tcc_comparison)
9803 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9804 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9805 fold_build2 (code, type,
9806 fold_convert (TREE_TYPE (op0),
9807 TREE_OPERAND (arg0, 1)),
9808 op1));
9809 if (TREE_CODE (arg1) == COMPOUND_EXPR
9810 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9811 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9812 fold_build2 (code, type, op0,
9813 fold_convert (TREE_TYPE (op1),
9814 TREE_OPERAND (arg1, 1))));
9816 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9818 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9819 arg0, arg1,
9820 /*cond_first_p=*/1);
9821 if (tem != NULL_TREE)
9822 return tem;
9825 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9827 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9828 arg1, arg0,
9829 /*cond_first_p=*/0);
9830 if (tem != NULL_TREE)
9831 return tem;
9835 switch (code)
9837 case POINTER_PLUS_EXPR:
9838 case POINTER_PLUSNV_EXPR:
9839 /* 0 +p index -> (type)index */
9840 if (integer_zerop (arg0))
9841 return non_lvalue (fold_convert (type, arg1));
9843 /* PTR +p 0 -> PTR */
9844 if (integer_zerop (arg1))
9845 return non_lvalue (fold_convert (type, arg0));
9847 /* PTR_CST +p CST -> CST1 */
9848 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9849 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9851 /* ??? Auditing required. */
9852 if (code == POINTER_PLUSNV_EXPR)
9853 return NULL_TREE;
9855 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9856 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9857 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9858 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9859 fold_convert (sizetype, arg1),
9860 fold_convert (sizetype, arg0)));
9862 /* index +p PTR -> PTR +p index */
9863 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9864 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9865 return fold_build2 (POINTER_PLUS_EXPR, type,
9866 fold_convert (type, arg1),
9867 fold_convert (sizetype, arg0));
9869 /* (PTR +p B) +p A -> PTR +p (B + A) */
9870 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9872 tree inner;
9873 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9874 tree arg00 = TREE_OPERAND (arg0, 0);
9875 inner = fold_build2 (PLUS_EXPR, sizetype,
9876 arg01, fold_convert (sizetype, arg1));
9877 return fold_convert (type,
9878 fold_build2 (POINTER_PLUS_EXPR,
9879 TREE_TYPE (arg00), arg00, inner));
9882 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9883 of the array. Loop optimizer sometimes produce this type of
9884 expressions. */
9885 if (TREE_CODE (arg0) == ADDR_EXPR)
9887 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9888 if (tem)
9889 return fold_convert (type, tem);
9892 return NULL_TREE;
9894 case PLUS_EXPR:
9895 case PLUSNV_EXPR:
9896 if (! FLOAT_TYPE_P (type))
9898 if (integer_zerop (arg1))
9899 return non_lvalue (fold_convert (type, arg0));
9902 /* ??? Auditing required. */
9903 if (code == PLUSNV_EXPR)
9904 return NULL_TREE;
9906 /* A + (-B) -> A - B */
9907 if (TREE_CODE (arg1) == NEGATE_EXPR)
9908 return fold_build2 (MINUS_EXPR, type,
9909 fold_convert (type, arg0),
9910 fold_convert (type, TREE_OPERAND (arg1, 0)));
9911 /* (-A) + B -> B - A */
9912 if (TREE_CODE (arg0) == NEGATE_EXPR
9913 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9914 return fold_build2 (MINUS_EXPR, type,
9915 fold_convert (type, arg1),
9916 fold_convert (type, TREE_OPERAND (arg0, 0)));
9918 if (INTEGRAL_TYPE_P (type))
9920 /* Convert ~A + 1 to -A. */
9921 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9922 && integer_onep (arg1))
9923 return fold_build1 (NEGATE_EXPR, type,
9924 fold_convert (type, TREE_OPERAND (arg0, 0)));
9926 /* ~X + X is -1. */
9927 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9928 && !TYPE_OVERFLOW_TRAPS (type))
9930 tree tem = TREE_OPERAND (arg0, 0);
9932 STRIP_NOPS (tem);
9933 if (operand_equal_p (tem, arg1, 0))
9935 t1 = build_int_cst_type (type, -1);
9936 return omit_one_operand (type, t1, arg1);
9940 /* X + ~X is -1. */
9941 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9942 && !TYPE_OVERFLOW_TRAPS (type))
9944 tree tem = TREE_OPERAND (arg1, 0);
9946 STRIP_NOPS (tem);
9947 if (operand_equal_p (arg0, tem, 0))
9949 t1 = build_int_cst_type (type, -1);
9950 return omit_one_operand (type, t1, arg0);
9954 /* X + (X / CST) * -CST is X % CST. */
9955 if (TREE_CODE (arg1) == MULT_EXPR
9956 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9957 && operand_equal_p (arg0,
9958 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9960 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9961 tree cst1 = TREE_OPERAND (arg1, 1);
9962 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9963 if (sum && integer_zerop (sum))
9964 return fold_convert (type,
9965 fold_build2 (TRUNC_MOD_EXPR,
9966 TREE_TYPE (arg0), arg0, cst0));
9970 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9971 same or one. Make sure type is not saturating.
9972 fold_plusminus_mult_expr will re-associate. */
9973 if ((TREE_CODE (arg0) == MULT_EXPR
9974 || TREE_CODE (arg1) == MULT_EXPR)
9975 && !TYPE_SATURATING (type)
9976 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9978 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9979 if (tem)
9980 return tem;
9983 if (! FLOAT_TYPE_P (type))
9985 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9986 with a constant, and the two constants have no bits in common,
9987 we should treat this as a BIT_IOR_EXPR since this may produce more
9988 simplifications. */
9989 if (TREE_CODE (arg0) == BIT_AND_EXPR
9990 && TREE_CODE (arg1) == BIT_AND_EXPR
9991 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9992 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9993 && integer_zerop (const_binop (BIT_AND_EXPR,
9994 TREE_OPERAND (arg0, 1),
9995 TREE_OPERAND (arg1, 1), 0)))
9997 code = BIT_IOR_EXPR;
9998 goto bit_ior;
10001 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10002 (plus (plus (mult) (mult)) (foo)) so that we can
10003 take advantage of the factoring cases below. */
10004 if (((TREE_CODE (arg0) == PLUS_EXPR
10005 || TREE_CODE (arg0) == MINUS_EXPR)
10006 && TREE_CODE (arg1) == MULT_EXPR)
10007 || ((TREE_CODE (arg1) == PLUS_EXPR
10008 || TREE_CODE (arg1) == MINUS_EXPR)
10009 && TREE_CODE (arg0) == MULT_EXPR))
10011 tree parg0, parg1, parg, marg;
10012 enum tree_code pcode;
10014 if (TREE_CODE (arg1) == MULT_EXPR)
10015 parg = arg0, marg = arg1;
10016 else
10017 parg = arg1, marg = arg0;
10018 pcode = TREE_CODE (parg);
10019 parg0 = TREE_OPERAND (parg, 0);
10020 parg1 = TREE_OPERAND (parg, 1);
10021 STRIP_NOPS (parg0);
10022 STRIP_NOPS (parg1);
10024 if (TREE_CODE (parg0) == MULT_EXPR
10025 && TREE_CODE (parg1) != MULT_EXPR)
10026 return fold_build2 (pcode, type,
10027 fold_build2 (PLUS_EXPR, type,
10028 fold_convert (type, parg0),
10029 fold_convert (type, marg)),
10030 fold_convert (type, parg1));
10031 if (TREE_CODE (parg0) != MULT_EXPR
10032 && TREE_CODE (parg1) == MULT_EXPR)
10033 return fold_build2 (PLUS_EXPR, type,
10034 fold_convert (type, parg0),
10035 fold_build2 (pcode, type,
10036 fold_convert (type, marg),
10037 fold_convert (type,
10038 parg1)));
10041 else
10043 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10044 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10045 return non_lvalue (fold_convert (type, arg0));
10047 /* Likewise if the operands are reversed. */
10048 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10049 return non_lvalue (fold_convert (type, arg1));
10051 /* Convert X + -C into X - C. */
10052 if (TREE_CODE (arg1) == REAL_CST
10053 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10055 tem = fold_negate_const (arg1, type);
10056 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10057 return fold_build2 (MINUS_EXPR, type,
10058 fold_convert (type, arg0),
10059 fold_convert (type, tem));
10062 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10063 to __complex__ ( x, y ). This is not the same for SNaNs or
10064 if signed zeros are involved. */
10065 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10066 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10067 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10069 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10070 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10071 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10072 bool arg0rz = false, arg0iz = false;
10073 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10074 || (arg0i && (arg0iz = real_zerop (arg0i))))
10076 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10077 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10078 if (arg0rz && arg1i && real_zerop (arg1i))
10080 tree rp = arg1r ? arg1r
10081 : build1 (REALPART_EXPR, rtype, arg1);
10082 tree ip = arg0i ? arg0i
10083 : build1 (IMAGPART_EXPR, rtype, arg0);
10084 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10086 else if (arg0iz && arg1r && real_zerop (arg1r))
10088 tree rp = arg0r ? arg0r
10089 : build1 (REALPART_EXPR, rtype, arg0);
10090 tree ip = arg1i ? arg1i
10091 : build1 (IMAGPART_EXPR, rtype, arg1);
10092 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10097 if (flag_unsafe_math_optimizations
10098 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10099 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10100 && (tem = distribute_real_division (code, type, arg0, arg1)))
10101 return tem;
10103 /* Convert x+x into x*2.0. */
10104 if (operand_equal_p (arg0, arg1, 0)
10105 && SCALAR_FLOAT_TYPE_P (type))
10106 return fold_build2 (MULT_EXPR, type, arg0,
10107 build_real (type, dconst2));
10109 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10110 We associate floats only if the user has specified
10111 -fassociative-math. */
10112 if (flag_associative_math
10113 && TREE_CODE (arg1) == PLUS_EXPR
10114 && TREE_CODE (arg0) != MULT_EXPR)
10116 tree tree10 = TREE_OPERAND (arg1, 0);
10117 tree tree11 = TREE_OPERAND (arg1, 1);
10118 if (TREE_CODE (tree11) == MULT_EXPR
10119 && TREE_CODE (tree10) == MULT_EXPR)
10121 tree tree0;
10122 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10123 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10126 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10127 We associate floats only if the user has specified
10128 -fassociative-math. */
10129 if (flag_associative_math
10130 && TREE_CODE (arg0) == PLUS_EXPR
10131 && TREE_CODE (arg1) != MULT_EXPR)
10133 tree tree00 = TREE_OPERAND (arg0, 0);
10134 tree tree01 = TREE_OPERAND (arg0, 1);
10135 if (TREE_CODE (tree01) == MULT_EXPR
10136 && TREE_CODE (tree00) == MULT_EXPR)
10138 tree tree0;
10139 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10140 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10145 bit_rotate:
10146 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10147 is a rotate of A by C1 bits. */
10148 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10149 is a rotate of A by B bits. */
10151 enum tree_code code0, code1;
10152 tree rtype;
10153 code0 = TREE_CODE (arg0);
10154 code1 = TREE_CODE (arg1);
10155 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10156 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10157 && operand_equal_p (TREE_OPERAND (arg0, 0),
10158 TREE_OPERAND (arg1, 0), 0)
10159 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10160 TYPE_UNSIGNED (rtype))
10161 /* Only create rotates in complete modes. Other cases are not
10162 expanded properly. */
10163 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10165 tree tree01, tree11;
10166 enum tree_code code01, code11;
10168 tree01 = TREE_OPERAND (arg0, 1);
10169 tree11 = TREE_OPERAND (arg1, 1);
10170 STRIP_NOPS (tree01);
10171 STRIP_NOPS (tree11);
10172 code01 = TREE_CODE (tree01);
10173 code11 = TREE_CODE (tree11);
10174 if (code01 == INTEGER_CST
10175 && code11 == INTEGER_CST
10176 && TREE_INT_CST_HIGH (tree01) == 0
10177 && TREE_INT_CST_HIGH (tree11) == 0
10178 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10179 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10180 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10181 code0 == LSHIFT_EXPR ? tree01 : tree11);
10182 else if (code11 == MINUS_EXPR)
10184 tree tree110, tree111;
10185 tree110 = TREE_OPERAND (tree11, 0);
10186 tree111 = TREE_OPERAND (tree11, 1);
10187 STRIP_NOPS (tree110);
10188 STRIP_NOPS (tree111);
10189 if (TREE_CODE (tree110) == INTEGER_CST
10190 && 0 == compare_tree_int (tree110,
10191 TYPE_PRECISION
10192 (TREE_TYPE (TREE_OPERAND
10193 (arg0, 0))))
10194 && operand_equal_p (tree01, tree111, 0))
10195 return build2 ((code0 == LSHIFT_EXPR
10196 ? LROTATE_EXPR
10197 : RROTATE_EXPR),
10198 type, TREE_OPERAND (arg0, 0), tree01);
10200 else if (code01 == MINUS_EXPR)
10202 tree tree010, tree011;
10203 tree010 = TREE_OPERAND (tree01, 0);
10204 tree011 = TREE_OPERAND (tree01, 1);
10205 STRIP_NOPS (tree010);
10206 STRIP_NOPS (tree011);
10207 if (TREE_CODE (tree010) == INTEGER_CST
10208 && 0 == compare_tree_int (tree010,
10209 TYPE_PRECISION
10210 (TREE_TYPE (TREE_OPERAND
10211 (arg0, 0))))
10212 && operand_equal_p (tree11, tree011, 0))
10213 return build2 ((code0 != LSHIFT_EXPR
10214 ? LROTATE_EXPR
10215 : RROTATE_EXPR),
10216 type, TREE_OPERAND (arg0, 0), tree11);
10221 associate:
10222 /* In most languages, can't associate operations on floats through
10223 parentheses. Rather than remember where the parentheses were, we
10224 don't associate floats at all, unless the user has specified
10225 -fassociative-math.
10226 And, we need to make sure type is not saturating. */
10228 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10229 && !TYPE_SATURATING (type))
10231 tree var0, con0, lit0, minus_lit0;
10232 tree var1, con1, lit1, minus_lit1;
10233 bool ok = true;
10235 /* Split both trees into variables, constants, and literals. Then
10236 associate each group together, the constants with literals,
10237 then the result with variables. This increases the chances of
10238 literals being recombined later and of generating relocatable
10239 expressions for the sum of a constant and literal. */
10240 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10241 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10242 code == MINUS_EXPR);
10244 /* With undefined overflow we can only associate constants
10245 with one variable. */
10246 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10247 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10248 && var0 && var1)
10250 tree tmp0 = var0;
10251 tree tmp1 = var1;
10253 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10254 tmp0 = TREE_OPERAND (tmp0, 0);
10255 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10256 tmp1 = TREE_OPERAND (tmp1, 0);
10257 /* The only case we can still associate with two variables
10258 is if they are the same, modulo negation. */
10259 if (!operand_equal_p (tmp0, tmp1, 0))
10260 ok = false;
10263 /* Only do something if we found more than two objects. Otherwise,
10264 nothing has changed and we risk infinite recursion. */
10265 if (ok
10266 && (2 < ((var0 != 0) + (var1 != 0)
10267 + (con0 != 0) + (con1 != 0)
10268 + (lit0 != 0) + (lit1 != 0)
10269 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10271 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10272 if (code == MINUS_EXPR)
10273 code = PLUS_EXPR;
10275 var0 = associate_trees (var0, var1, code, type);
10276 con0 = associate_trees (con0, con1, code, type);
10277 lit0 = associate_trees (lit0, lit1, code, type);
10278 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10280 /* Preserve the MINUS_EXPR if the negative part of the literal is
10281 greater than the positive part. Otherwise, the multiplicative
10282 folding code (i.e extract_muldiv) may be fooled in case
10283 unsigned constants are subtracted, like in the following
10284 example: ((X*2 + 4) - 8U)/2. */
10285 if (minus_lit0 && lit0)
10287 if (TREE_CODE (lit0) == INTEGER_CST
10288 && TREE_CODE (minus_lit0) == INTEGER_CST
10289 && tree_int_cst_lt (lit0, minus_lit0))
10291 minus_lit0 = associate_trees (minus_lit0, lit0,
10292 MINUS_EXPR, type);
10293 lit0 = 0;
10295 else
10297 lit0 = associate_trees (lit0, minus_lit0,
10298 MINUS_EXPR, type);
10299 minus_lit0 = 0;
10302 if (minus_lit0)
10304 if (con0 == 0)
10305 return fold_convert (type,
10306 associate_trees (var0, minus_lit0,
10307 MINUS_EXPR, type));
10308 else
10310 con0 = associate_trees (con0, minus_lit0,
10311 MINUS_EXPR, type);
10312 return fold_convert (type,
10313 associate_trees (var0, con0,
10314 PLUS_EXPR, type));
10318 con0 = associate_trees (con0, lit0, code, type);
10319 return fold_convert (type, associate_trees (var0, con0,
10320 code, type));
10324 return NULL_TREE;
10326 case MINUS_EXPR:
10327 case MINUSNV_EXPR:
10328 if (! FLOAT_TYPE_P (type))
10330 if (integer_zerop (arg0))
10331 return negate_expr (fold_convert (type, arg1));
10332 if (integer_zerop (arg1))
10333 return non_lvalue (fold_convert (type, arg0));
10336 /* ??? Auditing required. */
10337 if (code == MINUSNV_EXPR)
10338 return NULL_TREE;
10340 /* Pointer simplifications for subtraction, simple reassociations. */
10341 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10343 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10344 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10345 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10347 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10348 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10349 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10350 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10351 return fold_build2 (PLUS_EXPR, type,
10352 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10353 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10355 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10356 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10358 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10359 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10360 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10361 if (tmp)
10362 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10365 /* A - (-B) -> A + B */
10366 if (TREE_CODE (arg1) == NEGATE_EXPR)
10367 return fold_build2 (PLUS_EXPR, type, op0,
10368 fold_convert (type, TREE_OPERAND (arg1, 0)));
10369 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10370 if (TREE_CODE (arg0) == NEGATE_EXPR
10371 && (FLOAT_TYPE_P (type)
10372 || INTEGRAL_TYPE_P (type))
10373 && negate_expr_p (arg1)
10374 && reorder_operands_p (arg0, arg1))
10375 return fold_build2 (MINUS_EXPR, type,
10376 fold_convert (type, negate_expr (arg1)),
10377 fold_convert (type, TREE_OPERAND (arg0, 0)));
10378 /* Convert -A - 1 to ~A. */
10379 if (INTEGRAL_TYPE_P (type)
10380 && TREE_CODE (arg0) == NEGATE_EXPR
10381 && integer_onep (arg1)
10382 && !TYPE_OVERFLOW_TRAPS (type))
10383 return fold_build1 (BIT_NOT_EXPR, type,
10384 fold_convert (type, TREE_OPERAND (arg0, 0)));
10386 /* Convert -1 - A to ~A. */
10387 if (INTEGRAL_TYPE_P (type)
10388 && integer_all_onesp (arg0))
10389 return fold_build1 (BIT_NOT_EXPR, type, op1);
10392 /* X - (X / CST) * CST is X % CST. */
10393 if (INTEGRAL_TYPE_P (type)
10394 && TREE_CODE (arg1) == MULT_EXPR
10395 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10396 && operand_equal_p (arg0,
10397 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10398 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10399 TREE_OPERAND (arg1, 1), 0))
10400 return fold_convert (type,
10401 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10402 arg0, TREE_OPERAND (arg1, 1)));
10404 if (! FLOAT_TYPE_P (type))
10406 /* Fold A - (A & B) into ~B & A. */
10407 if (!TREE_SIDE_EFFECTS (arg0)
10408 && TREE_CODE (arg1) == BIT_AND_EXPR)
10410 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10412 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10413 return fold_build2 (BIT_AND_EXPR, type,
10414 fold_build1 (BIT_NOT_EXPR, type, arg10),
10415 fold_convert (type, arg0));
10417 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10419 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10420 return fold_build2 (BIT_AND_EXPR, type,
10421 fold_build1 (BIT_NOT_EXPR, type, arg11),
10422 fold_convert (type, arg0));
10426 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10427 any power of 2 minus 1. */
10428 if (TREE_CODE (arg0) == BIT_AND_EXPR
10429 && TREE_CODE (arg1) == BIT_AND_EXPR
10430 && operand_equal_p (TREE_OPERAND (arg0, 0),
10431 TREE_OPERAND (arg1, 0), 0))
10433 tree mask0 = TREE_OPERAND (arg0, 1);
10434 tree mask1 = TREE_OPERAND (arg1, 1);
10435 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10437 if (operand_equal_p (tem, mask1, 0))
10439 tem = fold_build2 (BIT_XOR_EXPR, type,
10440 TREE_OPERAND (arg0, 0), mask1);
10441 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10446 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10447 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10448 return non_lvalue (fold_convert (type, arg0));
10450 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10451 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10452 (-ARG1 + ARG0) reduces to -ARG1. */
10453 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10454 return negate_expr (fold_convert (type, arg1));
10456 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10457 __complex__ ( x, -y ). This is not the same for SNaNs or if
10458 signed zeros are involved. */
10459 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10461 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10463 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10464 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10465 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10466 bool arg0rz = false, arg0iz = false;
10467 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10468 || (arg0i && (arg0iz = real_zerop (arg0i))))
10470 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10471 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10472 if (arg0rz && arg1i && real_zerop (arg1i))
10474 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10475 arg1r ? arg1r
10476 : build1 (REALPART_EXPR, rtype, arg1));
10477 tree ip = arg0i ? arg0i
10478 : build1 (IMAGPART_EXPR, rtype, arg0);
10479 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10481 else if (arg0iz && arg1r && real_zerop (arg1r))
10483 tree rp = arg0r ? arg0r
10484 : build1 (REALPART_EXPR, rtype, arg0);
10485 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10486 arg1i ? arg1i
10487 : build1 (IMAGPART_EXPR, rtype, arg1));
10488 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10493 /* Fold &x - &x. This can happen from &x.foo - &x.
10494 This is unsafe for certain floats even in non-IEEE formats.
10495 In IEEE, it is unsafe because it does wrong for NaNs.
10496 Also note that operand_equal_p is always false if an operand
10497 is volatile. */
10499 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10500 && operand_equal_p (arg0, arg1, 0))
10501 return fold_convert (type, integer_zero_node);
10503 /* A - B -> A + (-B) if B is easily negatable. */
10504 if (negate_expr_p (arg1)
10505 && ((FLOAT_TYPE_P (type)
10506 /* Avoid this transformation if B is a positive REAL_CST. */
10507 && (TREE_CODE (arg1) != REAL_CST
10508 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10509 || INTEGRAL_TYPE_P (type)))
10510 return fold_build2 (PLUS_EXPR, type,
10511 fold_convert (type, arg0),
10512 fold_convert (type, negate_expr (arg1)));
10514 /* Try folding difference of addresses. */
10516 HOST_WIDE_INT diff;
10518 if ((TREE_CODE (arg0) == ADDR_EXPR
10519 || TREE_CODE (arg1) == ADDR_EXPR)
10520 && ptr_difference_const (arg0, arg1, &diff))
10521 return build_int_cst_type (type, diff);
10524 /* Fold &a[i] - &a[j] to i-j. */
10525 if (TREE_CODE (arg0) == ADDR_EXPR
10526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10527 && TREE_CODE (arg1) == ADDR_EXPR
10528 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10530 tree aref0 = TREE_OPERAND (arg0, 0);
10531 tree aref1 = TREE_OPERAND (arg1, 0);
10532 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10533 TREE_OPERAND (aref1, 0), 0))
10535 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10536 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10537 tree esz = array_ref_element_size (aref0);
10538 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10539 return fold_build2 (MULT_EXPR, type, diff,
10540 fold_convert (type, esz));
10545 if (flag_unsafe_math_optimizations
10546 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10547 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10548 && (tem = distribute_real_division (code, type, arg0, arg1)))
10549 return tem;
10551 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10552 same or one. Make sure type is not saturating.
10553 fold_plusminus_mult_expr will re-associate. */
10554 if ((TREE_CODE (arg0) == MULT_EXPR
10555 || TREE_CODE (arg1) == MULT_EXPR)
10556 && !TYPE_SATURATING (type)
10557 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10559 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10560 if (tem)
10561 return tem;
10564 goto associate;
10566 case MULT_EXPR:
10567 case MULTNV_EXPR:
10568 if (! FLOAT_TYPE_P (type))
10570 if (integer_zerop (arg1))
10571 return omit_one_operand (type, arg1, arg0);
10572 if (integer_onep (arg1))
10573 return non_lvalue (fold_convert (type, arg0));
10574 /* Transform x * -1 into -x. Make sure to do the negation
10575 on the original operand with conversions not stripped
10576 because we can only strip non-sign-changing conversions. */
10577 if (integer_all_onesp (arg1))
10578 return fold_convert (type, negate_expr (op0));
10581 /* ??? Auditing required. */
10582 if (code == MULTNV_EXPR)
10583 return NULL_TREE;
10585 /* (-A) * (-B) -> A * B */
10586 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10587 return fold_build2 (MULT_EXPR, type,
10588 fold_convert (type, TREE_OPERAND (arg0, 0)),
10589 fold_convert (type, negate_expr (arg1)));
10590 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10591 return fold_build2 (MULT_EXPR, type,
10592 fold_convert (type, negate_expr (arg0)),
10593 fold_convert (type, TREE_OPERAND (arg1, 0)));
10595 if (! FLOAT_TYPE_P (type))
10597 /* Transform x * -C into -x * C if x is easily negatable. */
10598 if (TREE_CODE (arg1) == INTEGER_CST
10599 && tree_int_cst_sgn (arg1) == -1
10600 && negate_expr_p (arg0)
10601 && (tem = negate_expr (arg1)) != arg1
10602 && !TREE_OVERFLOW (tem))
10603 return fold_build2 (MULT_EXPR, type,
10604 fold_convert (type, negate_expr (arg0)), tem);
10606 /* (a * (1 << b)) is (a << b) */
10607 if (TREE_CODE (arg1) == LSHIFT_EXPR
10608 && integer_onep (TREE_OPERAND (arg1, 0)))
10609 return fold_build2 (LSHIFT_EXPR, type, op0,
10610 TREE_OPERAND (arg1, 1));
10611 if (TREE_CODE (arg0) == LSHIFT_EXPR
10612 && integer_onep (TREE_OPERAND (arg0, 0)))
10613 return fold_build2 (LSHIFT_EXPR, type, op1,
10614 TREE_OPERAND (arg0, 1));
10616 /* (A + A) * C -> A * 2 * C */
10617 if (TREE_CODE (arg0) == PLUS_EXPR
10618 && TREE_CODE (arg1) == INTEGER_CST
10619 && operand_equal_p (TREE_OPERAND (arg0, 0),
10620 TREE_OPERAND (arg0, 1), 0))
10621 return fold_build2 (MULT_EXPR, type,
10622 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10623 TREE_OPERAND (arg0, 1)),
10624 fold_build2 (MULT_EXPR, type,
10625 build_int_cst (type, 2) , arg1));
10627 strict_overflow_p = false;
10628 if (TREE_CODE (arg1) == INTEGER_CST
10629 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10630 &strict_overflow_p)))
10632 if (strict_overflow_p)
10633 fold_overflow_warning (("assuming signed overflow does not "
10634 "occur when simplifying "
10635 "multiplication"),
10636 WARN_STRICT_OVERFLOW_MISC);
10637 return fold_convert (type, tem);
10640 /* Optimize z * conj(z) for integer complex numbers. */
10641 if (TREE_CODE (arg0) == CONJ_EXPR
10642 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10643 return fold_mult_zconjz (type, arg1);
10644 if (TREE_CODE (arg1) == CONJ_EXPR
10645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10646 return fold_mult_zconjz (type, arg0);
10648 else
10650 /* Maybe fold x * 0 to 0. The expressions aren't the same
10651 when x is NaN, since x * 0 is also NaN. Nor are they the
10652 same in modes with signed zeros, since multiplying a
10653 negative value by 0 gives -0, not +0. */
10654 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10655 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10656 && real_zerop (arg1))
10657 return omit_one_operand (type, arg1, arg0);
10658 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10659 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10660 && real_onep (arg1))
10661 return non_lvalue (fold_convert (type, arg0));
10663 /* Transform x * -1.0 into -x. */
10664 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10665 && real_minus_onep (arg1))
10666 return fold_convert (type, negate_expr (arg0));
10668 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10669 the result for floating point types due to rounding so it is applied
10670 only if -fassociative-math was specify. */
10671 if (flag_associative_math
10672 && TREE_CODE (arg0) == RDIV_EXPR
10673 && TREE_CODE (arg1) == REAL_CST
10674 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10676 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10677 arg1, 0);
10678 if (tem)
10679 return fold_build2 (RDIV_EXPR, type, tem,
10680 TREE_OPERAND (arg0, 1));
10683 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10684 if (operand_equal_p (arg0, arg1, 0))
10686 tree tem = fold_strip_sign_ops (arg0);
10687 if (tem != NULL_TREE)
10689 tem = fold_convert (type, tem);
10690 return fold_build2 (MULT_EXPR, type, tem, tem);
10694 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10695 This is not the same for NaNs or if signed zeros are
10696 involved. */
10697 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10698 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10699 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10700 && TREE_CODE (arg1) == COMPLEX_CST
10701 && real_zerop (TREE_REALPART (arg1)))
10703 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10704 if (real_onep (TREE_IMAGPART (arg1)))
10705 return fold_build2 (COMPLEX_EXPR, type,
10706 negate_expr (fold_build1 (IMAGPART_EXPR,
10707 rtype, arg0)),
10708 fold_build1 (REALPART_EXPR, rtype, arg0));
10709 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10710 return fold_build2 (COMPLEX_EXPR, type,
10711 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10712 negate_expr (fold_build1 (REALPART_EXPR,
10713 rtype, arg0)));
10716 /* Optimize z * conj(z) for floating point complex numbers.
10717 Guarded by flag_unsafe_math_optimizations as non-finite
10718 imaginary components don't produce scalar results. */
10719 if (flag_unsafe_math_optimizations
10720 && TREE_CODE (arg0) == CONJ_EXPR
10721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10722 return fold_mult_zconjz (type, arg1);
10723 if (flag_unsafe_math_optimizations
10724 && TREE_CODE (arg1) == CONJ_EXPR
10725 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10726 return fold_mult_zconjz (type, arg0);
10728 if (flag_unsafe_math_optimizations)
10730 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10731 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10733 /* Optimizations of root(...)*root(...). */
10734 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10736 tree rootfn, arg;
10737 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10738 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10740 /* Optimize sqrt(x)*sqrt(x) as x. */
10741 if (BUILTIN_SQRT_P (fcode0)
10742 && operand_equal_p (arg00, arg10, 0)
10743 && ! HONOR_SNANS (TYPE_MODE (type)))
10744 return arg00;
10746 /* Optimize root(x)*root(y) as root(x*y). */
10747 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10748 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10749 return build_call_expr (rootfn, 1, arg);
10752 /* Optimize expN(x)*expN(y) as expN(x+y). */
10753 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10755 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10756 tree arg = fold_build2 (PLUS_EXPR, type,
10757 CALL_EXPR_ARG (arg0, 0),
10758 CALL_EXPR_ARG (arg1, 0));
10759 return build_call_expr (expfn, 1, arg);
10762 /* Optimizations of pow(...)*pow(...). */
10763 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10764 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10765 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10767 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10768 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10769 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10770 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10772 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10773 if (operand_equal_p (arg01, arg11, 0))
10775 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10776 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10777 return build_call_expr (powfn, 2, arg, arg01);
10780 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10781 if (operand_equal_p (arg00, arg10, 0))
10783 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10784 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10785 return build_call_expr (powfn, 2, arg00, arg);
10789 /* Optimize tan(x)*cos(x) as sin(x). */
10790 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10791 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10792 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10793 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10794 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10795 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10796 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10797 CALL_EXPR_ARG (arg1, 0), 0))
10799 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10801 if (sinfn != NULL_TREE)
10802 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10805 /* Optimize x*pow(x,c) as pow(x,c+1). */
10806 if (fcode1 == BUILT_IN_POW
10807 || fcode1 == BUILT_IN_POWF
10808 || fcode1 == BUILT_IN_POWL)
10810 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10811 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10812 if (TREE_CODE (arg11) == REAL_CST
10813 && !TREE_OVERFLOW (arg11)
10814 && operand_equal_p (arg0, arg10, 0))
10816 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10817 REAL_VALUE_TYPE c;
10818 tree arg;
10820 c = TREE_REAL_CST (arg11);
10821 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10822 arg = build_real (type, c);
10823 return build_call_expr (powfn, 2, arg0, arg);
10827 /* Optimize pow(x,c)*x as pow(x,c+1). */
10828 if (fcode0 == BUILT_IN_POW
10829 || fcode0 == BUILT_IN_POWF
10830 || fcode0 == BUILT_IN_POWL)
10832 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10833 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10834 if (TREE_CODE (arg01) == REAL_CST
10835 && !TREE_OVERFLOW (arg01)
10836 && operand_equal_p (arg1, arg00, 0))
10838 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10839 REAL_VALUE_TYPE c;
10840 tree arg;
10842 c = TREE_REAL_CST (arg01);
10843 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10844 arg = build_real (type, c);
10845 return build_call_expr (powfn, 2, arg1, arg);
10849 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10850 if (optimize_function_for_speed_p (cfun)
10851 && operand_equal_p (arg0, arg1, 0))
10853 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10855 if (powfn)
10857 tree arg = build_real (type, dconst2);
10858 return build_call_expr (powfn, 2, arg0, arg);
10863 goto associate;
10865 case BIT_IOR_EXPR:
10866 bit_ior:
10867 if (integer_all_onesp (arg1))
10868 return omit_one_operand (type, arg1, arg0);
10869 if (integer_zerop (arg1))
10870 return non_lvalue (fold_convert (type, arg0));
10871 if (operand_equal_p (arg0, arg1, 0))
10872 return non_lvalue (fold_convert (type, arg0));
10874 /* ~X | X is -1. */
10875 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10878 t1 = fold_convert (type, integer_zero_node);
10879 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10880 return omit_one_operand (type, t1, arg1);
10883 /* X | ~X is -1. */
10884 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10885 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10887 t1 = fold_convert (type, integer_zero_node);
10888 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10889 return omit_one_operand (type, t1, arg0);
10892 /* Canonicalize (X & C1) | C2. */
10893 if (TREE_CODE (arg0) == BIT_AND_EXPR
10894 && TREE_CODE (arg1) == INTEGER_CST
10895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10897 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10898 int width = TYPE_PRECISION (type), w;
10899 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10900 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10901 hi2 = TREE_INT_CST_HIGH (arg1);
10902 lo2 = TREE_INT_CST_LOW (arg1);
10904 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10905 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10906 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10908 if (width > HOST_BITS_PER_WIDE_INT)
10910 mhi = (unsigned HOST_WIDE_INT) -1
10911 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10912 mlo = -1;
10914 else
10916 mhi = 0;
10917 mlo = (unsigned HOST_WIDE_INT) -1
10918 >> (HOST_BITS_PER_WIDE_INT - width);
10921 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10922 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10923 return fold_build2 (BIT_IOR_EXPR, type,
10924 TREE_OPERAND (arg0, 0), arg1);
10926 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10927 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10928 mode which allows further optimizations. */
10929 hi1 &= mhi;
10930 lo1 &= mlo;
10931 hi2 &= mhi;
10932 lo2 &= mlo;
10933 hi3 = hi1 & ~hi2;
10934 lo3 = lo1 & ~lo2;
10935 for (w = BITS_PER_UNIT;
10936 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10937 w <<= 1)
10939 unsigned HOST_WIDE_INT mask
10940 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10941 if (((lo1 | lo2) & mask) == mask
10942 && (lo1 & ~mask) == 0 && hi1 == 0)
10944 hi3 = 0;
10945 lo3 = mask;
10946 break;
10949 if (hi3 != hi1 || lo3 != lo1)
10950 return fold_build2 (BIT_IOR_EXPR, type,
10951 fold_build2 (BIT_AND_EXPR, type,
10952 TREE_OPERAND (arg0, 0),
10953 build_int_cst_wide (type,
10954 lo3, hi3)),
10955 arg1);
10958 /* (X & Y) | Y is (X, Y). */
10959 if (TREE_CODE (arg0) == BIT_AND_EXPR
10960 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10961 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10962 /* (X & Y) | X is (Y, X). */
10963 if (TREE_CODE (arg0) == BIT_AND_EXPR
10964 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10965 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10966 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10967 /* X | (X & Y) is (Y, X). */
10968 if (TREE_CODE (arg1) == BIT_AND_EXPR
10969 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10970 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10971 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10972 /* X | (Y & X) is (Y, X). */
10973 if (TREE_CODE (arg1) == BIT_AND_EXPR
10974 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10975 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10976 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10978 t1 = distribute_bit_expr (code, type, arg0, arg1);
10979 if (t1 != NULL_TREE)
10980 return t1;
10982 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10984 This results in more efficient code for machines without a NAND
10985 instruction. Combine will canonicalize to the first form
10986 which will allow use of NAND instructions provided by the
10987 backend if they exist. */
10988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10989 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10991 return fold_build1 (BIT_NOT_EXPR, type,
10992 build2 (BIT_AND_EXPR, type,
10993 fold_convert (type,
10994 TREE_OPERAND (arg0, 0)),
10995 fold_convert (type,
10996 TREE_OPERAND (arg1, 0))));
10999 /* See if this can be simplified into a rotate first. If that
11000 is unsuccessful continue in the association code. */
11001 goto bit_rotate;
11003 case BIT_XOR_EXPR:
11004 if (integer_zerop (arg1))
11005 return non_lvalue (fold_convert (type, arg0));
11006 if (integer_all_onesp (arg1))
11007 return fold_build1 (BIT_NOT_EXPR, type, op0);
11008 if (operand_equal_p (arg0, arg1, 0))
11009 return omit_one_operand (type, integer_zero_node, arg0);
11011 /* ~X ^ X is -1. */
11012 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11015 t1 = fold_convert (type, integer_zero_node);
11016 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11017 return omit_one_operand (type, t1, arg1);
11020 /* X ^ ~X is -1. */
11021 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11024 t1 = fold_convert (type, integer_zero_node);
11025 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
11026 return omit_one_operand (type, t1, arg0);
11029 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11030 with a constant, and the two constants have no bits in common,
11031 we should treat this as a BIT_IOR_EXPR since this may produce more
11032 simplifications. */
11033 if (TREE_CODE (arg0) == BIT_AND_EXPR
11034 && TREE_CODE (arg1) == BIT_AND_EXPR
11035 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11036 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11037 && integer_zerop (const_binop (BIT_AND_EXPR,
11038 TREE_OPERAND (arg0, 1),
11039 TREE_OPERAND (arg1, 1), 0)))
11041 code = BIT_IOR_EXPR;
11042 goto bit_ior;
11045 /* (X | Y) ^ X -> Y & ~ X*/
11046 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11047 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11049 tree t2 = TREE_OPERAND (arg0, 1);
11050 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11051 arg1);
11052 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11053 fold_convert (type, t1));
11054 return t1;
11057 /* (Y | X) ^ X -> Y & ~ X*/
11058 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11059 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11061 tree t2 = TREE_OPERAND (arg0, 0);
11062 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11063 arg1);
11064 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11065 fold_convert (type, t1));
11066 return t1;
11069 /* X ^ (X | Y) -> Y & ~ X*/
11070 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11071 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11073 tree t2 = TREE_OPERAND (arg1, 1);
11074 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11075 arg0);
11076 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11077 fold_convert (type, t1));
11078 return t1;
11081 /* X ^ (Y | X) -> Y & ~ X*/
11082 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11083 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11085 tree t2 = TREE_OPERAND (arg1, 0);
11086 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11087 arg0);
11088 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11089 fold_convert (type, t1));
11090 return t1;
11093 /* Convert ~X ^ ~Y to X ^ Y. */
11094 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11095 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11096 return fold_build2 (code, type,
11097 fold_convert (type, TREE_OPERAND (arg0, 0)),
11098 fold_convert (type, TREE_OPERAND (arg1, 0)));
11100 /* Convert ~X ^ C to X ^ ~C. */
11101 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11102 && TREE_CODE (arg1) == INTEGER_CST)
11103 return fold_build2 (code, type,
11104 fold_convert (type, TREE_OPERAND (arg0, 0)),
11105 fold_build1 (BIT_NOT_EXPR, type, arg1));
11107 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11108 if (TREE_CODE (arg0) == BIT_AND_EXPR
11109 && integer_onep (TREE_OPERAND (arg0, 1))
11110 && integer_onep (arg1))
11111 return fold_build2 (EQ_EXPR, type, arg0,
11112 build_int_cst (TREE_TYPE (arg0), 0));
11114 /* Fold (X & Y) ^ Y as ~X & Y. */
11115 if (TREE_CODE (arg0) == BIT_AND_EXPR
11116 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11118 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11119 return fold_build2 (BIT_AND_EXPR, type,
11120 fold_build1 (BIT_NOT_EXPR, type, tem),
11121 fold_convert (type, arg1));
11123 /* Fold (X & Y) ^ X as ~Y & X. */
11124 if (TREE_CODE (arg0) == BIT_AND_EXPR
11125 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11126 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11128 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11129 return fold_build2 (BIT_AND_EXPR, type,
11130 fold_build1 (BIT_NOT_EXPR, type, tem),
11131 fold_convert (type, arg1));
11133 /* Fold X ^ (X & Y) as X & ~Y. */
11134 if (TREE_CODE (arg1) == BIT_AND_EXPR
11135 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11137 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11138 return fold_build2 (BIT_AND_EXPR, type,
11139 fold_convert (type, arg0),
11140 fold_build1 (BIT_NOT_EXPR, type, tem));
11142 /* Fold X ^ (Y & X) as ~Y & X. */
11143 if (TREE_CODE (arg1) == BIT_AND_EXPR
11144 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11145 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11147 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11148 return fold_build2 (BIT_AND_EXPR, type,
11149 fold_build1 (BIT_NOT_EXPR, type, tem),
11150 fold_convert (type, arg0));
11153 /* See if this can be simplified into a rotate first. If that
11154 is unsuccessful continue in the association code. */
11155 goto bit_rotate;
11157 case BIT_AND_EXPR:
11158 if (integer_all_onesp (arg1))
11159 return non_lvalue (fold_convert (type, arg0));
11160 if (integer_zerop (arg1))
11161 return omit_one_operand (type, arg1, arg0);
11162 if (operand_equal_p (arg0, arg1, 0))
11163 return non_lvalue (fold_convert (type, arg0));
11165 /* ~X & X is always zero. */
11166 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11168 return omit_one_operand (type, integer_zero_node, arg1);
11170 /* X & ~X is always zero. */
11171 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11173 return omit_one_operand (type, integer_zero_node, arg0);
11175 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11176 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11177 && TREE_CODE (arg1) == INTEGER_CST
11178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11180 tree tmp1 = fold_convert (type, arg1);
11181 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11182 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11183 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11184 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11185 return fold_convert (type,
11186 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11189 /* (X | Y) & Y is (X, Y). */
11190 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11191 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11192 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11193 /* (X | Y) & X is (Y, X). */
11194 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11195 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11196 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11197 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11198 /* X & (X | Y) is (Y, X). */
11199 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11200 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11201 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11202 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11203 /* X & (Y | X) is (Y, X). */
11204 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11205 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11206 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11207 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11209 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11210 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11211 && integer_onep (TREE_OPERAND (arg0, 1))
11212 && integer_onep (arg1))
11214 tem = TREE_OPERAND (arg0, 0);
11215 return fold_build2 (EQ_EXPR, type,
11216 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11217 build_int_cst (TREE_TYPE (tem), 1)),
11218 build_int_cst (TREE_TYPE (tem), 0));
11220 /* Fold ~X & 1 as (X & 1) == 0. */
11221 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11222 && integer_onep (arg1))
11224 tem = TREE_OPERAND (arg0, 0);
11225 return fold_build2 (EQ_EXPR, type,
11226 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11227 build_int_cst (TREE_TYPE (tem), 1)),
11228 build_int_cst (TREE_TYPE (tem), 0));
11231 /* Fold (X ^ Y) & Y as ~X & Y. */
11232 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11233 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11235 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11236 return fold_build2 (BIT_AND_EXPR, type,
11237 fold_build1 (BIT_NOT_EXPR, type, tem),
11238 fold_convert (type, arg1));
11240 /* Fold (X ^ Y) & X as ~Y & X. */
11241 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11242 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11243 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11245 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11246 return fold_build2 (BIT_AND_EXPR, type,
11247 fold_build1 (BIT_NOT_EXPR, type, tem),
11248 fold_convert (type, arg1));
11250 /* Fold X & (X ^ Y) as X & ~Y. */
11251 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11252 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11254 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11255 return fold_build2 (BIT_AND_EXPR, type,
11256 fold_convert (type, arg0),
11257 fold_build1 (BIT_NOT_EXPR, type, tem));
11259 /* Fold X & (Y ^ X) as ~Y & X. */
11260 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11261 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11262 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11264 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11265 return fold_build2 (BIT_AND_EXPR, type,
11266 fold_build1 (BIT_NOT_EXPR, type, tem),
11267 fold_convert (type, arg0));
11270 t1 = distribute_bit_expr (code, type, arg0, arg1);
11271 if (t1 != NULL_TREE)
11272 return t1;
11273 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11274 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11275 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11277 unsigned int prec
11278 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11280 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11281 && (~TREE_INT_CST_LOW (arg1)
11282 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11283 return fold_convert (type, TREE_OPERAND (arg0, 0));
11286 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11288 This results in more efficient code for machines without a NOR
11289 instruction. Combine will canonicalize to the first form
11290 which will allow use of NOR instructions provided by the
11291 backend if they exist. */
11292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11293 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11295 return fold_build1 (BIT_NOT_EXPR, type,
11296 build2 (BIT_IOR_EXPR, type,
11297 fold_convert (type,
11298 TREE_OPERAND (arg0, 0)),
11299 fold_convert (type,
11300 TREE_OPERAND (arg1, 0))));
11303 /* If arg0 is derived from the address of an object or function, we may
11304 be able to fold this expression using the object or function's
11305 alignment. */
11306 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11308 unsigned HOST_WIDE_INT modulus, residue;
11309 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11311 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11313 /* This works because modulus is a power of 2. If this weren't the
11314 case, we'd have to replace it by its greatest power-of-2
11315 divisor: modulus & -modulus. */
11316 if (low < modulus)
11317 return build_int_cst (type, residue & low);
11320 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11321 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11322 if the new mask might be further optimized. */
11323 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11324 || TREE_CODE (arg0) == RSHIFT_EXPR)
11325 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11326 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11327 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11328 < TYPE_PRECISION (TREE_TYPE (arg0))
11329 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11330 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11332 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11333 unsigned HOST_WIDE_INT mask
11334 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11335 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11336 tree shift_type = TREE_TYPE (arg0);
11338 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11339 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11340 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11341 && TYPE_PRECISION (TREE_TYPE (arg0))
11342 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11344 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11345 tree arg00 = TREE_OPERAND (arg0, 0);
11346 /* See if more bits can be proven as zero because of
11347 zero extension. */
11348 if (TREE_CODE (arg00) == NOP_EXPR
11349 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11351 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11352 if (TYPE_PRECISION (inner_type)
11353 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11354 && TYPE_PRECISION (inner_type) < prec)
11356 prec = TYPE_PRECISION (inner_type);
11357 /* See if we can shorten the right shift. */
11358 if (shiftc < prec)
11359 shift_type = inner_type;
11362 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11363 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11364 zerobits <<= prec - shiftc;
11365 /* For arithmetic shift if sign bit could be set, zerobits
11366 can contain actually sign bits, so no transformation is
11367 possible, unless MASK masks them all away. In that
11368 case the shift needs to be converted into logical shift. */
11369 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11370 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11372 if ((mask & zerobits) == 0)
11373 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11374 else
11375 zerobits = 0;
11379 /* ((X << 16) & 0xff00) is (X, 0). */
11380 if ((mask & zerobits) == mask)
11381 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11383 newmask = mask | zerobits;
11384 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11386 unsigned int prec;
11388 /* Only do the transformation if NEWMASK is some integer
11389 mode's mask. */
11390 for (prec = BITS_PER_UNIT;
11391 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11392 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11393 break;
11394 if (prec < HOST_BITS_PER_WIDE_INT
11395 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11397 if (shift_type != TREE_TYPE (arg0))
11399 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11400 fold_convert (shift_type,
11401 TREE_OPERAND (arg0, 0)),
11402 TREE_OPERAND (arg0, 1));
11403 tem = fold_convert (type, tem);
11405 else
11406 tem = op0;
11407 return fold_build2 (BIT_AND_EXPR, type, tem,
11408 build_int_cst_type (TREE_TYPE (op1),
11409 newmask));
11414 goto associate;
11416 case RDIV_EXPR:
11417 /* Don't touch a floating-point divide by zero unless the mode
11418 of the constant can represent infinity. */
11419 if (TREE_CODE (arg1) == REAL_CST
11420 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11421 && real_zerop (arg1))
11422 return NULL_TREE;
11424 /* Optimize A / A to 1.0 if we don't care about
11425 NaNs or Infinities. Skip the transformation
11426 for non-real operands. */
11427 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11428 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11429 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11430 && operand_equal_p (arg0, arg1, 0))
11432 tree r = build_real (TREE_TYPE (arg0), dconst1);
11434 return omit_two_operands (type, r, arg0, arg1);
11437 /* The complex version of the above A / A optimization. */
11438 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11439 && operand_equal_p (arg0, arg1, 0))
11441 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11442 if (! HONOR_NANS (TYPE_MODE (elem_type))
11443 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11445 tree r = build_real (elem_type, dconst1);
11446 /* omit_two_operands will call fold_convert for us. */
11447 return omit_two_operands (type, r, arg0, arg1);
11451 /* (-A) / (-B) -> A / B */
11452 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11453 return fold_build2 (RDIV_EXPR, type,
11454 TREE_OPERAND (arg0, 0),
11455 negate_expr (arg1));
11456 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11457 return fold_build2 (RDIV_EXPR, type,
11458 negate_expr (arg0),
11459 TREE_OPERAND (arg1, 0));
11461 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11462 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11463 && real_onep (arg1))
11464 return non_lvalue (fold_convert (type, arg0));
11466 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11467 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11468 && real_minus_onep (arg1))
11469 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11471 /* If ARG1 is a constant, we can convert this to a multiply by the
11472 reciprocal. This does not have the same rounding properties,
11473 so only do this if -freciprocal-math. We can actually
11474 always safely do it if ARG1 is a power of two, but it's hard to
11475 tell if it is or not in a portable manner. */
11476 if (TREE_CODE (arg1) == REAL_CST)
11478 if (flag_reciprocal_math
11479 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11480 arg1, 0)))
11481 return fold_build2 (MULT_EXPR, type, arg0, tem);
11482 /* Find the reciprocal if optimizing and the result is exact. */
11483 if (optimize)
11485 REAL_VALUE_TYPE r;
11486 r = TREE_REAL_CST (arg1);
11487 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11489 tem = build_real (type, r);
11490 return fold_build2 (MULT_EXPR, type,
11491 fold_convert (type, arg0), tem);
11495 /* Convert A/B/C to A/(B*C). */
11496 if (flag_reciprocal_math
11497 && TREE_CODE (arg0) == RDIV_EXPR)
11498 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11499 fold_build2 (MULT_EXPR, type,
11500 TREE_OPERAND (arg0, 1), arg1));
11502 /* Convert A/(B/C) to (A/B)*C. */
11503 if (flag_reciprocal_math
11504 && TREE_CODE (arg1) == RDIV_EXPR)
11505 return fold_build2 (MULT_EXPR, type,
11506 fold_build2 (RDIV_EXPR, type, arg0,
11507 TREE_OPERAND (arg1, 0)),
11508 TREE_OPERAND (arg1, 1));
11510 /* Convert C1/(X*C2) into (C1/C2)/X. */
11511 if (flag_reciprocal_math
11512 && TREE_CODE (arg1) == MULT_EXPR
11513 && TREE_CODE (arg0) == REAL_CST
11514 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11516 tree tem = const_binop (RDIV_EXPR, arg0,
11517 TREE_OPERAND (arg1, 1), 0);
11518 if (tem)
11519 return fold_build2 (RDIV_EXPR, type, tem,
11520 TREE_OPERAND (arg1, 0));
11523 if (flag_unsafe_math_optimizations)
11525 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11526 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11528 /* Optimize sin(x)/cos(x) as tan(x). */
11529 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11530 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11531 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11532 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11533 CALL_EXPR_ARG (arg1, 0), 0))
11535 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11537 if (tanfn != NULL_TREE)
11538 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11541 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11542 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11543 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11544 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11545 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11546 CALL_EXPR_ARG (arg1, 0), 0))
11548 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11550 if (tanfn != NULL_TREE)
11552 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11553 return fold_build2 (RDIV_EXPR, type,
11554 build_real (type, dconst1), tmp);
11558 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11559 NaNs or Infinities. */
11560 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11561 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11562 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11564 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11565 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11567 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11568 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11569 && operand_equal_p (arg00, arg01, 0))
11571 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11573 if (cosfn != NULL_TREE)
11574 return build_call_expr (cosfn, 1, arg00);
11578 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11579 NaNs or Infinities. */
11580 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11581 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11582 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11584 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11585 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11587 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11588 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11589 && operand_equal_p (arg00, arg01, 0))
11591 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11593 if (cosfn != NULL_TREE)
11595 tree tmp = build_call_expr (cosfn, 1, arg00);
11596 return fold_build2 (RDIV_EXPR, type,
11597 build_real (type, dconst1),
11598 tmp);
11603 /* Optimize pow(x,c)/x as pow(x,c-1). */
11604 if (fcode0 == BUILT_IN_POW
11605 || fcode0 == BUILT_IN_POWF
11606 || fcode0 == BUILT_IN_POWL)
11608 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11609 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11610 if (TREE_CODE (arg01) == REAL_CST
11611 && !TREE_OVERFLOW (arg01)
11612 && operand_equal_p (arg1, arg00, 0))
11614 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11615 REAL_VALUE_TYPE c;
11616 tree arg;
11618 c = TREE_REAL_CST (arg01);
11619 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11620 arg = build_real (type, c);
11621 return build_call_expr (powfn, 2, arg1, arg);
11625 /* Optimize a/root(b/c) into a*root(c/b). */
11626 if (BUILTIN_ROOT_P (fcode1))
11628 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11630 if (TREE_CODE (rootarg) == RDIV_EXPR)
11632 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11633 tree b = TREE_OPERAND (rootarg, 0);
11634 tree c = TREE_OPERAND (rootarg, 1);
11636 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11638 tmp = build_call_expr (rootfn, 1, tmp);
11639 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11643 /* Optimize x/expN(y) into x*expN(-y). */
11644 if (BUILTIN_EXPONENT_P (fcode1))
11646 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11647 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11648 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11649 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11652 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11653 if (fcode1 == BUILT_IN_POW
11654 || fcode1 == BUILT_IN_POWF
11655 || fcode1 == BUILT_IN_POWL)
11657 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11658 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11659 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11660 tree neg11 = fold_convert (type, negate_expr (arg11));
11661 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11662 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11665 return NULL_TREE;
11667 case TRUNC_DIV_EXPR:
11668 case FLOOR_DIV_EXPR:
11669 /* Simplify A / (B << N) where A and B are positive and B is
11670 a power of 2, to A >> (N + log2(B)). */
11671 strict_overflow_p = false;
11672 if (TREE_CODE (arg1) == LSHIFT_EXPR
11673 && (TYPE_UNSIGNED (type)
11674 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11676 tree sval = TREE_OPERAND (arg1, 0);
11677 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11679 tree sh_cnt = TREE_OPERAND (arg1, 1);
11680 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11682 if (strict_overflow_p)
11683 fold_overflow_warning (("assuming signed overflow does not "
11684 "occur when simplifying A / (B << N)"),
11685 WARN_STRICT_OVERFLOW_MISC);
11687 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11688 sh_cnt, build_int_cst (NULL_TREE, pow2));
11689 return fold_build2 (RSHIFT_EXPR, type,
11690 fold_convert (type, arg0), sh_cnt);
11694 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11695 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11696 if (INTEGRAL_TYPE_P (type)
11697 && TYPE_UNSIGNED (type)
11698 && code == FLOOR_DIV_EXPR)
11699 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11701 /* Fall thru */
11703 case ROUND_DIV_EXPR:
11704 case CEIL_DIV_EXPR:
11705 case EXACT_DIV_EXPR:
11706 if (integer_onep (arg1))
11707 return non_lvalue (fold_convert (type, arg0));
11708 if (integer_zerop (arg1))
11709 return NULL_TREE;
11710 /* X / -1 is -X. */
11711 if (!TYPE_UNSIGNED (type)
11712 && TREE_CODE (arg1) == INTEGER_CST
11713 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11714 && TREE_INT_CST_HIGH (arg1) == -1)
11715 return fold_convert (type, negate_expr (arg0));
11717 /* Convert -A / -B to A / B when the type is signed and overflow is
11718 undefined. */
11719 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11720 && TREE_CODE (arg0) == NEGATE_EXPR
11721 && negate_expr_p (arg1))
11723 if (INTEGRAL_TYPE_P (type))
11724 fold_overflow_warning (("assuming signed overflow does not occur "
11725 "when distributing negation across "
11726 "division"),
11727 WARN_STRICT_OVERFLOW_MISC);
11728 return fold_build2 (code, type,
11729 fold_convert (type, TREE_OPERAND (arg0, 0)),
11730 fold_convert (type, negate_expr (arg1)));
11732 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11733 && TREE_CODE (arg1) == NEGATE_EXPR
11734 && negate_expr_p (arg0))
11736 if (INTEGRAL_TYPE_P (type))
11737 fold_overflow_warning (("assuming signed overflow does not occur "
11738 "when distributing negation across "
11739 "division"),
11740 WARN_STRICT_OVERFLOW_MISC);
11741 return fold_build2 (code, type,
11742 fold_convert (type, negate_expr (arg0)),
11743 fold_convert (type, TREE_OPERAND (arg1, 0)));
11746 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11747 operation, EXACT_DIV_EXPR.
11749 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11750 At one time others generated faster code, it's not clear if they do
11751 after the last round to changes to the DIV code in expmed.c. */
11752 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11753 && multiple_of_p (type, arg0, arg1))
11754 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11756 strict_overflow_p = false;
11757 if (TREE_CODE (arg1) == INTEGER_CST
11758 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11759 &strict_overflow_p)))
11761 if (strict_overflow_p)
11762 fold_overflow_warning (("assuming signed overflow does not occur "
11763 "when simplifying division"),
11764 WARN_STRICT_OVERFLOW_MISC);
11765 return fold_convert (type, tem);
11768 return NULL_TREE;
11770 case CEIL_MOD_EXPR:
11771 case FLOOR_MOD_EXPR:
11772 case ROUND_MOD_EXPR:
11773 case TRUNC_MOD_EXPR:
11774 /* X % 1 is always zero, but be sure to preserve any side
11775 effects in X. */
11776 if (integer_onep (arg1))
11777 return omit_one_operand (type, integer_zero_node, arg0);
11779 /* X % 0, return X % 0 unchanged so that we can get the
11780 proper warnings and errors. */
11781 if (integer_zerop (arg1))
11782 return NULL_TREE;
11784 /* 0 % X is always zero, but be sure to preserve any side
11785 effects in X. Place this after checking for X == 0. */
11786 if (integer_zerop (arg0))
11787 return omit_one_operand (type, integer_zero_node, arg1);
11789 /* X % -1 is zero. */
11790 if (!TYPE_UNSIGNED (type)
11791 && TREE_CODE (arg1) == INTEGER_CST
11792 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11793 && TREE_INT_CST_HIGH (arg1) == -1)
11794 return omit_one_operand (type, integer_zero_node, arg0);
11796 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11797 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11798 strict_overflow_p = false;
11799 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11800 && (TYPE_UNSIGNED (type)
11801 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11803 tree c = arg1;
11804 /* Also optimize A % (C << N) where C is a power of 2,
11805 to A & ((C << N) - 1). */
11806 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11807 c = TREE_OPERAND (arg1, 0);
11809 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11811 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11812 build_int_cst (TREE_TYPE (arg1), 1));
11813 if (strict_overflow_p)
11814 fold_overflow_warning (("assuming signed overflow does not "
11815 "occur when simplifying "
11816 "X % (power of two)"),
11817 WARN_STRICT_OVERFLOW_MISC);
11818 return fold_build2 (BIT_AND_EXPR, type,
11819 fold_convert (type, arg0),
11820 fold_convert (type, mask));
11824 /* X % -C is the same as X % C. */
11825 if (code == TRUNC_MOD_EXPR
11826 && !TYPE_UNSIGNED (type)
11827 && TREE_CODE (arg1) == INTEGER_CST
11828 && !TREE_OVERFLOW (arg1)
11829 && TREE_INT_CST_HIGH (arg1) < 0
11830 && !TYPE_OVERFLOW_TRAPS (type)
11831 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11832 && !sign_bit_p (arg1, arg1))
11833 return fold_build2 (code, type, fold_convert (type, arg0),
11834 fold_convert (type, negate_expr (arg1)));
11836 /* X % -Y is the same as X % Y. */
11837 if (code == TRUNC_MOD_EXPR
11838 && !TYPE_UNSIGNED (type)
11839 && TREE_CODE (arg1) == NEGATE_EXPR
11840 && !TYPE_OVERFLOW_TRAPS (type))
11841 return fold_build2 (code, type, fold_convert (type, arg0),
11842 fold_convert (type, TREE_OPERAND (arg1, 0)));
11844 if (TREE_CODE (arg1) == INTEGER_CST
11845 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11846 &strict_overflow_p)))
11848 if (strict_overflow_p)
11849 fold_overflow_warning (("assuming signed overflow does not occur "
11850 "when simplifying modulus"),
11851 WARN_STRICT_OVERFLOW_MISC);
11852 return fold_convert (type, tem);
11855 return NULL_TREE;
11857 case LROTATE_EXPR:
11858 case RROTATE_EXPR:
11859 if (integer_all_onesp (arg0))
11860 return omit_one_operand (type, arg0, arg1);
11861 goto shift;
11863 case RSHIFT_EXPR:
11864 /* Optimize -1 >> x for arithmetic right shifts. */
11865 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11866 && tree_expr_nonnegative_p (arg1))
11867 return omit_one_operand (type, arg0, arg1);
11868 /* ... fall through ... */
11870 case LSHIFT_EXPR:
11871 shift:
11872 if (integer_zerop (arg1))
11873 return non_lvalue (fold_convert (type, arg0));
11874 if (integer_zerop (arg0))
11875 return omit_one_operand (type, arg0, arg1);
11877 /* Since negative shift count is not well-defined,
11878 don't try to compute it in the compiler. */
11879 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11880 return NULL_TREE;
11882 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11883 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11884 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11885 && host_integerp (TREE_OPERAND (arg0, 1), false)
11886 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11888 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11889 + TREE_INT_CST_LOW (arg1));
11891 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11892 being well defined. */
11893 if (low >= TYPE_PRECISION (type))
11895 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11896 low = low % TYPE_PRECISION (type);
11897 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11898 return build_int_cst (type, 0);
11899 else
11900 low = TYPE_PRECISION (type) - 1;
11903 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11904 build_int_cst (type, low));
11907 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11908 into x & ((unsigned)-1 >> c) for unsigned types. */
11909 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11910 || (TYPE_UNSIGNED (type)
11911 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11912 && host_integerp (arg1, false)
11913 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11914 && host_integerp (TREE_OPERAND (arg0, 1), false)
11915 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11917 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11918 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11919 tree lshift;
11920 tree arg00;
11922 if (low0 == low1)
11924 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11926 lshift = build_int_cst (type, -1);
11927 lshift = int_const_binop (code, lshift, arg1, 0);
11929 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11933 /* Rewrite an LROTATE_EXPR by a constant into an
11934 RROTATE_EXPR by a new constant. */
11935 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11937 tree tem = build_int_cst (TREE_TYPE (arg1),
11938 TYPE_PRECISION (type));
11939 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11940 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11943 /* If we have a rotate of a bit operation with the rotate count and
11944 the second operand of the bit operation both constant,
11945 permute the two operations. */
11946 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11947 && (TREE_CODE (arg0) == BIT_AND_EXPR
11948 || TREE_CODE (arg0) == BIT_IOR_EXPR
11949 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11950 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11951 return fold_build2 (TREE_CODE (arg0), type,
11952 fold_build2 (code, type,
11953 TREE_OPERAND (arg0, 0), arg1),
11954 fold_build2 (code, type,
11955 TREE_OPERAND (arg0, 1), arg1));
11957 /* Two consecutive rotates adding up to the precision of the
11958 type can be ignored. */
11959 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11960 && TREE_CODE (arg0) == RROTATE_EXPR
11961 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11962 && TREE_INT_CST_HIGH (arg1) == 0
11963 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11964 && ((TREE_INT_CST_LOW (arg1)
11965 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11966 == (unsigned int) TYPE_PRECISION (type)))
11967 return TREE_OPERAND (arg0, 0);
11969 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11970 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11971 if the latter can be further optimized. */
11972 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11973 && TREE_CODE (arg0) == BIT_AND_EXPR
11974 && TREE_CODE (arg1) == INTEGER_CST
11975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11977 tree mask = fold_build2 (code, type,
11978 fold_convert (type, TREE_OPERAND (arg0, 1)),
11979 arg1);
11980 tree shift = fold_build2 (code, type,
11981 fold_convert (type, TREE_OPERAND (arg0, 0)),
11982 arg1);
11983 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11984 if (tem)
11985 return tem;
11988 return NULL_TREE;
11990 case MIN_EXPR:
11991 if (operand_equal_p (arg0, arg1, 0))
11992 return omit_one_operand (type, arg0, arg1);
11993 if (INTEGRAL_TYPE_P (type)
11994 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11995 return omit_one_operand (type, arg1, arg0);
11996 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11997 if (tem)
11998 return tem;
11999 goto associate;
12001 case MAX_EXPR:
12002 if (operand_equal_p (arg0, arg1, 0))
12003 return omit_one_operand (type, arg0, arg1);
12004 if (INTEGRAL_TYPE_P (type)
12005 && TYPE_MAX_VALUE (type)
12006 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12007 return omit_one_operand (type, arg1, arg0);
12008 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
12009 if (tem)
12010 return tem;
12011 goto associate;
12013 case TRUTH_ANDIF_EXPR:
12014 /* Note that the operands of this must be ints
12015 and their values must be 0 or 1.
12016 ("true" is a fixed value perhaps depending on the language.) */
12017 /* If first arg is constant zero, return it. */
12018 if (integer_zerop (arg0))
12019 return fold_convert (type, arg0);
12020 case TRUTH_AND_EXPR:
12021 /* If either arg is constant true, drop it. */
12022 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12023 return non_lvalue (fold_convert (type, arg1));
12024 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12025 /* Preserve sequence points. */
12026 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12027 return non_lvalue (fold_convert (type, arg0));
12028 /* If second arg is constant zero, result is zero, but first arg
12029 must be evaluated. */
12030 if (integer_zerop (arg1))
12031 return omit_one_operand (type, arg1, arg0);
12032 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12033 case will be handled here. */
12034 if (integer_zerop (arg0))
12035 return omit_one_operand (type, arg0, arg1);
12037 /* !X && X is always false. */
12038 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12039 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12040 return omit_one_operand (type, integer_zero_node, arg1);
12041 /* X && !X is always false. */
12042 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12043 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12044 return omit_one_operand (type, integer_zero_node, arg0);
12046 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12047 means A >= Y && A != MAX, but in this case we know that
12048 A < X <= MAX. */
12050 if (!TREE_SIDE_EFFECTS (arg0)
12051 && !TREE_SIDE_EFFECTS (arg1))
12053 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12054 if (tem && !operand_equal_p (tem, arg0, 0))
12055 return fold_build2 (code, type, tem, arg1);
12057 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12058 if (tem && !operand_equal_p (tem, arg1, 0))
12059 return fold_build2 (code, type, arg0, tem);
12062 truth_andor:
12063 /* We only do these simplifications if we are optimizing. */
12064 if (!optimize)
12065 return NULL_TREE;
12067 /* Check for things like (A || B) && (A || C). We can convert this
12068 to A || (B && C). Note that either operator can be any of the four
12069 truth and/or operations and the transformation will still be
12070 valid. Also note that we only care about order for the
12071 ANDIF and ORIF operators. If B contains side effects, this
12072 might change the truth-value of A. */
12073 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12074 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12075 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12076 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12077 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12078 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12080 tree a00 = TREE_OPERAND (arg0, 0);
12081 tree a01 = TREE_OPERAND (arg0, 1);
12082 tree a10 = TREE_OPERAND (arg1, 0);
12083 tree a11 = TREE_OPERAND (arg1, 1);
12084 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12085 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12086 && (code == TRUTH_AND_EXPR
12087 || code == TRUTH_OR_EXPR));
12089 if (operand_equal_p (a00, a10, 0))
12090 return fold_build2 (TREE_CODE (arg0), type, a00,
12091 fold_build2 (code, type, a01, a11));
12092 else if (commutative && operand_equal_p (a00, a11, 0))
12093 return fold_build2 (TREE_CODE (arg0), type, a00,
12094 fold_build2 (code, type, a01, a10));
12095 else if (commutative && operand_equal_p (a01, a10, 0))
12096 return fold_build2 (TREE_CODE (arg0), type, a01,
12097 fold_build2 (code, type, a00, a11));
12099 /* This case if tricky because we must either have commutative
12100 operators or else A10 must not have side-effects. */
12102 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12103 && operand_equal_p (a01, a11, 0))
12104 return fold_build2 (TREE_CODE (arg0), type,
12105 fold_build2 (code, type, a00, a10),
12106 a01);
12109 /* See if we can build a range comparison. */
12110 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12111 return tem;
12113 /* Check for the possibility of merging component references. If our
12114 lhs is another similar operation, try to merge its rhs with our
12115 rhs. Then try to merge our lhs and rhs. */
12116 if (TREE_CODE (arg0) == code
12117 && 0 != (tem = fold_truthop (code, type,
12118 TREE_OPERAND (arg0, 1), arg1)))
12119 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12121 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12122 return tem;
12124 return NULL_TREE;
12126 case TRUTH_ORIF_EXPR:
12127 /* Note that the operands of this must be ints
12128 and their values must be 0 or true.
12129 ("true" is a fixed value perhaps depending on the language.) */
12130 /* If first arg is constant true, return it. */
12131 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12132 return fold_convert (type, arg0);
12133 case TRUTH_OR_EXPR:
12134 /* If either arg is constant zero, drop it. */
12135 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12136 return non_lvalue (fold_convert (type, arg1));
12137 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12138 /* Preserve sequence points. */
12139 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12140 return non_lvalue (fold_convert (type, arg0));
12141 /* If second arg is constant true, result is true, but we must
12142 evaluate first arg. */
12143 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12144 return omit_one_operand (type, arg1, arg0);
12145 /* Likewise for first arg, but note this only occurs here for
12146 TRUTH_OR_EXPR. */
12147 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12148 return omit_one_operand (type, arg0, arg1);
12150 /* !X || X is always true. */
12151 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12152 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12153 return omit_one_operand (type, integer_one_node, arg1);
12154 /* X || !X is always true. */
12155 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12156 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12157 return omit_one_operand (type, integer_one_node, arg0);
12159 goto truth_andor;
12161 case TRUTH_XOR_EXPR:
12162 /* If the second arg is constant zero, drop it. */
12163 if (integer_zerop (arg1))
12164 return non_lvalue (fold_convert (type, arg0));
12165 /* If the second arg is constant true, this is a logical inversion. */
12166 if (integer_onep (arg1))
12168 /* Only call invert_truthvalue if operand is a truth value. */
12169 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12170 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12171 else
12172 tem = invert_truthvalue (arg0);
12173 return non_lvalue (fold_convert (type, tem));
12175 /* Identical arguments cancel to zero. */
12176 if (operand_equal_p (arg0, arg1, 0))
12177 return omit_one_operand (type, integer_zero_node, arg0);
12179 /* !X ^ X is always true. */
12180 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12181 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12182 return omit_one_operand (type, integer_one_node, arg1);
12184 /* X ^ !X is always true. */
12185 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12186 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12187 return omit_one_operand (type, integer_one_node, arg0);
12189 return NULL_TREE;
12191 case EQ_EXPR:
12192 case NE_EXPR:
12193 tem = fold_comparison (code, type, op0, op1);
12194 if (tem != NULL_TREE)
12195 return tem;
12197 /* bool_var != 0 becomes bool_var. */
12198 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12199 && code == NE_EXPR)
12200 return non_lvalue (fold_convert (type, arg0));
12202 /* bool_var == 1 becomes bool_var. */
12203 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12204 && code == EQ_EXPR)
12205 return non_lvalue (fold_convert (type, arg0));
12207 /* bool_var != 1 becomes !bool_var. */
12208 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12209 && code == NE_EXPR)
12210 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12212 /* bool_var == 0 becomes !bool_var. */
12213 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12214 && code == EQ_EXPR)
12215 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12217 /* If this is an equality comparison of the address of two non-weak,
12218 unaliased symbols neither of which are extern (since we do not
12219 have access to attributes for externs), then we know the result. */
12220 if (TREE_CODE (arg0) == ADDR_EXPR
12221 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12222 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12223 && ! lookup_attribute ("alias",
12224 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12225 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12226 && TREE_CODE (arg1) == ADDR_EXPR
12227 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12228 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12229 && ! lookup_attribute ("alias",
12230 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12231 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12233 /* We know that we're looking at the address of two
12234 non-weak, unaliased, static _DECL nodes.
12236 It is both wasteful and incorrect to call operand_equal_p
12237 to compare the two ADDR_EXPR nodes. It is wasteful in that
12238 all we need to do is test pointer equality for the arguments
12239 to the two ADDR_EXPR nodes. It is incorrect to use
12240 operand_equal_p as that function is NOT equivalent to a
12241 C equality test. It can in fact return false for two
12242 objects which would test as equal using the C equality
12243 operator. */
12244 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12245 return constant_boolean_node (equal
12246 ? code == EQ_EXPR : code != EQ_EXPR,
12247 type);
12250 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12251 a MINUS_EXPR of a constant, we can convert it into a comparison with
12252 a revised constant as long as no overflow occurs. */
12253 if (TREE_CODE (arg1) == INTEGER_CST
12254 && (TREE_CODE (arg0) == PLUS_EXPR
12255 || TREE_CODE (arg0) == MINUS_EXPR)
12256 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12257 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12258 ? MINUS_EXPR : PLUS_EXPR,
12259 fold_convert (TREE_TYPE (arg0), arg1),
12260 TREE_OPERAND (arg0, 1), 0))
12261 && !TREE_OVERFLOW (tem))
12262 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12264 /* Similarly for a NEGATE_EXPR. */
12265 if (TREE_CODE (arg0) == NEGATE_EXPR
12266 && TREE_CODE (arg1) == INTEGER_CST
12267 && 0 != (tem = negate_expr (arg1))
12268 && TREE_CODE (tem) == INTEGER_CST
12269 && !TREE_OVERFLOW (tem))
12270 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12272 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12273 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12274 && TREE_CODE (arg1) == INTEGER_CST
12275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12276 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12277 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12278 fold_convert (TREE_TYPE (arg0), arg1),
12279 TREE_OPERAND (arg0, 1)));
12281 /* Transform comparisons of the form X +- C CMP X. */
12282 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12283 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12285 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12286 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12288 tree cst = TREE_OPERAND (arg0, 1);
12290 if (code == EQ_EXPR
12291 && !integer_zerop (cst))
12292 return omit_two_operands (type, boolean_false_node,
12293 TREE_OPERAND (arg0, 0), arg1);
12294 else
12295 return omit_two_operands (type, boolean_true_node,
12296 TREE_OPERAND (arg0, 0), arg1);
12299 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12300 for !=. Don't do this for ordered comparisons due to overflow. */
12301 if (TREE_CODE (arg0) == MINUS_EXPR
12302 && integer_zerop (arg1))
12303 return fold_build2 (code, type,
12304 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12306 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12307 if (TREE_CODE (arg0) == ABS_EXPR
12308 && (integer_zerop (arg1) || real_zerop (arg1)))
12309 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12311 /* If this is an EQ or NE comparison with zero and ARG0 is
12312 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12313 two operations, but the latter can be done in one less insn
12314 on machines that have only two-operand insns or on which a
12315 constant cannot be the first operand. */
12316 if (TREE_CODE (arg0) == BIT_AND_EXPR
12317 && integer_zerop (arg1))
12319 tree arg00 = TREE_OPERAND (arg0, 0);
12320 tree arg01 = TREE_OPERAND (arg0, 1);
12321 if (TREE_CODE (arg00) == LSHIFT_EXPR
12322 && integer_onep (TREE_OPERAND (arg00, 0)))
12324 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12325 arg01, TREE_OPERAND (arg00, 1));
12326 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12327 build_int_cst (TREE_TYPE (arg0), 1));
12328 return fold_build2 (code, type,
12329 fold_convert (TREE_TYPE (arg1), tem), arg1);
12331 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12332 && integer_onep (TREE_OPERAND (arg01, 0)))
12334 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12335 arg00, TREE_OPERAND (arg01, 1));
12336 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12337 build_int_cst (TREE_TYPE (arg0), 1));
12338 return fold_build2 (code, type,
12339 fold_convert (TREE_TYPE (arg1), tem), arg1);
12343 /* If this is an NE or EQ comparison of zero against the result of a
12344 signed MOD operation whose second operand is a power of 2, make
12345 the MOD operation unsigned since it is simpler and equivalent. */
12346 if (integer_zerop (arg1)
12347 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12348 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12349 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12350 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12351 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12352 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12354 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12355 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12356 fold_convert (newtype,
12357 TREE_OPERAND (arg0, 0)),
12358 fold_convert (newtype,
12359 TREE_OPERAND (arg0, 1)));
12361 return fold_build2 (code, type, newmod,
12362 fold_convert (newtype, arg1));
12365 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12366 C1 is a valid shift constant, and C2 is a power of two, i.e.
12367 a single bit. */
12368 if (TREE_CODE (arg0) == BIT_AND_EXPR
12369 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12370 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12371 == INTEGER_CST
12372 && integer_pow2p (TREE_OPERAND (arg0, 1))
12373 && integer_zerop (arg1))
12375 tree itype = TREE_TYPE (arg0);
12376 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12377 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12379 /* Check for a valid shift count. */
12380 if (TREE_INT_CST_HIGH (arg001) == 0
12381 && TREE_INT_CST_LOW (arg001) < prec)
12383 tree arg01 = TREE_OPERAND (arg0, 1);
12384 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12385 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12386 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12387 can be rewritten as (X & (C2 << C1)) != 0. */
12388 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12390 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12391 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12392 return fold_build2 (code, type, tem, arg1);
12394 /* Otherwise, for signed (arithmetic) shifts,
12395 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12396 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12397 else if (!TYPE_UNSIGNED (itype))
12398 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12399 arg000, build_int_cst (itype, 0));
12400 /* Otherwise, of unsigned (logical) shifts,
12401 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12402 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12403 else
12404 return omit_one_operand (type,
12405 code == EQ_EXPR ? integer_one_node
12406 : integer_zero_node,
12407 arg000);
12411 /* If this is an NE comparison of zero with an AND of one, remove the
12412 comparison since the AND will give the correct value. */
12413 if (code == NE_EXPR
12414 && integer_zerop (arg1)
12415 && TREE_CODE (arg0) == BIT_AND_EXPR
12416 && integer_onep (TREE_OPERAND (arg0, 1)))
12417 return fold_convert (type, arg0);
12419 /* If we have (A & C) == C where C is a power of 2, convert this into
12420 (A & C) != 0. Similarly for NE_EXPR. */
12421 if (TREE_CODE (arg0) == BIT_AND_EXPR
12422 && integer_pow2p (TREE_OPERAND (arg0, 1))
12423 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12424 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12425 arg0, fold_convert (TREE_TYPE (arg0),
12426 integer_zero_node));
12428 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12429 bit, then fold the expression into A < 0 or A >= 0. */
12430 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12431 if (tem)
12432 return tem;
12434 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12435 Similarly for NE_EXPR. */
12436 if (TREE_CODE (arg0) == BIT_AND_EXPR
12437 && TREE_CODE (arg1) == INTEGER_CST
12438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12440 tree notc = fold_build1 (BIT_NOT_EXPR,
12441 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12442 TREE_OPERAND (arg0, 1));
12443 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12444 arg1, notc);
12445 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12446 if (integer_nonzerop (dandnotc))
12447 return omit_one_operand (type, rslt, arg0);
12450 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12451 Similarly for NE_EXPR. */
12452 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12453 && TREE_CODE (arg1) == INTEGER_CST
12454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12456 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12457 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12458 TREE_OPERAND (arg0, 1), notd);
12459 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12460 if (integer_nonzerop (candnotd))
12461 return omit_one_operand (type, rslt, arg0);
12464 /* If this is a comparison of a field, we may be able to simplify it. */
12465 if ((TREE_CODE (arg0) == COMPONENT_REF
12466 || TREE_CODE (arg0) == BIT_FIELD_REF)
12467 /* Handle the constant case even without -O
12468 to make sure the warnings are given. */
12469 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12471 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12472 if (t1)
12473 return t1;
12476 /* Optimize comparisons of strlen vs zero to a compare of the
12477 first character of the string vs zero. To wit,
12478 strlen(ptr) == 0 => *ptr == 0
12479 strlen(ptr) != 0 => *ptr != 0
12480 Other cases should reduce to one of these two (or a constant)
12481 due to the return value of strlen being unsigned. */
12482 if (TREE_CODE (arg0) == CALL_EXPR
12483 && integer_zerop (arg1))
12485 tree fndecl = get_callee_fndecl (arg0);
12487 if (fndecl
12488 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12489 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12490 && call_expr_nargs (arg0) == 1
12491 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12493 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12494 return fold_build2 (code, type, iref,
12495 build_int_cst (TREE_TYPE (iref), 0));
12499 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12500 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12501 if (TREE_CODE (arg0) == RSHIFT_EXPR
12502 && integer_zerop (arg1)
12503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12505 tree arg00 = TREE_OPERAND (arg0, 0);
12506 tree arg01 = TREE_OPERAND (arg0, 1);
12507 tree itype = TREE_TYPE (arg00);
12508 if (TREE_INT_CST_HIGH (arg01) == 0
12509 && TREE_INT_CST_LOW (arg01)
12510 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12512 if (TYPE_UNSIGNED (itype))
12514 itype = signed_type_for (itype);
12515 arg00 = fold_convert (itype, arg00);
12517 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12518 type, arg00, build_int_cst (itype, 0));
12522 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12523 if (integer_zerop (arg1)
12524 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12525 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12526 TREE_OPERAND (arg0, 1));
12528 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12529 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12530 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12531 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12532 build_int_cst (TREE_TYPE (arg1), 0));
12533 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12534 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12535 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12536 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12537 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12538 build_int_cst (TREE_TYPE (arg1), 0));
12540 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12541 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12542 && TREE_CODE (arg1) == INTEGER_CST
12543 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12544 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12545 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12546 TREE_OPERAND (arg0, 1), arg1));
12548 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12549 (X & C) == 0 when C is a single bit. */
12550 if (TREE_CODE (arg0) == BIT_AND_EXPR
12551 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12552 && integer_zerop (arg1)
12553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12555 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12556 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12557 TREE_OPERAND (arg0, 1));
12558 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12559 type, tem, arg1);
12562 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12563 constant C is a power of two, i.e. a single bit. */
12564 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12565 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12566 && integer_zerop (arg1)
12567 && integer_pow2p (TREE_OPERAND (arg0, 1))
12568 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12569 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12571 tree arg00 = TREE_OPERAND (arg0, 0);
12572 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12573 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12576 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12577 when is C is a power of two, i.e. a single bit. */
12578 if (TREE_CODE (arg0) == BIT_AND_EXPR
12579 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12580 && integer_zerop (arg1)
12581 && integer_pow2p (TREE_OPERAND (arg0, 1))
12582 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12583 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12585 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12586 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12587 arg000, TREE_OPERAND (arg0, 1));
12588 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12589 tem, build_int_cst (TREE_TYPE (tem), 0));
12592 if (integer_zerop (arg1)
12593 && tree_expr_nonzero_p (arg0))
12595 tree res = constant_boolean_node (code==NE_EXPR, type);
12596 return omit_one_operand (type, res, arg0);
12599 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12600 if (TREE_CODE (arg0) == NEGATE_EXPR
12601 && TREE_CODE (arg1) == NEGATE_EXPR)
12602 return fold_build2 (code, type,
12603 TREE_OPERAND (arg0, 0),
12604 TREE_OPERAND (arg1, 0));
12606 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12607 if (TREE_CODE (arg0) == BIT_AND_EXPR
12608 && TREE_CODE (arg1) == BIT_AND_EXPR)
12610 tree arg00 = TREE_OPERAND (arg0, 0);
12611 tree arg01 = TREE_OPERAND (arg0, 1);
12612 tree arg10 = TREE_OPERAND (arg1, 0);
12613 tree arg11 = TREE_OPERAND (arg1, 1);
12614 tree itype = TREE_TYPE (arg0);
12616 if (operand_equal_p (arg01, arg11, 0))
12617 return fold_build2 (code, type,
12618 fold_build2 (BIT_AND_EXPR, itype,
12619 fold_build2 (BIT_XOR_EXPR, itype,
12620 arg00, arg10),
12621 arg01),
12622 build_int_cst (itype, 0));
12624 if (operand_equal_p (arg01, arg10, 0))
12625 return fold_build2 (code, type,
12626 fold_build2 (BIT_AND_EXPR, itype,
12627 fold_build2 (BIT_XOR_EXPR, itype,
12628 arg00, arg11),
12629 arg01),
12630 build_int_cst (itype, 0));
12632 if (operand_equal_p (arg00, arg11, 0))
12633 return fold_build2 (code, type,
12634 fold_build2 (BIT_AND_EXPR, itype,
12635 fold_build2 (BIT_XOR_EXPR, itype,
12636 arg01, arg10),
12637 arg00),
12638 build_int_cst (itype, 0));
12640 if (operand_equal_p (arg00, arg10, 0))
12641 return fold_build2 (code, type,
12642 fold_build2 (BIT_AND_EXPR, itype,
12643 fold_build2 (BIT_XOR_EXPR, itype,
12644 arg01, arg11),
12645 arg00),
12646 build_int_cst (itype, 0));
12649 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12650 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12652 tree arg00 = TREE_OPERAND (arg0, 0);
12653 tree arg01 = TREE_OPERAND (arg0, 1);
12654 tree arg10 = TREE_OPERAND (arg1, 0);
12655 tree arg11 = TREE_OPERAND (arg1, 1);
12656 tree itype = TREE_TYPE (arg0);
12658 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12659 operand_equal_p guarantees no side-effects so we don't need
12660 to use omit_one_operand on Z. */
12661 if (operand_equal_p (arg01, arg11, 0))
12662 return fold_build2 (code, type, arg00, arg10);
12663 if (operand_equal_p (arg01, arg10, 0))
12664 return fold_build2 (code, type, arg00, arg11);
12665 if (operand_equal_p (arg00, arg11, 0))
12666 return fold_build2 (code, type, arg01, arg10);
12667 if (operand_equal_p (arg00, arg10, 0))
12668 return fold_build2 (code, type, arg01, arg11);
12670 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12671 if (TREE_CODE (arg01) == INTEGER_CST
12672 && TREE_CODE (arg11) == INTEGER_CST)
12673 return fold_build2 (code, type,
12674 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12675 fold_build2 (BIT_XOR_EXPR, itype,
12676 arg01, arg11)),
12677 arg10);
12680 /* Attempt to simplify equality/inequality comparisons of complex
12681 values. Only lower the comparison if the result is known or
12682 can be simplified to a single scalar comparison. */
12683 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12684 || TREE_CODE (arg0) == COMPLEX_CST)
12685 && (TREE_CODE (arg1) == COMPLEX_EXPR
12686 || TREE_CODE (arg1) == COMPLEX_CST))
12688 tree real0, imag0, real1, imag1;
12689 tree rcond, icond;
12691 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12693 real0 = TREE_OPERAND (arg0, 0);
12694 imag0 = TREE_OPERAND (arg0, 1);
12696 else
12698 real0 = TREE_REALPART (arg0);
12699 imag0 = TREE_IMAGPART (arg0);
12702 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12704 real1 = TREE_OPERAND (arg1, 0);
12705 imag1 = TREE_OPERAND (arg1, 1);
12707 else
12709 real1 = TREE_REALPART (arg1);
12710 imag1 = TREE_IMAGPART (arg1);
12713 rcond = fold_binary (code, type, real0, real1);
12714 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12716 if (integer_zerop (rcond))
12718 if (code == EQ_EXPR)
12719 return omit_two_operands (type, boolean_false_node,
12720 imag0, imag1);
12721 return fold_build2 (NE_EXPR, type, imag0, imag1);
12723 else
12725 if (code == NE_EXPR)
12726 return omit_two_operands (type, boolean_true_node,
12727 imag0, imag1);
12728 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12732 icond = fold_binary (code, type, imag0, imag1);
12733 if (icond && TREE_CODE (icond) == INTEGER_CST)
12735 if (integer_zerop (icond))
12737 if (code == EQ_EXPR)
12738 return omit_two_operands (type, boolean_false_node,
12739 real0, real1);
12740 return fold_build2 (NE_EXPR, type, real0, real1);
12742 else
12744 if (code == NE_EXPR)
12745 return omit_two_operands (type, boolean_true_node,
12746 real0, real1);
12747 return fold_build2 (EQ_EXPR, type, real0, real1);
12752 return NULL_TREE;
12754 case LT_EXPR:
12755 case GT_EXPR:
12756 case LE_EXPR:
12757 case GE_EXPR:
12758 tem = fold_comparison (code, type, op0, op1);
12759 if (tem != NULL_TREE)
12760 return tem;
12762 /* Transform comparisons of the form X +- C CMP X. */
12763 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12764 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12765 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12766 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12767 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12768 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12770 tree arg01 = TREE_OPERAND (arg0, 1);
12771 enum tree_code code0 = TREE_CODE (arg0);
12772 int is_positive;
12774 if (TREE_CODE (arg01) == REAL_CST)
12775 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12776 else
12777 is_positive = tree_int_cst_sgn (arg01);
12779 /* (X - c) > X becomes false. */
12780 if (code == GT_EXPR
12781 && ((code0 == MINUS_EXPR && is_positive >= 0)
12782 || (code0 == PLUS_EXPR && is_positive <= 0)))
12784 if (TREE_CODE (arg01) == INTEGER_CST
12785 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12786 fold_overflow_warning (("assuming signed overflow does not "
12787 "occur when assuming that (X - c) > X "
12788 "is always false"),
12789 WARN_STRICT_OVERFLOW_ALL);
12790 return constant_boolean_node (0, type);
12793 /* Likewise (X + c) < X becomes false. */
12794 if (code == LT_EXPR
12795 && ((code0 == PLUS_EXPR && is_positive >= 0)
12796 || (code0 == MINUS_EXPR && is_positive <= 0)))
12798 if (TREE_CODE (arg01) == INTEGER_CST
12799 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12800 fold_overflow_warning (("assuming signed overflow does not "
12801 "occur when assuming that "
12802 "(X + c) < X is always false"),
12803 WARN_STRICT_OVERFLOW_ALL);
12804 return constant_boolean_node (0, type);
12807 /* Convert (X - c) <= X to true. */
12808 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12809 && code == LE_EXPR
12810 && ((code0 == MINUS_EXPR && is_positive >= 0)
12811 || (code0 == PLUS_EXPR && is_positive <= 0)))
12813 if (TREE_CODE (arg01) == INTEGER_CST
12814 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12815 fold_overflow_warning (("assuming signed overflow does not "
12816 "occur when assuming that "
12817 "(X - c) <= X is always true"),
12818 WARN_STRICT_OVERFLOW_ALL);
12819 return constant_boolean_node (1, type);
12822 /* Convert (X + c) >= X to true. */
12823 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12824 && code == GE_EXPR
12825 && ((code0 == PLUS_EXPR && is_positive >= 0)
12826 || (code0 == MINUS_EXPR && is_positive <= 0)))
12828 if (TREE_CODE (arg01) == INTEGER_CST
12829 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12830 fold_overflow_warning (("assuming signed overflow does not "
12831 "occur when assuming that "
12832 "(X + c) >= X is always true"),
12833 WARN_STRICT_OVERFLOW_ALL);
12834 return constant_boolean_node (1, type);
12837 if (TREE_CODE (arg01) == INTEGER_CST)
12839 /* Convert X + c > X and X - c < X to true for integers. */
12840 if (code == GT_EXPR
12841 && ((code0 == PLUS_EXPR && is_positive > 0)
12842 || (code0 == MINUS_EXPR && is_positive < 0)))
12844 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12845 fold_overflow_warning (("assuming signed overflow does "
12846 "not occur when assuming that "
12847 "(X + c) > X is always true"),
12848 WARN_STRICT_OVERFLOW_ALL);
12849 return constant_boolean_node (1, type);
12852 if (code == LT_EXPR
12853 && ((code0 == MINUS_EXPR && is_positive > 0)
12854 || (code0 == PLUS_EXPR && is_positive < 0)))
12856 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12857 fold_overflow_warning (("assuming signed overflow does "
12858 "not occur when assuming that "
12859 "(X - c) < X is always true"),
12860 WARN_STRICT_OVERFLOW_ALL);
12861 return constant_boolean_node (1, type);
12864 /* Convert X + c <= X and X - c >= X to false for integers. */
12865 if (code == LE_EXPR
12866 && ((code0 == PLUS_EXPR && is_positive > 0)
12867 || (code0 == MINUS_EXPR && is_positive < 0)))
12869 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12870 fold_overflow_warning (("assuming signed overflow does "
12871 "not occur when assuming that "
12872 "(X + c) <= X is always false"),
12873 WARN_STRICT_OVERFLOW_ALL);
12874 return constant_boolean_node (0, type);
12877 if (code == GE_EXPR
12878 && ((code0 == MINUS_EXPR && is_positive > 0)
12879 || (code0 == PLUS_EXPR && is_positive < 0)))
12881 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12882 fold_overflow_warning (("assuming signed overflow does "
12883 "not occur when assuming that "
12884 "(X - c) >= X is always false"),
12885 WARN_STRICT_OVERFLOW_ALL);
12886 return constant_boolean_node (0, type);
12891 /* Comparisons with the highest or lowest possible integer of
12892 the specified precision will have known values. */
12894 tree arg1_type = TREE_TYPE (arg1);
12895 unsigned int width = TYPE_PRECISION (arg1_type);
12897 if (TREE_CODE (arg1) == INTEGER_CST
12898 && width <= 2 * HOST_BITS_PER_WIDE_INT
12899 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12901 HOST_WIDE_INT signed_max_hi;
12902 unsigned HOST_WIDE_INT signed_max_lo;
12903 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12905 if (width <= HOST_BITS_PER_WIDE_INT)
12907 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12908 - 1;
12909 signed_max_hi = 0;
12910 max_hi = 0;
12912 if (TYPE_UNSIGNED (arg1_type))
12914 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12915 min_lo = 0;
12916 min_hi = 0;
12918 else
12920 max_lo = signed_max_lo;
12921 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12922 min_hi = -1;
12925 else
12927 width -= HOST_BITS_PER_WIDE_INT;
12928 signed_max_lo = -1;
12929 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12930 - 1;
12931 max_lo = -1;
12932 min_lo = 0;
12934 if (TYPE_UNSIGNED (arg1_type))
12936 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12937 min_hi = 0;
12939 else
12941 max_hi = signed_max_hi;
12942 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12946 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12947 && TREE_INT_CST_LOW (arg1) == max_lo)
12948 switch (code)
12950 case GT_EXPR:
12951 return omit_one_operand (type, integer_zero_node, arg0);
12953 case GE_EXPR:
12954 return fold_build2 (EQ_EXPR, type, op0, op1);
12956 case LE_EXPR:
12957 return omit_one_operand (type, integer_one_node, arg0);
12959 case LT_EXPR:
12960 return fold_build2 (NE_EXPR, type, op0, op1);
12962 /* The GE_EXPR and LT_EXPR cases above are not normally
12963 reached because of previous transformations. */
12965 default:
12966 break;
12968 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12969 == max_hi
12970 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12971 switch (code)
12973 case GT_EXPR:
12974 arg1 = const_binop (PLUS_EXPR, arg1,
12975 build_int_cst (TREE_TYPE (arg1), 1), 0);
12976 return fold_build2 (EQ_EXPR, type,
12977 fold_convert (TREE_TYPE (arg1), arg0),
12978 arg1);
12979 case LE_EXPR:
12980 arg1 = const_binop (PLUS_EXPR, arg1,
12981 build_int_cst (TREE_TYPE (arg1), 1), 0);
12982 return fold_build2 (NE_EXPR, type,
12983 fold_convert (TREE_TYPE (arg1), arg0),
12984 arg1);
12985 default:
12986 break;
12988 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12989 == min_hi
12990 && TREE_INT_CST_LOW (arg1) == min_lo)
12991 switch (code)
12993 case LT_EXPR:
12994 return omit_one_operand (type, integer_zero_node, arg0);
12996 case LE_EXPR:
12997 return fold_build2 (EQ_EXPR, type, op0, op1);
12999 case GE_EXPR:
13000 return omit_one_operand (type, integer_one_node, arg0);
13002 case GT_EXPR:
13003 return fold_build2 (NE_EXPR, type, op0, op1);
13005 default:
13006 break;
13008 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13009 == min_hi
13010 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13011 switch (code)
13013 case GE_EXPR:
13014 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13015 return fold_build2 (NE_EXPR, type,
13016 fold_convert (TREE_TYPE (arg1), arg0),
13017 arg1);
13018 case LT_EXPR:
13019 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13020 return fold_build2 (EQ_EXPR, type,
13021 fold_convert (TREE_TYPE (arg1), arg0),
13022 arg1);
13023 default:
13024 break;
13027 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13028 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13029 && TYPE_UNSIGNED (arg1_type)
13030 /* We will flip the signedness of the comparison operator
13031 associated with the mode of arg1, so the sign bit is
13032 specified by this mode. Check that arg1 is the signed
13033 max associated with this sign bit. */
13034 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13035 /* signed_type does not work on pointer types. */
13036 && INTEGRAL_TYPE_P (arg1_type))
13038 /* The following case also applies to X < signed_max+1
13039 and X >= signed_max+1 because previous transformations. */
13040 if (code == LE_EXPR || code == GT_EXPR)
13042 tree st;
13043 st = signed_type_for (TREE_TYPE (arg1));
13044 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13045 type, fold_convert (st, arg0),
13046 build_int_cst (st, 0));
13052 /* If we are comparing an ABS_EXPR with a constant, we can
13053 convert all the cases into explicit comparisons, but they may
13054 well not be faster than doing the ABS and one comparison.
13055 But ABS (X) <= C is a range comparison, which becomes a subtraction
13056 and a comparison, and is probably faster. */
13057 if (code == LE_EXPR
13058 && TREE_CODE (arg1) == INTEGER_CST
13059 && TREE_CODE (arg0) == ABS_EXPR
13060 && ! TREE_SIDE_EFFECTS (arg0)
13061 && (0 != (tem = negate_expr (arg1)))
13062 && TREE_CODE (tem) == INTEGER_CST
13063 && !TREE_OVERFLOW (tem))
13064 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13065 build2 (GE_EXPR, type,
13066 TREE_OPERAND (arg0, 0), tem),
13067 build2 (LE_EXPR, type,
13068 TREE_OPERAND (arg0, 0), arg1));
13070 /* Convert ABS_EXPR<x> >= 0 to true. */
13071 strict_overflow_p = false;
13072 if (code == GE_EXPR
13073 && (integer_zerop (arg1)
13074 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13075 && real_zerop (arg1)))
13076 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13078 if (strict_overflow_p)
13079 fold_overflow_warning (("assuming signed overflow does not occur "
13080 "when simplifying comparison of "
13081 "absolute value and zero"),
13082 WARN_STRICT_OVERFLOW_CONDITIONAL);
13083 return omit_one_operand (type, integer_one_node, arg0);
13086 /* Convert ABS_EXPR<x> < 0 to false. */
13087 strict_overflow_p = false;
13088 if (code == LT_EXPR
13089 && (integer_zerop (arg1) || real_zerop (arg1))
13090 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13092 if (strict_overflow_p)
13093 fold_overflow_warning (("assuming signed overflow does not occur "
13094 "when simplifying comparison of "
13095 "absolute value and zero"),
13096 WARN_STRICT_OVERFLOW_CONDITIONAL);
13097 return omit_one_operand (type, integer_zero_node, arg0);
13100 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13101 and similarly for >= into !=. */
13102 if ((code == LT_EXPR || code == GE_EXPR)
13103 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13104 && TREE_CODE (arg1) == LSHIFT_EXPR
13105 && integer_onep (TREE_OPERAND (arg1, 0)))
13106 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13107 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13108 TREE_OPERAND (arg1, 1)),
13109 build_int_cst (TREE_TYPE (arg0), 0));
13111 if ((code == LT_EXPR || code == GE_EXPR)
13112 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13113 && CONVERT_EXPR_P (arg1)
13114 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13115 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13116 return
13117 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13118 fold_convert (TREE_TYPE (arg0),
13119 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13120 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13121 1))),
13122 build_int_cst (TREE_TYPE (arg0), 0));
13124 return NULL_TREE;
13126 case UNORDERED_EXPR:
13127 case ORDERED_EXPR:
13128 case UNLT_EXPR:
13129 case UNLE_EXPR:
13130 case UNGT_EXPR:
13131 case UNGE_EXPR:
13132 case UNEQ_EXPR:
13133 case LTGT_EXPR:
13134 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13136 t1 = fold_relational_const (code, type, arg0, arg1);
13137 if (t1 != NULL_TREE)
13138 return t1;
13141 /* If the first operand is NaN, the result is constant. */
13142 if (TREE_CODE (arg0) == REAL_CST
13143 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13144 && (code != LTGT_EXPR || ! flag_trapping_math))
13146 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13147 ? integer_zero_node
13148 : integer_one_node;
13149 return omit_one_operand (type, t1, arg1);
13152 /* If the second operand is NaN, the result is constant. */
13153 if (TREE_CODE (arg1) == REAL_CST
13154 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13155 && (code != LTGT_EXPR || ! flag_trapping_math))
13157 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13158 ? integer_zero_node
13159 : integer_one_node;
13160 return omit_one_operand (type, t1, arg0);
13163 /* Simplify unordered comparison of something with itself. */
13164 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13165 && operand_equal_p (arg0, arg1, 0))
13166 return constant_boolean_node (1, type);
13168 if (code == LTGT_EXPR
13169 && !flag_trapping_math
13170 && operand_equal_p (arg0, arg1, 0))
13171 return constant_boolean_node (0, type);
13173 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13175 tree targ0 = strip_float_extensions (arg0);
13176 tree targ1 = strip_float_extensions (arg1);
13177 tree newtype = TREE_TYPE (targ0);
13179 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13180 newtype = TREE_TYPE (targ1);
13182 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13183 return fold_build2 (code, type, fold_convert (newtype, targ0),
13184 fold_convert (newtype, targ1));
13187 return NULL_TREE;
13189 case COMPOUND_EXPR:
13190 /* When pedantic, a compound expression can be neither an lvalue
13191 nor an integer constant expression. */
13192 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13193 return NULL_TREE;
13194 /* Don't let (0, 0) be null pointer constant. */
13195 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13196 : fold_convert (type, arg1);
13197 return pedantic_non_lvalue (tem);
13199 case COMPLEX_EXPR:
13200 if ((TREE_CODE (arg0) == REAL_CST
13201 && TREE_CODE (arg1) == REAL_CST)
13202 || (TREE_CODE (arg0) == INTEGER_CST
13203 && TREE_CODE (arg1) == INTEGER_CST))
13204 return build_complex (type, arg0, arg1);
13205 return NULL_TREE;
13207 case ASSERT_EXPR:
13208 /* An ASSERT_EXPR should never be passed to fold_binary. */
13209 gcc_unreachable ();
13211 default:
13212 return NULL_TREE;
13213 } /* switch (code) */
13216 /* Callback for walk_tree, looking for LABEL_EXPR.
13217 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
13218 Do not check the sub-tree of GOTO_EXPR. */
13220 static tree
13221 contains_label_1 (tree *tp,
13222 int *walk_subtrees,
13223 void *data ATTRIBUTE_UNUSED)
13225 switch (TREE_CODE (*tp))
13227 case LABEL_EXPR:
13228 return *tp;
13229 case GOTO_EXPR:
13230 *walk_subtrees = 0;
13231 /* no break */
13232 default:
13233 return NULL_TREE;
13237 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13238 accessible from outside the sub-tree. Returns NULL_TREE if no
13239 addressable label is found. */
13241 static bool
13242 contains_label_p (tree st)
13244 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13247 /* Fold a ternary expression of code CODE and type TYPE with operands
13248 OP0, OP1, and OP2. Return the folded expression if folding is
13249 successful. Otherwise, return NULL_TREE. */
13251 tree
13252 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13254 tree tem;
13255 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13256 enum tree_code_class kind = TREE_CODE_CLASS (code);
13258 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13259 && TREE_CODE_LENGTH (code) == 3);
13261 /* Strip any conversions that don't change the mode. This is safe
13262 for every expression, except for a comparison expression because
13263 its signedness is derived from its operands. So, in the latter
13264 case, only strip conversions that don't change the signedness.
13266 Note that this is done as an internal manipulation within the
13267 constant folder, in order to find the simplest representation of
13268 the arguments so that their form can be studied. In any cases,
13269 the appropriate type conversions should be put back in the tree
13270 that will get out of the constant folder. */
13271 if (op0)
13273 arg0 = op0;
13274 STRIP_NOPS (arg0);
13277 if (op1)
13279 arg1 = op1;
13280 STRIP_NOPS (arg1);
13283 switch (code)
13285 case COMPONENT_REF:
13286 if (TREE_CODE (arg0) == CONSTRUCTOR
13287 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13289 unsigned HOST_WIDE_INT idx;
13290 tree field, value;
13291 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13292 if (field == arg1)
13293 return value;
13295 return NULL_TREE;
13297 case COND_EXPR:
13298 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13299 so all simple results must be passed through pedantic_non_lvalue. */
13300 if (TREE_CODE (arg0) == INTEGER_CST)
13302 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13303 tem = integer_zerop (arg0) ? op2 : op1;
13304 /* Only optimize constant conditions when the selected branch
13305 has the same type as the COND_EXPR. This avoids optimizing
13306 away "c ? x : throw", where the throw has a void type.
13307 Avoid throwing away that operand which contains label. */
13308 if ((!TREE_SIDE_EFFECTS (unused_op)
13309 || !contains_label_p (unused_op))
13310 && (! VOID_TYPE_P (TREE_TYPE (tem))
13311 || VOID_TYPE_P (type)))
13312 return pedantic_non_lvalue (tem);
13313 return NULL_TREE;
13315 if (operand_equal_p (arg1, op2, 0))
13316 return pedantic_omit_one_operand (type, arg1, arg0);
13318 /* If we have A op B ? A : C, we may be able to convert this to a
13319 simpler expression, depending on the operation and the values
13320 of B and C. Signed zeros prevent all of these transformations,
13321 for reasons given above each one.
13323 Also try swapping the arguments and inverting the conditional. */
13324 if (COMPARISON_CLASS_P (arg0)
13325 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13326 arg1, TREE_OPERAND (arg0, 1))
13327 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13329 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13330 if (tem)
13331 return tem;
13334 if (COMPARISON_CLASS_P (arg0)
13335 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13336 op2,
13337 TREE_OPERAND (arg0, 1))
13338 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13340 tem = fold_truth_not_expr (arg0);
13341 if (tem && COMPARISON_CLASS_P (tem))
13343 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13344 if (tem)
13345 return tem;
13349 /* If the second operand is simpler than the third, swap them
13350 since that produces better jump optimization results. */
13351 if (truth_value_p (TREE_CODE (arg0))
13352 && tree_swap_operands_p (op1, op2, false))
13354 /* See if this can be inverted. If it can't, possibly because
13355 it was a floating-point inequality comparison, don't do
13356 anything. */
13357 tem = fold_truth_not_expr (arg0);
13358 if (tem)
13359 return fold_build3 (code, type, tem, op2, op1);
13362 /* Convert A ? 1 : 0 to simply A. */
13363 if (integer_onep (op1)
13364 && integer_zerop (op2)
13365 /* If we try to convert OP0 to our type, the
13366 call to fold will try to move the conversion inside
13367 a COND, which will recurse. In that case, the COND_EXPR
13368 is probably the best choice, so leave it alone. */
13369 && type == TREE_TYPE (arg0))
13370 return pedantic_non_lvalue (arg0);
13372 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13373 over COND_EXPR in cases such as floating point comparisons. */
13374 if (integer_zerop (op1)
13375 && integer_onep (op2)
13376 && truth_value_p (TREE_CODE (arg0)))
13377 return pedantic_non_lvalue (fold_convert (type,
13378 invert_truthvalue (arg0)));
13380 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13381 if (TREE_CODE (arg0) == LT_EXPR
13382 && integer_zerop (TREE_OPERAND (arg0, 1))
13383 && integer_zerop (op2)
13384 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13386 /* sign_bit_p only checks ARG1 bits within A's precision.
13387 If <sign bit of A> has wider type than A, bits outside
13388 of A's precision in <sign bit of A> need to be checked.
13389 If they are all 0, this optimization needs to be done
13390 in unsigned A's type, if they are all 1 in signed A's type,
13391 otherwise this can't be done. */
13392 if (TYPE_PRECISION (TREE_TYPE (tem))
13393 < TYPE_PRECISION (TREE_TYPE (arg1))
13394 && TYPE_PRECISION (TREE_TYPE (tem))
13395 < TYPE_PRECISION (type))
13397 unsigned HOST_WIDE_INT mask_lo;
13398 HOST_WIDE_INT mask_hi;
13399 int inner_width, outer_width;
13400 tree tem_type;
13402 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13403 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13404 if (outer_width > TYPE_PRECISION (type))
13405 outer_width = TYPE_PRECISION (type);
13407 if (outer_width > HOST_BITS_PER_WIDE_INT)
13409 mask_hi = ((unsigned HOST_WIDE_INT) -1
13410 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13411 mask_lo = -1;
13413 else
13415 mask_hi = 0;
13416 mask_lo = ((unsigned HOST_WIDE_INT) -1
13417 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13419 if (inner_width > HOST_BITS_PER_WIDE_INT)
13421 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13422 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13423 mask_lo = 0;
13425 else
13426 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13427 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13429 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13430 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13432 tem_type = signed_type_for (TREE_TYPE (tem));
13433 tem = fold_convert (tem_type, tem);
13435 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13436 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13438 tem_type = unsigned_type_for (TREE_TYPE (tem));
13439 tem = fold_convert (tem_type, tem);
13441 else
13442 tem = NULL;
13445 if (tem)
13446 return fold_convert (type,
13447 fold_build2 (BIT_AND_EXPR,
13448 TREE_TYPE (tem), tem,
13449 fold_convert (TREE_TYPE (tem),
13450 arg1)));
13453 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13454 already handled above. */
13455 if (TREE_CODE (arg0) == BIT_AND_EXPR
13456 && integer_onep (TREE_OPERAND (arg0, 1))
13457 && integer_zerop (op2)
13458 && integer_pow2p (arg1))
13460 tree tem = TREE_OPERAND (arg0, 0);
13461 STRIP_NOPS (tem);
13462 if (TREE_CODE (tem) == RSHIFT_EXPR
13463 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13464 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13465 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13466 return fold_build2 (BIT_AND_EXPR, type,
13467 TREE_OPERAND (tem, 0), arg1);
13470 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13471 is probably obsolete because the first operand should be a
13472 truth value (that's why we have the two cases above), but let's
13473 leave it in until we can confirm this for all front-ends. */
13474 if (integer_zerop (op2)
13475 && TREE_CODE (arg0) == NE_EXPR
13476 && integer_zerop (TREE_OPERAND (arg0, 1))
13477 && integer_pow2p (arg1)
13478 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13479 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13480 arg1, OEP_ONLY_CONST))
13481 return pedantic_non_lvalue (fold_convert (type,
13482 TREE_OPERAND (arg0, 0)));
13484 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13485 if (integer_zerop (op2)
13486 && truth_value_p (TREE_CODE (arg0))
13487 && truth_value_p (TREE_CODE (arg1)))
13488 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13489 fold_convert (type, arg0),
13490 arg1);
13492 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13493 if (integer_onep (op2)
13494 && truth_value_p (TREE_CODE (arg0))
13495 && truth_value_p (TREE_CODE (arg1)))
13497 /* Only perform transformation if ARG0 is easily inverted. */
13498 tem = fold_truth_not_expr (arg0);
13499 if (tem)
13500 return fold_build2 (TRUTH_ORIF_EXPR, type,
13501 fold_convert (type, tem),
13502 arg1);
13505 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13506 if (integer_zerop (arg1)
13507 && truth_value_p (TREE_CODE (arg0))
13508 && truth_value_p (TREE_CODE (op2)))
13510 /* Only perform transformation if ARG0 is easily inverted. */
13511 tem = fold_truth_not_expr (arg0);
13512 if (tem)
13513 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13514 fold_convert (type, tem),
13515 op2);
13518 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13519 if (integer_onep (arg1)
13520 && truth_value_p (TREE_CODE (arg0))
13521 && truth_value_p (TREE_CODE (op2)))
13522 return fold_build2 (TRUTH_ORIF_EXPR, type,
13523 fold_convert (type, arg0),
13524 op2);
13526 return NULL_TREE;
13528 case CALL_EXPR:
13529 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13530 of fold_ternary on them. */
13531 gcc_unreachable ();
13533 case BIT_FIELD_REF:
13534 if ((TREE_CODE (arg0) == VECTOR_CST
13535 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13536 && type == TREE_TYPE (TREE_TYPE (arg0)))
13538 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13539 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13541 if (width != 0
13542 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13543 && (idx % width) == 0
13544 && (idx = idx / width)
13545 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13547 tree elements = NULL_TREE;
13549 if (TREE_CODE (arg0) == VECTOR_CST)
13550 elements = TREE_VECTOR_CST_ELTS (arg0);
13551 else
13553 unsigned HOST_WIDE_INT idx;
13554 tree value;
13556 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13557 elements = tree_cons (NULL_TREE, value, elements);
13559 while (idx-- > 0 && elements)
13560 elements = TREE_CHAIN (elements);
13561 if (elements)
13562 return TREE_VALUE (elements);
13563 else
13564 return fold_convert (type, integer_zero_node);
13568 /* A bit-field-ref that referenced the full argument can be stripped. */
13569 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13570 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13571 && integer_zerop (op2))
13572 return fold_convert (type, arg0);
13574 return NULL_TREE;
13576 default:
13577 return NULL_TREE;
13578 } /* switch (code) */
13581 /* Perform constant folding and related simplification of EXPR.
13582 The related simplifications include x*1 => x, x*0 => 0, etc.,
13583 and application of the associative law.
13584 NOP_EXPR conversions may be removed freely (as long as we
13585 are careful not to change the type of the overall expression).
13586 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13587 but we can constant-fold them if they have constant operands. */
13589 #ifdef ENABLE_FOLD_CHECKING
13590 # define fold(x) fold_1 (x)
13591 static tree fold_1 (tree);
13592 static
13593 #endif
13594 tree
13595 fold (tree expr)
13597 const tree t = expr;
13598 enum tree_code code = TREE_CODE (t);
13599 enum tree_code_class kind = TREE_CODE_CLASS (code);
13600 tree tem;
13602 /* Return right away if a constant. */
13603 if (kind == tcc_constant)
13604 return t;
13606 /* CALL_EXPR-like objects with variable numbers of operands are
13607 treated specially. */
13608 if (kind == tcc_vl_exp)
13610 if (code == CALL_EXPR)
13612 tem = fold_call_expr (expr, false);
13613 return tem ? tem : expr;
13615 return expr;
13618 if (IS_EXPR_CODE_CLASS (kind))
13620 tree type = TREE_TYPE (t);
13621 tree op0, op1, op2;
13623 switch (TREE_CODE_LENGTH (code))
13625 case 1:
13626 op0 = TREE_OPERAND (t, 0);
13627 tem = fold_unary (code, type, op0);
13628 return tem ? tem : expr;
13629 case 2:
13630 op0 = TREE_OPERAND (t, 0);
13631 op1 = TREE_OPERAND (t, 1);
13632 tem = fold_binary (code, type, op0, op1);
13633 return tem ? tem : expr;
13634 case 3:
13635 op0 = TREE_OPERAND (t, 0);
13636 op1 = TREE_OPERAND (t, 1);
13637 op2 = TREE_OPERAND (t, 2);
13638 tem = fold_ternary (code, type, op0, op1, op2);
13639 return tem ? tem : expr;
13640 default:
13641 break;
13645 switch (code)
13647 case ARRAY_REF:
13649 tree op0 = TREE_OPERAND (t, 0);
13650 tree op1 = TREE_OPERAND (t, 1);
13652 if (TREE_CODE (op1) == INTEGER_CST
13653 && TREE_CODE (op0) == CONSTRUCTOR
13654 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13656 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13657 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13658 unsigned HOST_WIDE_INT begin = 0;
13660 /* Find a matching index by means of a binary search. */
13661 while (begin != end)
13663 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13664 tree index = VEC_index (constructor_elt, elts, middle)->index;
13666 if (TREE_CODE (index) == INTEGER_CST
13667 && tree_int_cst_lt (index, op1))
13668 begin = middle + 1;
13669 else if (TREE_CODE (index) == INTEGER_CST
13670 && tree_int_cst_lt (op1, index))
13671 end = middle;
13672 else if (TREE_CODE (index) == RANGE_EXPR
13673 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13674 begin = middle + 1;
13675 else if (TREE_CODE (index) == RANGE_EXPR
13676 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13677 end = middle;
13678 else
13679 return VEC_index (constructor_elt, elts, middle)->value;
13683 return t;
13686 case CONST_DECL:
13687 return fold (DECL_INITIAL (t));
13689 default:
13690 return t;
13691 } /* switch (code) */
13694 #ifdef ENABLE_FOLD_CHECKING
13695 #undef fold
13697 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13698 static void fold_check_failed (const_tree, const_tree);
13699 void print_fold_checksum (const_tree);
13701 /* When --enable-checking=fold, compute a digest of expr before
13702 and after actual fold call to see if fold did not accidentally
13703 change original expr. */
13705 tree
13706 fold (tree expr)
13708 tree ret;
13709 struct md5_ctx ctx;
13710 unsigned char checksum_before[16], checksum_after[16];
13711 htab_t ht;
13713 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13714 md5_init_ctx (&ctx);
13715 fold_checksum_tree (expr, &ctx, ht);
13716 md5_finish_ctx (&ctx, checksum_before);
13717 htab_empty (ht);
13719 ret = fold_1 (expr);
13721 md5_init_ctx (&ctx);
13722 fold_checksum_tree (expr, &ctx, ht);
13723 md5_finish_ctx (&ctx, checksum_after);
13724 htab_delete (ht);
13726 if (memcmp (checksum_before, checksum_after, 16))
13727 fold_check_failed (expr, ret);
13729 return ret;
13732 void
13733 print_fold_checksum (const_tree expr)
13735 struct md5_ctx ctx;
13736 unsigned char checksum[16], cnt;
13737 htab_t ht;
13739 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13740 md5_init_ctx (&ctx);
13741 fold_checksum_tree (expr, &ctx, ht);
13742 md5_finish_ctx (&ctx, checksum);
13743 htab_delete (ht);
13744 for (cnt = 0; cnt < 16; ++cnt)
13745 fprintf (stderr, "%02x", checksum[cnt]);
13746 putc ('\n', stderr);
13749 static void
13750 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13752 internal_error ("fold check: original tree changed by fold");
13755 static void
13756 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13758 const void **slot;
13759 enum tree_code code;
13760 union tree_node buf;
13761 int i, len;
13763 recursive_label:
13765 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13766 <= sizeof (struct tree_function_decl))
13767 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13768 if (expr == NULL)
13769 return;
13770 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13771 if (*slot != NULL)
13772 return;
13773 *slot = expr;
13774 code = TREE_CODE (expr);
13775 if (TREE_CODE_CLASS (code) == tcc_declaration
13776 && DECL_ASSEMBLER_NAME_SET_P (expr))
13778 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13779 memcpy ((char *) &buf, expr, tree_size (expr));
13780 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13781 expr = (tree) &buf;
13783 else if (TREE_CODE_CLASS (code) == tcc_type
13784 && (TYPE_POINTER_TO (expr)
13785 || TYPE_REFERENCE_TO (expr)
13786 || TYPE_CACHED_VALUES_P (expr)
13787 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13788 || TYPE_NEXT_VARIANT (expr)))
13790 /* Allow these fields to be modified. */
13791 tree tmp;
13792 memcpy ((char *) &buf, expr, tree_size (expr));
13793 expr = tmp = (tree) &buf;
13794 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13795 TYPE_POINTER_TO (tmp) = NULL;
13796 TYPE_REFERENCE_TO (tmp) = NULL;
13797 TYPE_NEXT_VARIANT (tmp) = NULL;
13798 if (TYPE_CACHED_VALUES_P (tmp))
13800 TYPE_CACHED_VALUES_P (tmp) = 0;
13801 TYPE_CACHED_VALUES (tmp) = NULL;
13804 md5_process_bytes (expr, tree_size (expr), ctx);
13805 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13806 if (TREE_CODE_CLASS (code) != tcc_type
13807 && TREE_CODE_CLASS (code) != tcc_declaration
13808 && code != TREE_LIST
13809 && code != SSA_NAME)
13810 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13811 switch (TREE_CODE_CLASS (code))
13813 case tcc_constant:
13814 switch (code)
13816 case STRING_CST:
13817 md5_process_bytes (TREE_STRING_POINTER (expr),
13818 TREE_STRING_LENGTH (expr), ctx);
13819 break;
13820 case COMPLEX_CST:
13821 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13822 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13823 break;
13824 case VECTOR_CST:
13825 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13826 break;
13827 default:
13828 break;
13830 break;
13831 case tcc_exceptional:
13832 switch (code)
13834 case TREE_LIST:
13835 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13836 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13837 expr = TREE_CHAIN (expr);
13838 goto recursive_label;
13839 break;
13840 case TREE_VEC:
13841 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13842 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13843 break;
13844 default:
13845 break;
13847 break;
13848 case tcc_expression:
13849 case tcc_reference:
13850 case tcc_comparison:
13851 case tcc_unary:
13852 case tcc_binary:
13853 case tcc_statement:
13854 case tcc_vl_exp:
13855 len = TREE_OPERAND_LENGTH (expr);
13856 for (i = 0; i < len; ++i)
13857 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13858 break;
13859 case tcc_declaration:
13860 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13861 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13862 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13864 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13865 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13866 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13867 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13868 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13870 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13871 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13873 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13875 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13876 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13877 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13879 break;
13880 case tcc_type:
13881 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13882 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13883 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13884 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13885 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13886 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13887 if (INTEGRAL_TYPE_P (expr)
13888 || SCALAR_FLOAT_TYPE_P (expr))
13890 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13891 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13893 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13894 if (TREE_CODE (expr) == RECORD_TYPE
13895 || TREE_CODE (expr) == UNION_TYPE
13896 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13897 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13898 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13899 break;
13900 default:
13901 break;
13905 /* Helper function for outputting the checksum of a tree T. When
13906 debugging with gdb, you can "define mynext" to be "next" followed
13907 by "call debug_fold_checksum (op0)", then just trace down till the
13908 outputs differ. */
13910 void
13911 debug_fold_checksum (const_tree t)
13913 int i;
13914 unsigned char checksum[16];
13915 struct md5_ctx ctx;
13916 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13918 md5_init_ctx (&ctx);
13919 fold_checksum_tree (t, &ctx, ht);
13920 md5_finish_ctx (&ctx, checksum);
13921 htab_empty (ht);
13923 for (i = 0; i < 16; i++)
13924 fprintf (stderr, "%d ", checksum[i]);
13926 fprintf (stderr, "\n");
13929 #endif
13931 /* Fold a unary tree expression with code CODE of type TYPE with an
13932 operand OP0. Return a folded expression if successful. Otherwise,
13933 return a tree expression with code CODE of type TYPE with an
13934 operand OP0. */
13936 tree
13937 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13939 tree tem;
13940 #ifdef ENABLE_FOLD_CHECKING
13941 unsigned char checksum_before[16], checksum_after[16];
13942 struct md5_ctx ctx;
13943 htab_t ht;
13945 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13946 md5_init_ctx (&ctx);
13947 fold_checksum_tree (op0, &ctx, ht);
13948 md5_finish_ctx (&ctx, checksum_before);
13949 htab_empty (ht);
13950 #endif
13952 tem = fold_unary (code, type, op0);
13953 if (!tem)
13954 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13956 #ifdef ENABLE_FOLD_CHECKING
13957 md5_init_ctx (&ctx);
13958 fold_checksum_tree (op0, &ctx, ht);
13959 md5_finish_ctx (&ctx, checksum_after);
13960 htab_delete (ht);
13962 if (memcmp (checksum_before, checksum_after, 16))
13963 fold_check_failed (op0, tem);
13964 #endif
13965 return tem;
13968 /* Fold a binary tree expression with code CODE of type TYPE with
13969 operands OP0 and OP1. Return a folded expression if successful.
13970 Otherwise, return a tree expression with code CODE of type TYPE
13971 with operands OP0 and OP1. */
13973 tree
13974 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13975 MEM_STAT_DECL)
13977 tree tem;
13978 #ifdef ENABLE_FOLD_CHECKING
13979 unsigned char checksum_before_op0[16],
13980 checksum_before_op1[16],
13981 checksum_after_op0[16],
13982 checksum_after_op1[16];
13983 struct md5_ctx ctx;
13984 htab_t ht;
13986 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13987 md5_init_ctx (&ctx);
13988 fold_checksum_tree (op0, &ctx, ht);
13989 md5_finish_ctx (&ctx, checksum_before_op0);
13990 htab_empty (ht);
13992 md5_init_ctx (&ctx);
13993 fold_checksum_tree (op1, &ctx, ht);
13994 md5_finish_ctx (&ctx, checksum_before_op1);
13995 htab_empty (ht);
13996 #endif
13998 tem = fold_binary (code, type, op0, op1);
13999 if (!tem)
14000 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14002 #ifdef ENABLE_FOLD_CHECKING
14003 md5_init_ctx (&ctx);
14004 fold_checksum_tree (op0, &ctx, ht);
14005 md5_finish_ctx (&ctx, checksum_after_op0);
14006 htab_empty (ht);
14008 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14009 fold_check_failed (op0, tem);
14011 md5_init_ctx (&ctx);
14012 fold_checksum_tree (op1, &ctx, ht);
14013 md5_finish_ctx (&ctx, checksum_after_op1);
14014 htab_delete (ht);
14016 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14017 fold_check_failed (op1, tem);
14018 #endif
14019 return tem;
14022 /* Fold a ternary tree expression with code CODE of type TYPE with
14023 operands OP0, OP1, and OP2. Return a folded expression if
14024 successful. Otherwise, return a tree expression with code CODE of
14025 type TYPE with operands OP0, OP1, and OP2. */
14027 tree
14028 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14029 MEM_STAT_DECL)
14031 tree tem;
14032 #ifdef ENABLE_FOLD_CHECKING
14033 unsigned char checksum_before_op0[16],
14034 checksum_before_op1[16],
14035 checksum_before_op2[16],
14036 checksum_after_op0[16],
14037 checksum_after_op1[16],
14038 checksum_after_op2[16];
14039 struct md5_ctx ctx;
14040 htab_t ht;
14042 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14043 md5_init_ctx (&ctx);
14044 fold_checksum_tree (op0, &ctx, ht);
14045 md5_finish_ctx (&ctx, checksum_before_op0);
14046 htab_empty (ht);
14048 md5_init_ctx (&ctx);
14049 fold_checksum_tree (op1, &ctx, ht);
14050 md5_finish_ctx (&ctx, checksum_before_op1);
14051 htab_empty (ht);
14053 md5_init_ctx (&ctx);
14054 fold_checksum_tree (op2, &ctx, ht);
14055 md5_finish_ctx (&ctx, checksum_before_op2);
14056 htab_empty (ht);
14057 #endif
14059 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14060 tem = fold_ternary (code, type, op0, op1, op2);
14061 if (!tem)
14062 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14064 #ifdef ENABLE_FOLD_CHECKING
14065 md5_init_ctx (&ctx);
14066 fold_checksum_tree (op0, &ctx, ht);
14067 md5_finish_ctx (&ctx, checksum_after_op0);
14068 htab_empty (ht);
14070 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14071 fold_check_failed (op0, tem);
14073 md5_init_ctx (&ctx);
14074 fold_checksum_tree (op1, &ctx, ht);
14075 md5_finish_ctx (&ctx, checksum_after_op1);
14076 htab_empty (ht);
14078 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14079 fold_check_failed (op1, tem);
14081 md5_init_ctx (&ctx);
14082 fold_checksum_tree (op2, &ctx, ht);
14083 md5_finish_ctx (&ctx, checksum_after_op2);
14084 htab_delete (ht);
14086 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14087 fold_check_failed (op2, tem);
14088 #endif
14089 return tem;
14092 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14093 arguments in ARGARRAY, and a null static chain.
14094 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14095 of type TYPE from the given operands as constructed by build_call_array. */
14097 tree
14098 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14100 tree tem;
14101 #ifdef ENABLE_FOLD_CHECKING
14102 unsigned char checksum_before_fn[16],
14103 checksum_before_arglist[16],
14104 checksum_after_fn[16],
14105 checksum_after_arglist[16];
14106 struct md5_ctx ctx;
14107 htab_t ht;
14108 int i;
14110 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14111 md5_init_ctx (&ctx);
14112 fold_checksum_tree (fn, &ctx, ht);
14113 md5_finish_ctx (&ctx, checksum_before_fn);
14114 htab_empty (ht);
14116 md5_init_ctx (&ctx);
14117 for (i = 0; i < nargs; i++)
14118 fold_checksum_tree (argarray[i], &ctx, ht);
14119 md5_finish_ctx (&ctx, checksum_before_arglist);
14120 htab_empty (ht);
14121 #endif
14123 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14125 #ifdef ENABLE_FOLD_CHECKING
14126 md5_init_ctx (&ctx);
14127 fold_checksum_tree (fn, &ctx, ht);
14128 md5_finish_ctx (&ctx, checksum_after_fn);
14129 htab_empty (ht);
14131 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14132 fold_check_failed (fn, tem);
14134 md5_init_ctx (&ctx);
14135 for (i = 0; i < nargs; i++)
14136 fold_checksum_tree (argarray[i], &ctx, ht);
14137 md5_finish_ctx (&ctx, checksum_after_arglist);
14138 htab_delete (ht);
14140 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14141 fold_check_failed (NULL_TREE, tem);
14142 #endif
14143 return tem;
14146 /* Perform constant folding and related simplification of initializer
14147 expression EXPR. These behave identically to "fold_buildN" but ignore
14148 potential run-time traps and exceptions that fold must preserve. */
14150 #define START_FOLD_INIT \
14151 int saved_signaling_nans = flag_signaling_nans;\
14152 int saved_trapping_math = flag_trapping_math;\
14153 int saved_rounding_math = flag_rounding_math;\
14154 int saved_trapv = flag_trapv;\
14155 int saved_folding_initializer = folding_initializer;\
14156 flag_signaling_nans = 0;\
14157 flag_trapping_math = 0;\
14158 flag_rounding_math = 0;\
14159 flag_trapv = 0;\
14160 folding_initializer = 1;
14162 #define END_FOLD_INIT \
14163 flag_signaling_nans = saved_signaling_nans;\
14164 flag_trapping_math = saved_trapping_math;\
14165 flag_rounding_math = saved_rounding_math;\
14166 flag_trapv = saved_trapv;\
14167 folding_initializer = saved_folding_initializer;
14169 tree
14170 fold_build1_initializer (enum tree_code code, tree type, tree op)
14172 tree result;
14173 START_FOLD_INIT;
14175 result = fold_build1 (code, type, op);
14177 END_FOLD_INIT;
14178 return result;
14181 tree
14182 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14184 tree result;
14185 START_FOLD_INIT;
14187 result = fold_build2 (code, type, op0, op1);
14189 END_FOLD_INIT;
14190 return result;
14193 tree
14194 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14195 tree op2)
14197 tree result;
14198 START_FOLD_INIT;
14200 result = fold_build3 (code, type, op0, op1, op2);
14202 END_FOLD_INIT;
14203 return result;
14206 tree
14207 fold_build_call_array_initializer (tree type, tree fn,
14208 int nargs, tree *argarray)
14210 tree result;
14211 START_FOLD_INIT;
14213 result = fold_build_call_array (type, fn, nargs, argarray);
14215 END_FOLD_INIT;
14216 return result;
14219 #undef START_FOLD_INIT
14220 #undef END_FOLD_INIT
14222 /* Determine if first argument is a multiple of second argument. Return 0 if
14223 it is not, or we cannot easily determined it to be.
14225 An example of the sort of thing we care about (at this point; this routine
14226 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14227 fold cases do now) is discovering that
14229 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14231 is a multiple of
14233 SAVE_EXPR (J * 8)
14235 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14237 This code also handles discovering that
14239 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14241 is a multiple of 8 so we don't have to worry about dealing with a
14242 possible remainder.
14244 Note that we *look* inside a SAVE_EXPR only to determine how it was
14245 calculated; it is not safe for fold to do much of anything else with the
14246 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14247 at run time. For example, the latter example above *cannot* be implemented
14248 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14249 evaluation time of the original SAVE_EXPR is not necessarily the same at
14250 the time the new expression is evaluated. The only optimization of this
14251 sort that would be valid is changing
14253 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14255 divided by 8 to
14257 SAVE_EXPR (I) * SAVE_EXPR (J)
14259 (where the same SAVE_EXPR (J) is used in the original and the
14260 transformed version). */
14263 multiple_of_p (tree type, const_tree top, const_tree bottom)
14265 if (operand_equal_p (top, bottom, 0))
14266 return 1;
14268 if (TREE_CODE (type) != INTEGER_TYPE)
14269 return 0;
14271 switch (TREE_CODE (top))
14273 case BIT_AND_EXPR:
14274 /* Bitwise and provides a power of two multiple. If the mask is
14275 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14276 if (!integer_pow2p (bottom))
14277 return 0;
14278 /* FALLTHRU */
14280 case MULT_EXPR:
14281 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14282 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14284 case PLUS_EXPR:
14285 case MINUS_EXPR:
14286 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14287 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14289 case LSHIFT_EXPR:
14290 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14292 tree op1, t1;
14294 op1 = TREE_OPERAND (top, 1);
14295 /* const_binop may not detect overflow correctly,
14296 so check for it explicitly here. */
14297 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14298 > TREE_INT_CST_LOW (op1)
14299 && TREE_INT_CST_HIGH (op1) == 0
14300 && 0 != (t1 = fold_convert (type,
14301 const_binop (LSHIFT_EXPR,
14302 size_one_node,
14303 op1, 0)))
14304 && !TREE_OVERFLOW (t1))
14305 return multiple_of_p (type, t1, bottom);
14307 return 0;
14309 case NOP_EXPR:
14310 /* Can't handle conversions from non-integral or wider integral type. */
14311 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14312 || (TYPE_PRECISION (type)
14313 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14314 return 0;
14316 /* .. fall through ... */
14318 case SAVE_EXPR:
14319 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14321 case INTEGER_CST:
14322 if (TREE_CODE (bottom) != INTEGER_CST
14323 || integer_zerop (bottom)
14324 || (TYPE_UNSIGNED (type)
14325 && (tree_int_cst_sgn (top) < 0
14326 || tree_int_cst_sgn (bottom) < 0)))
14327 return 0;
14328 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14329 top, bottom, 0));
14331 default:
14332 return 0;
14336 /* Return true if CODE or TYPE is known to be non-negative. */
14338 static bool
14339 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14341 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14342 && truth_value_p (code))
14343 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14344 have a signed:1 type (where the value is -1 and 0). */
14345 return true;
14346 return false;
14349 /* Return true if (CODE OP0) is known to be non-negative. If the return
14350 value is based on the assumption that signed overflow is undefined,
14351 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14352 *STRICT_OVERFLOW_P. */
14354 bool
14355 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14356 bool *strict_overflow_p)
14358 if (TYPE_UNSIGNED (type))
14359 return true;
14361 switch (code)
14363 case ABS_EXPR:
14364 /* We can't return 1 if flag_wrapv is set because
14365 ABS_EXPR<INT_MIN> = INT_MIN. */
14366 if (!INTEGRAL_TYPE_P (type))
14367 return true;
14368 if (TYPE_OVERFLOW_UNDEFINED (type))
14370 *strict_overflow_p = true;
14371 return true;
14373 break;
14375 case NON_LVALUE_EXPR:
14376 case FLOAT_EXPR:
14377 case FIX_TRUNC_EXPR:
14378 return tree_expr_nonnegative_warnv_p (op0,
14379 strict_overflow_p);
14381 case NOP_EXPR:
14383 tree inner_type = TREE_TYPE (op0);
14384 tree outer_type = type;
14386 if (TREE_CODE (outer_type) == REAL_TYPE)
14388 if (TREE_CODE (inner_type) == REAL_TYPE)
14389 return tree_expr_nonnegative_warnv_p (op0,
14390 strict_overflow_p);
14391 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14393 if (TYPE_UNSIGNED (inner_type))
14394 return true;
14395 return tree_expr_nonnegative_warnv_p (op0,
14396 strict_overflow_p);
14399 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14401 if (TREE_CODE (inner_type) == REAL_TYPE)
14402 return tree_expr_nonnegative_warnv_p (op0,
14403 strict_overflow_p);
14404 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14405 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14406 && TYPE_UNSIGNED (inner_type);
14409 break;
14411 default:
14412 return tree_simple_nonnegative_warnv_p (code, type);
14415 /* We don't know sign of `t', so be conservative and return false. */
14416 return false;
14419 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14420 value is based on the assumption that signed overflow is undefined,
14421 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14422 *STRICT_OVERFLOW_P. */
14424 bool
14425 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14426 tree op1, bool *strict_overflow_p)
14428 if (TYPE_UNSIGNED (type))
14429 return true;
14431 switch (code)
14433 case POINTER_PLUS_EXPR:
14434 case PLUS_EXPR:
14435 if (FLOAT_TYPE_P (type))
14436 return (tree_expr_nonnegative_warnv_p (op0,
14437 strict_overflow_p)
14438 && tree_expr_nonnegative_warnv_p (op1,
14439 strict_overflow_p));
14441 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14442 both unsigned and at least 2 bits shorter than the result. */
14443 if (TREE_CODE (type) == INTEGER_TYPE
14444 && TREE_CODE (op0) == NOP_EXPR
14445 && TREE_CODE (op1) == NOP_EXPR)
14447 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14448 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14449 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14450 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14452 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14453 TYPE_PRECISION (inner2)) + 1;
14454 return prec < TYPE_PRECISION (type);
14457 break;
14459 case MULT_EXPR:
14460 if (FLOAT_TYPE_P (type))
14462 /* x * x for floating point x is always non-negative. */
14463 if (operand_equal_p (op0, op1, 0))
14464 return true;
14465 return (tree_expr_nonnegative_warnv_p (op0,
14466 strict_overflow_p)
14467 && tree_expr_nonnegative_warnv_p (op1,
14468 strict_overflow_p));
14471 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14472 both unsigned and their total bits is shorter than the result. */
14473 if (TREE_CODE (type) == INTEGER_TYPE
14474 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14475 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14477 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14478 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14479 : TREE_TYPE (op0);
14480 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14481 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14482 : TREE_TYPE (op1);
14484 bool unsigned0 = TYPE_UNSIGNED (inner0);
14485 bool unsigned1 = TYPE_UNSIGNED (inner1);
14487 if (TREE_CODE (op0) == INTEGER_CST)
14488 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14490 if (TREE_CODE (op1) == INTEGER_CST)
14491 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14493 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14494 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14496 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14497 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14498 : TYPE_PRECISION (inner0);
14500 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14501 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14502 : TYPE_PRECISION (inner1);
14504 return precision0 + precision1 < TYPE_PRECISION (type);
14507 return false;
14509 case BIT_AND_EXPR:
14510 case MAX_EXPR:
14511 return (tree_expr_nonnegative_warnv_p (op0,
14512 strict_overflow_p)
14513 || tree_expr_nonnegative_warnv_p (op1,
14514 strict_overflow_p));
14516 case BIT_IOR_EXPR:
14517 case BIT_XOR_EXPR:
14518 case MIN_EXPR:
14519 case RDIV_EXPR:
14520 case TRUNC_DIV_EXPR:
14521 case CEIL_DIV_EXPR:
14522 case FLOOR_DIV_EXPR:
14523 case ROUND_DIV_EXPR:
14524 return (tree_expr_nonnegative_warnv_p (op0,
14525 strict_overflow_p)
14526 && tree_expr_nonnegative_warnv_p (op1,
14527 strict_overflow_p));
14529 case TRUNC_MOD_EXPR:
14530 case CEIL_MOD_EXPR:
14531 case FLOOR_MOD_EXPR:
14532 case ROUND_MOD_EXPR:
14533 return tree_expr_nonnegative_warnv_p (op0,
14534 strict_overflow_p);
14535 default:
14536 return tree_simple_nonnegative_warnv_p (code, type);
14539 /* We don't know sign of `t', so be conservative and return false. */
14540 return false;
14543 /* Return true if T is known to be non-negative. If the return
14544 value is based on the assumption that signed overflow is undefined,
14545 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14546 *STRICT_OVERFLOW_P. */
14548 bool
14549 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14551 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14552 return true;
14554 switch (TREE_CODE (t))
14556 case INTEGER_CST:
14557 return tree_int_cst_sgn (t) >= 0;
14559 case REAL_CST:
14560 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14562 case FIXED_CST:
14563 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14565 case COND_EXPR:
14566 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14567 strict_overflow_p)
14568 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14569 strict_overflow_p));
14570 default:
14571 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14572 TREE_TYPE (t));
14574 /* We don't know sign of `t', so be conservative and return false. */
14575 return false;
14578 /* Return true if T is known to be non-negative. If the return
14579 value is based on the assumption that signed overflow is undefined,
14580 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14581 *STRICT_OVERFLOW_P. */
14583 bool
14584 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14585 tree arg0, tree arg1, bool *strict_overflow_p)
14587 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14588 switch (DECL_FUNCTION_CODE (fndecl))
14590 CASE_FLT_FN (BUILT_IN_ACOS):
14591 CASE_FLT_FN (BUILT_IN_ACOSH):
14592 CASE_FLT_FN (BUILT_IN_CABS):
14593 CASE_FLT_FN (BUILT_IN_COSH):
14594 CASE_FLT_FN (BUILT_IN_ERFC):
14595 CASE_FLT_FN (BUILT_IN_EXP):
14596 CASE_FLT_FN (BUILT_IN_EXP10):
14597 CASE_FLT_FN (BUILT_IN_EXP2):
14598 CASE_FLT_FN (BUILT_IN_FABS):
14599 CASE_FLT_FN (BUILT_IN_FDIM):
14600 CASE_FLT_FN (BUILT_IN_HYPOT):
14601 CASE_FLT_FN (BUILT_IN_POW10):
14602 CASE_INT_FN (BUILT_IN_FFS):
14603 CASE_INT_FN (BUILT_IN_PARITY):
14604 CASE_INT_FN (BUILT_IN_POPCOUNT):
14605 case BUILT_IN_BSWAP32:
14606 case BUILT_IN_BSWAP64:
14607 /* Always true. */
14608 return true;
14610 CASE_FLT_FN (BUILT_IN_SQRT):
14611 /* sqrt(-0.0) is -0.0. */
14612 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14613 return true;
14614 return tree_expr_nonnegative_warnv_p (arg0,
14615 strict_overflow_p);
14617 CASE_FLT_FN (BUILT_IN_ASINH):
14618 CASE_FLT_FN (BUILT_IN_ATAN):
14619 CASE_FLT_FN (BUILT_IN_ATANH):
14620 CASE_FLT_FN (BUILT_IN_CBRT):
14621 CASE_FLT_FN (BUILT_IN_CEIL):
14622 CASE_FLT_FN (BUILT_IN_ERF):
14623 CASE_FLT_FN (BUILT_IN_EXPM1):
14624 CASE_FLT_FN (BUILT_IN_FLOOR):
14625 CASE_FLT_FN (BUILT_IN_FMOD):
14626 CASE_FLT_FN (BUILT_IN_FREXP):
14627 CASE_FLT_FN (BUILT_IN_LCEIL):
14628 CASE_FLT_FN (BUILT_IN_LDEXP):
14629 CASE_FLT_FN (BUILT_IN_LFLOOR):
14630 CASE_FLT_FN (BUILT_IN_LLCEIL):
14631 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14632 CASE_FLT_FN (BUILT_IN_LLRINT):
14633 CASE_FLT_FN (BUILT_IN_LLROUND):
14634 CASE_FLT_FN (BUILT_IN_LRINT):
14635 CASE_FLT_FN (BUILT_IN_LROUND):
14636 CASE_FLT_FN (BUILT_IN_MODF):
14637 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14638 CASE_FLT_FN (BUILT_IN_RINT):
14639 CASE_FLT_FN (BUILT_IN_ROUND):
14640 CASE_FLT_FN (BUILT_IN_SCALB):
14641 CASE_FLT_FN (BUILT_IN_SCALBLN):
14642 CASE_FLT_FN (BUILT_IN_SCALBN):
14643 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14644 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14645 CASE_FLT_FN (BUILT_IN_SINH):
14646 CASE_FLT_FN (BUILT_IN_TANH):
14647 CASE_FLT_FN (BUILT_IN_TRUNC):
14648 /* True if the 1st argument is nonnegative. */
14649 return tree_expr_nonnegative_warnv_p (arg0,
14650 strict_overflow_p);
14652 CASE_FLT_FN (BUILT_IN_FMAX):
14653 /* True if the 1st OR 2nd arguments are nonnegative. */
14654 return (tree_expr_nonnegative_warnv_p (arg0,
14655 strict_overflow_p)
14656 || (tree_expr_nonnegative_warnv_p (arg1,
14657 strict_overflow_p)));
14659 CASE_FLT_FN (BUILT_IN_FMIN):
14660 /* True if the 1st AND 2nd arguments are nonnegative. */
14661 return (tree_expr_nonnegative_warnv_p (arg0,
14662 strict_overflow_p)
14663 && (tree_expr_nonnegative_warnv_p (arg1,
14664 strict_overflow_p)));
14666 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14667 /* True if the 2nd argument is nonnegative. */
14668 return tree_expr_nonnegative_warnv_p (arg1,
14669 strict_overflow_p);
14671 CASE_FLT_FN (BUILT_IN_POWI):
14672 /* True if the 1st argument is nonnegative or the second
14673 argument is an even integer. */
14674 if (TREE_CODE (arg1) == INTEGER_CST
14675 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14676 return true;
14677 return tree_expr_nonnegative_warnv_p (arg0,
14678 strict_overflow_p);
14680 CASE_FLT_FN (BUILT_IN_POW):
14681 /* True if the 1st argument is nonnegative or the second
14682 argument is an even integer valued real. */
14683 if (TREE_CODE (arg1) == REAL_CST)
14685 REAL_VALUE_TYPE c;
14686 HOST_WIDE_INT n;
14688 c = TREE_REAL_CST (arg1);
14689 n = real_to_integer (&c);
14690 if ((n & 1) == 0)
14692 REAL_VALUE_TYPE cint;
14693 real_from_integer (&cint, VOIDmode, n,
14694 n < 0 ? -1 : 0, 0);
14695 if (real_identical (&c, &cint))
14696 return true;
14699 return tree_expr_nonnegative_warnv_p (arg0,
14700 strict_overflow_p);
14702 default:
14703 break;
14705 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14706 type);
14709 /* Return true if T is known to be non-negative. If the return
14710 value is based on the assumption that signed overflow is undefined,
14711 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14712 *STRICT_OVERFLOW_P. */
14714 bool
14715 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14717 enum tree_code code = TREE_CODE (t);
14718 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14719 return true;
14721 switch (code)
14723 case TARGET_EXPR:
14725 tree temp = TARGET_EXPR_SLOT (t);
14726 t = TARGET_EXPR_INITIAL (t);
14728 /* If the initializer is non-void, then it's a normal expression
14729 that will be assigned to the slot. */
14730 if (!VOID_TYPE_P (t))
14731 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14733 /* Otherwise, the initializer sets the slot in some way. One common
14734 way is an assignment statement at the end of the initializer. */
14735 while (1)
14737 if (TREE_CODE (t) == BIND_EXPR)
14738 t = expr_last (BIND_EXPR_BODY (t));
14739 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14740 || TREE_CODE (t) == TRY_CATCH_EXPR)
14741 t = expr_last (TREE_OPERAND (t, 0));
14742 else if (TREE_CODE (t) == STATEMENT_LIST)
14743 t = expr_last (t);
14744 else
14745 break;
14747 if (TREE_CODE (t) == MODIFY_EXPR
14748 && TREE_OPERAND (t, 0) == temp)
14749 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14750 strict_overflow_p);
14752 return false;
14755 case CALL_EXPR:
14757 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14758 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14760 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14761 get_callee_fndecl (t),
14762 arg0,
14763 arg1,
14764 strict_overflow_p);
14766 case COMPOUND_EXPR:
14767 case MODIFY_EXPR:
14768 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14769 strict_overflow_p);
14770 case BIND_EXPR:
14771 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14772 strict_overflow_p);
14773 case SAVE_EXPR:
14774 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14775 strict_overflow_p);
14777 default:
14778 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14779 TREE_TYPE (t));
14782 /* We don't know sign of `t', so be conservative and return false. */
14783 return false;
14786 /* Return true if T is known to be non-negative. If the return
14787 value is based on the assumption that signed overflow is undefined,
14788 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14789 *STRICT_OVERFLOW_P. */
14791 bool
14792 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14794 enum tree_code code;
14795 if (t == error_mark_node)
14796 return false;
14798 code = TREE_CODE (t);
14799 switch (TREE_CODE_CLASS (code))
14801 case tcc_binary:
14802 case tcc_comparison:
14803 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14804 TREE_TYPE (t),
14805 TREE_OPERAND (t, 0),
14806 TREE_OPERAND (t, 1),
14807 strict_overflow_p);
14809 case tcc_unary:
14810 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14811 TREE_TYPE (t),
14812 TREE_OPERAND (t, 0),
14813 strict_overflow_p);
14815 case tcc_constant:
14816 case tcc_declaration:
14817 case tcc_reference:
14818 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14820 default:
14821 break;
14824 switch (code)
14826 case TRUTH_AND_EXPR:
14827 case TRUTH_OR_EXPR:
14828 case TRUTH_XOR_EXPR:
14829 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14830 TREE_TYPE (t),
14831 TREE_OPERAND (t, 0),
14832 TREE_OPERAND (t, 1),
14833 strict_overflow_p);
14834 case TRUTH_NOT_EXPR:
14835 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14836 TREE_TYPE (t),
14837 TREE_OPERAND (t, 0),
14838 strict_overflow_p);
14840 case COND_EXPR:
14841 case CONSTRUCTOR:
14842 case OBJ_TYPE_REF:
14843 case ASSERT_EXPR:
14844 case ADDR_EXPR:
14845 case WITH_SIZE_EXPR:
14846 case EXC_PTR_EXPR:
14847 case SSA_NAME:
14848 case FILTER_EXPR:
14849 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14851 default:
14852 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14856 /* Return true if `t' is known to be non-negative. Handle warnings
14857 about undefined signed overflow. */
14859 bool
14860 tree_expr_nonnegative_p (tree t)
14862 bool ret, strict_overflow_p;
14864 strict_overflow_p = false;
14865 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14866 if (strict_overflow_p)
14867 fold_overflow_warning (("assuming signed overflow does not occur when "
14868 "determining that expression is always "
14869 "non-negative"),
14870 WARN_STRICT_OVERFLOW_MISC);
14871 return ret;
14875 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14876 For floating point we further ensure that T is not denormal.
14877 Similar logic is present in nonzero_address in rtlanal.h.
14879 If the return value is based on the assumption that signed overflow
14880 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14881 change *STRICT_OVERFLOW_P. */
14883 bool
14884 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14885 bool *strict_overflow_p)
14887 switch (code)
14889 case ABS_EXPR:
14890 return tree_expr_nonzero_warnv_p (op0,
14891 strict_overflow_p);
14893 case NOP_EXPR:
14895 tree inner_type = TREE_TYPE (op0);
14896 tree outer_type = type;
14898 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14899 && tree_expr_nonzero_warnv_p (op0,
14900 strict_overflow_p));
14902 break;
14904 case NON_LVALUE_EXPR:
14905 return tree_expr_nonzero_warnv_p (op0,
14906 strict_overflow_p);
14908 default:
14909 break;
14912 return false;
14915 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14916 For floating point we further ensure that T is not denormal.
14917 Similar logic is present in nonzero_address in rtlanal.h.
14919 If the return value is based on the assumption that signed overflow
14920 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14921 change *STRICT_OVERFLOW_P. */
14923 bool
14924 tree_binary_nonzero_warnv_p (enum tree_code code,
14925 tree type ATTRIBUTE_UNUSED,
14926 tree op0,
14927 tree op1, bool *strict_overflow_p)
14929 bool sub_strict_overflow_p;
14930 switch (code)
14932 case POINTER_PLUS_EXPR:
14933 case PLUS_EXPR:
14934 if (TYPE_OVERFLOW_UNDEFINED (type))
14936 /* With the presence of negative values it is hard
14937 to say something. */
14938 sub_strict_overflow_p = false;
14939 if (!tree_expr_nonnegative_warnv_p (op0,
14940 &sub_strict_overflow_p)
14941 || !tree_expr_nonnegative_warnv_p (op1,
14942 &sub_strict_overflow_p))
14943 return false;
14944 /* One of operands must be positive and the other non-negative. */
14945 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14946 overflows, on a twos-complement machine the sum of two
14947 nonnegative numbers can never be zero. */
14948 return (tree_expr_nonzero_warnv_p (op0,
14949 strict_overflow_p)
14950 || tree_expr_nonzero_warnv_p (op1,
14951 strict_overflow_p));
14953 break;
14955 case MULT_EXPR:
14956 if (TYPE_OVERFLOW_UNDEFINED (type))
14958 if (tree_expr_nonzero_warnv_p (op0,
14959 strict_overflow_p)
14960 && tree_expr_nonzero_warnv_p (op1,
14961 strict_overflow_p))
14963 *strict_overflow_p = true;
14964 return true;
14967 break;
14969 case MIN_EXPR:
14970 sub_strict_overflow_p = false;
14971 if (tree_expr_nonzero_warnv_p (op0,
14972 &sub_strict_overflow_p)
14973 && tree_expr_nonzero_warnv_p (op1,
14974 &sub_strict_overflow_p))
14976 if (sub_strict_overflow_p)
14977 *strict_overflow_p = true;
14979 break;
14981 case MAX_EXPR:
14982 sub_strict_overflow_p = false;
14983 if (tree_expr_nonzero_warnv_p (op0,
14984 &sub_strict_overflow_p))
14986 if (sub_strict_overflow_p)
14987 *strict_overflow_p = true;
14989 /* When both operands are nonzero, then MAX must be too. */
14990 if (tree_expr_nonzero_warnv_p (op1,
14991 strict_overflow_p))
14992 return true;
14994 /* MAX where operand 0 is positive is positive. */
14995 return tree_expr_nonnegative_warnv_p (op0,
14996 strict_overflow_p);
14998 /* MAX where operand 1 is positive is positive. */
14999 else if (tree_expr_nonzero_warnv_p (op1,
15000 &sub_strict_overflow_p)
15001 && tree_expr_nonnegative_warnv_p (op1,
15002 &sub_strict_overflow_p))
15004 if (sub_strict_overflow_p)
15005 *strict_overflow_p = true;
15006 return true;
15008 break;
15010 case BIT_IOR_EXPR:
15011 return (tree_expr_nonzero_warnv_p (op1,
15012 strict_overflow_p)
15013 || tree_expr_nonzero_warnv_p (op0,
15014 strict_overflow_p));
15016 default:
15017 break;
15020 return false;
15023 /* Return true when T is an address and is known to be nonzero.
15024 For floating point we further ensure that T is not denormal.
15025 Similar logic is present in nonzero_address in rtlanal.h.
15027 If the return value is based on the assumption that signed overflow
15028 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15029 change *STRICT_OVERFLOW_P. */
15031 bool
15032 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15034 bool sub_strict_overflow_p;
15035 switch (TREE_CODE (t))
15037 case INTEGER_CST:
15038 return !integer_zerop (t);
15040 case ADDR_EXPR:
15042 tree base = get_base_address (TREE_OPERAND (t, 0));
15044 if (!base)
15045 return false;
15047 /* Weak declarations may link to NULL. */
15048 if (VAR_OR_FUNCTION_DECL_P (base))
15049 return !DECL_WEAK (base);
15051 /* Constants are never weak. */
15052 if (CONSTANT_CLASS_P (base))
15053 return true;
15055 return false;
15058 case COND_EXPR:
15059 sub_strict_overflow_p = false;
15060 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15061 &sub_strict_overflow_p)
15062 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15063 &sub_strict_overflow_p))
15065 if (sub_strict_overflow_p)
15066 *strict_overflow_p = true;
15067 return true;
15069 break;
15071 default:
15072 break;
15074 return false;
15077 /* Return true when T is an address and is known to be nonzero.
15078 For floating point we further ensure that T is not denormal.
15079 Similar logic is present in nonzero_address in rtlanal.h.
15081 If the return value is based on the assumption that signed overflow
15082 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15083 change *STRICT_OVERFLOW_P. */
15085 bool
15086 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15088 tree type = TREE_TYPE (t);
15089 enum tree_code code;
15091 /* Doing something useful for floating point would need more work. */
15092 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15093 return false;
15095 code = TREE_CODE (t);
15096 switch (TREE_CODE_CLASS (code))
15098 case tcc_unary:
15099 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15100 strict_overflow_p);
15101 case tcc_binary:
15102 case tcc_comparison:
15103 return tree_binary_nonzero_warnv_p (code, type,
15104 TREE_OPERAND (t, 0),
15105 TREE_OPERAND (t, 1),
15106 strict_overflow_p);
15107 case tcc_constant:
15108 case tcc_declaration:
15109 case tcc_reference:
15110 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15112 default:
15113 break;
15116 switch (code)
15118 case TRUTH_NOT_EXPR:
15119 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15120 strict_overflow_p);
15122 case TRUTH_AND_EXPR:
15123 case TRUTH_OR_EXPR:
15124 case TRUTH_XOR_EXPR:
15125 return tree_binary_nonzero_warnv_p (code, type,
15126 TREE_OPERAND (t, 0),
15127 TREE_OPERAND (t, 1),
15128 strict_overflow_p);
15130 case COND_EXPR:
15131 case CONSTRUCTOR:
15132 case OBJ_TYPE_REF:
15133 case ASSERT_EXPR:
15134 case ADDR_EXPR:
15135 case WITH_SIZE_EXPR:
15136 case EXC_PTR_EXPR:
15137 case SSA_NAME:
15138 case FILTER_EXPR:
15139 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15141 case COMPOUND_EXPR:
15142 case MODIFY_EXPR:
15143 case BIND_EXPR:
15144 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15145 strict_overflow_p);
15147 case SAVE_EXPR:
15148 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15149 strict_overflow_p);
15151 case CALL_EXPR:
15152 return alloca_call_p (t);
15154 default:
15155 break;
15157 return false;
15160 /* Return true when T is an address and is known to be nonzero.
15161 Handle warnings about undefined signed overflow. */
15163 bool
15164 tree_expr_nonzero_p (tree t)
15166 bool ret, strict_overflow_p;
15168 strict_overflow_p = false;
15169 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15170 if (strict_overflow_p)
15171 fold_overflow_warning (("assuming signed overflow does not occur when "
15172 "determining that expression is always "
15173 "non-zero"),
15174 WARN_STRICT_OVERFLOW_MISC);
15175 return ret;
15178 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15179 attempt to fold the expression to a constant without modifying TYPE,
15180 OP0 or OP1.
15182 If the expression could be simplified to a constant, then return
15183 the constant. If the expression would not be simplified to a
15184 constant, then return NULL_TREE. */
15186 tree
15187 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15189 tree tem = fold_binary (code, type, op0, op1);
15190 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15193 /* Given the components of a unary expression CODE, TYPE and OP0,
15194 attempt to fold the expression to a constant without modifying
15195 TYPE or OP0.
15197 If the expression could be simplified to a constant, then return
15198 the constant. If the expression would not be simplified to a
15199 constant, then return NULL_TREE. */
15201 tree
15202 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15204 tree tem = fold_unary (code, type, op0);
15205 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15208 /* If EXP represents referencing an element in a constant string
15209 (either via pointer arithmetic or array indexing), return the
15210 tree representing the value accessed, otherwise return NULL. */
15212 tree
15213 fold_read_from_constant_string (tree exp)
15215 if ((TREE_CODE (exp) == INDIRECT_REF
15216 || TREE_CODE (exp) == ARRAY_REF)
15217 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15219 tree exp1 = TREE_OPERAND (exp, 0);
15220 tree index;
15221 tree string;
15223 if (TREE_CODE (exp) == INDIRECT_REF)
15224 string = string_constant (exp1, &index);
15225 else
15227 tree low_bound = array_ref_low_bound (exp);
15228 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15230 /* Optimize the special-case of a zero lower bound.
15232 We convert the low_bound to sizetype to avoid some problems
15233 with constant folding. (E.g. suppose the lower bound is 1,
15234 and its mode is QI. Without the conversion,l (ARRAY
15235 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15236 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15237 if (! integer_zerop (low_bound))
15238 index = size_diffop (index, fold_convert (sizetype, low_bound));
15240 string = exp1;
15243 if (string
15244 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15245 && TREE_CODE (string) == STRING_CST
15246 && TREE_CODE (index) == INTEGER_CST
15247 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15248 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15249 == MODE_INT)
15250 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15251 return build_int_cst_type (TREE_TYPE (exp),
15252 (TREE_STRING_POINTER (string)
15253 [TREE_INT_CST_LOW (index)]));
15255 return NULL;
15258 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15259 an integer constant, real, or fixed-point constant.
15261 TYPE is the type of the result. */
15263 static tree
15264 fold_negate_const (tree arg0, tree type)
15266 tree t = NULL_TREE;
15268 switch (TREE_CODE (arg0))
15270 case INTEGER_CST:
15272 unsigned HOST_WIDE_INT low;
15273 HOST_WIDE_INT high;
15274 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15275 TREE_INT_CST_HIGH (arg0),
15276 &low, &high);
15277 t = force_fit_type_double (type, low, high, 1,
15278 (overflow | TREE_OVERFLOW (arg0))
15279 && !TYPE_UNSIGNED (type));
15280 break;
15283 case REAL_CST:
15284 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15285 break;
15287 case FIXED_CST:
15289 FIXED_VALUE_TYPE f;
15290 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15291 &(TREE_FIXED_CST (arg0)), NULL,
15292 TYPE_SATURATING (type));
15293 t = build_fixed (type, f);
15294 /* Propagate overflow flags. */
15295 if (overflow_p | TREE_OVERFLOW (arg0))
15297 TREE_OVERFLOW (t) = 1;
15298 TREE_CONSTANT_OVERFLOW (t) = 1;
15300 else if (TREE_CONSTANT_OVERFLOW (arg0))
15301 TREE_CONSTANT_OVERFLOW (t) = 1;
15302 break;
15305 default:
15306 gcc_unreachable ();
15309 return t;
15312 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15313 an integer constant or real constant.
15315 TYPE is the type of the result. */
15317 tree
15318 fold_abs_const (tree arg0, tree type)
15320 tree t = NULL_TREE;
15322 switch (TREE_CODE (arg0))
15324 case INTEGER_CST:
15325 /* If the value is unsigned, then the absolute value is
15326 the same as the ordinary value. */
15327 if (TYPE_UNSIGNED (type))
15328 t = arg0;
15329 /* Similarly, if the value is non-negative. */
15330 else if (INT_CST_LT (integer_minus_one_node, arg0))
15331 t = arg0;
15332 /* If the value is negative, then the absolute value is
15333 its negation. */
15334 else
15336 unsigned HOST_WIDE_INT low;
15337 HOST_WIDE_INT high;
15338 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15339 TREE_INT_CST_HIGH (arg0),
15340 &low, &high);
15341 t = force_fit_type_double (type, low, high, -1,
15342 overflow | TREE_OVERFLOW (arg0));
15344 break;
15346 case REAL_CST:
15347 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15348 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15349 else
15350 t = arg0;
15351 break;
15353 default:
15354 gcc_unreachable ();
15357 return t;
15360 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15361 constant. TYPE is the type of the result. */
15363 static tree
15364 fold_not_const (tree arg0, tree type)
15366 tree t = NULL_TREE;
15368 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15370 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15371 ~TREE_INT_CST_HIGH (arg0), 0,
15372 TREE_OVERFLOW (arg0));
15374 return t;
15377 /* Given CODE, a relational operator, the target type, TYPE and two
15378 constant operands OP0 and OP1, return the result of the
15379 relational operation. If the result is not a compile time
15380 constant, then return NULL_TREE. */
15382 static tree
15383 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15385 int result, invert;
15387 /* From here on, the only cases we handle are when the result is
15388 known to be a constant. */
15390 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15392 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15393 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15395 /* Handle the cases where either operand is a NaN. */
15396 if (real_isnan (c0) || real_isnan (c1))
15398 switch (code)
15400 case EQ_EXPR:
15401 case ORDERED_EXPR:
15402 result = 0;
15403 break;
15405 case NE_EXPR:
15406 case UNORDERED_EXPR:
15407 case UNLT_EXPR:
15408 case UNLE_EXPR:
15409 case UNGT_EXPR:
15410 case UNGE_EXPR:
15411 case UNEQ_EXPR:
15412 result = 1;
15413 break;
15415 case LT_EXPR:
15416 case LE_EXPR:
15417 case GT_EXPR:
15418 case GE_EXPR:
15419 case LTGT_EXPR:
15420 if (flag_trapping_math)
15421 return NULL_TREE;
15422 result = 0;
15423 break;
15425 default:
15426 gcc_unreachable ();
15429 return constant_boolean_node (result, type);
15432 return constant_boolean_node (real_compare (code, c0, c1), type);
15435 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15437 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15438 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15439 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15442 /* Handle equality/inequality of complex constants. */
15443 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15445 tree rcond = fold_relational_const (code, type,
15446 TREE_REALPART (op0),
15447 TREE_REALPART (op1));
15448 tree icond = fold_relational_const (code, type,
15449 TREE_IMAGPART (op0),
15450 TREE_IMAGPART (op1));
15451 if (code == EQ_EXPR)
15452 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15453 else if (code == NE_EXPR)
15454 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15455 else
15456 return NULL_TREE;
15459 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15461 To compute GT, swap the arguments and do LT.
15462 To compute GE, do LT and invert the result.
15463 To compute LE, swap the arguments, do LT and invert the result.
15464 To compute NE, do EQ and invert the result.
15466 Therefore, the code below must handle only EQ and LT. */
15468 if (code == LE_EXPR || code == GT_EXPR)
15470 tree tem = op0;
15471 op0 = op1;
15472 op1 = tem;
15473 code = swap_tree_comparison (code);
15476 /* Note that it is safe to invert for real values here because we
15477 have already handled the one case that it matters. */
15479 invert = 0;
15480 if (code == NE_EXPR || code == GE_EXPR)
15482 invert = 1;
15483 code = invert_tree_comparison (code, false);
15486 /* Compute a result for LT or EQ if args permit;
15487 Otherwise return T. */
15488 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15490 if (code == EQ_EXPR)
15491 result = tree_int_cst_equal (op0, op1);
15492 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15493 result = INT_CST_LT_UNSIGNED (op0, op1);
15494 else
15495 result = INT_CST_LT (op0, op1);
15497 else
15498 return NULL_TREE;
15500 if (invert)
15501 result ^= 1;
15502 return constant_boolean_node (result, type);
15505 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15506 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15507 itself. */
15509 tree
15510 fold_build_cleanup_point_expr (tree type, tree expr)
15512 /* If the expression does not have side effects then we don't have to wrap
15513 it with a cleanup point expression. */
15514 if (!TREE_SIDE_EFFECTS (expr))
15515 return expr;
15517 /* If the expression is a return, check to see if the expression inside the
15518 return has no side effects or the right hand side of the modify expression
15519 inside the return. If either don't have side effects set we don't need to
15520 wrap the expression in a cleanup point expression. Note we don't check the
15521 left hand side of the modify because it should always be a return decl. */
15522 if (TREE_CODE (expr) == RETURN_EXPR)
15524 tree op = TREE_OPERAND (expr, 0);
15525 if (!op || !TREE_SIDE_EFFECTS (op))
15526 return expr;
15527 op = TREE_OPERAND (op, 1);
15528 if (!TREE_SIDE_EFFECTS (op))
15529 return expr;
15532 return build1 (CLEANUP_POINT_EXPR, type, expr);
15535 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15536 of an indirection through OP0, or NULL_TREE if no simplification is
15537 possible. */
15539 tree
15540 fold_indirect_ref_1 (tree type, tree op0)
15542 tree sub = op0;
15543 tree subtype;
15545 STRIP_NOPS (sub);
15546 subtype = TREE_TYPE (sub);
15547 if (!POINTER_TYPE_P (subtype))
15548 return NULL_TREE;
15550 if (TREE_CODE (sub) == ADDR_EXPR)
15552 tree op = TREE_OPERAND (sub, 0);
15553 tree optype = TREE_TYPE (op);
15554 /* *&CONST_DECL -> to the value of the const decl. */
15555 if (TREE_CODE (op) == CONST_DECL)
15556 return DECL_INITIAL (op);
15557 /* *&p => p; make sure to handle *&"str"[cst] here. */
15558 if (type == optype)
15560 tree fop = fold_read_from_constant_string (op);
15561 if (fop)
15562 return fop;
15563 else
15564 return op;
15566 /* *(foo *)&fooarray => fooarray[0] */
15567 else if (TREE_CODE (optype) == ARRAY_TYPE
15568 && type == TREE_TYPE (optype))
15570 tree type_domain = TYPE_DOMAIN (optype);
15571 tree min_val = size_zero_node;
15572 if (type_domain && TYPE_MIN_VALUE (type_domain))
15573 min_val = TYPE_MIN_VALUE (type_domain);
15574 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15576 /* *(foo *)&complexfoo => __real__ complexfoo */
15577 else if (TREE_CODE (optype) == COMPLEX_TYPE
15578 && type == TREE_TYPE (optype))
15579 return fold_build1 (REALPART_EXPR, type, op);
15580 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15581 else if (TREE_CODE (optype) == VECTOR_TYPE
15582 && type == TREE_TYPE (optype))
15584 tree part_width = TYPE_SIZE (type);
15585 tree index = bitsize_int (0);
15586 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15590 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15591 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15592 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15594 tree op00 = TREE_OPERAND (sub, 0);
15595 tree op01 = TREE_OPERAND (sub, 1);
15596 tree op00type;
15598 STRIP_NOPS (op00);
15599 op00type = TREE_TYPE (op00);
15600 if (TREE_CODE (op00) == ADDR_EXPR
15601 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15602 && type == TREE_TYPE (TREE_TYPE (op00type)))
15604 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15605 tree part_width = TYPE_SIZE (type);
15606 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15607 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15608 tree index = bitsize_int (indexi);
15610 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15611 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15612 part_width, index);
15618 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15619 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15620 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15622 tree op00 = TREE_OPERAND (sub, 0);
15623 tree op01 = TREE_OPERAND (sub, 1);
15624 tree op00type;
15626 STRIP_NOPS (op00);
15627 op00type = TREE_TYPE (op00);
15628 if (TREE_CODE (op00) == ADDR_EXPR
15629 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15630 && type == TREE_TYPE (TREE_TYPE (op00type)))
15632 tree size = TYPE_SIZE_UNIT (type);
15633 if (tree_int_cst_equal (size, op01))
15634 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15638 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15639 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15640 && type == TREE_TYPE (TREE_TYPE (subtype)))
15642 tree type_domain;
15643 tree min_val = size_zero_node;
15644 sub = build_fold_indirect_ref (sub);
15645 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15646 if (type_domain && TYPE_MIN_VALUE (type_domain))
15647 min_val = TYPE_MIN_VALUE (type_domain);
15648 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15651 return NULL_TREE;
15654 /* Builds an expression for an indirection through T, simplifying some
15655 cases. */
15657 tree
15658 build_fold_indirect_ref (tree t)
15660 tree type = TREE_TYPE (TREE_TYPE (t));
15661 tree sub = fold_indirect_ref_1 (type, t);
15663 if (sub)
15664 return sub;
15665 else
15666 return build1 (INDIRECT_REF, type, t);
15669 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15671 tree
15672 fold_indirect_ref (tree t)
15674 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15676 if (sub)
15677 return sub;
15678 else
15679 return t;
15682 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15683 whose result is ignored. The type of the returned tree need not be
15684 the same as the original expression. */
15686 tree
15687 fold_ignored_result (tree t)
15689 if (!TREE_SIDE_EFFECTS (t))
15690 return integer_zero_node;
15692 for (;;)
15693 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15695 case tcc_unary:
15696 t = TREE_OPERAND (t, 0);
15697 break;
15699 case tcc_binary:
15700 case tcc_comparison:
15701 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15702 t = TREE_OPERAND (t, 0);
15703 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15704 t = TREE_OPERAND (t, 1);
15705 else
15706 return t;
15707 break;
15709 case tcc_expression:
15710 switch (TREE_CODE (t))
15712 case COMPOUND_EXPR:
15713 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15714 return t;
15715 t = TREE_OPERAND (t, 0);
15716 break;
15718 case COND_EXPR:
15719 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15720 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15721 return t;
15722 t = TREE_OPERAND (t, 0);
15723 break;
15725 default:
15726 return t;
15728 break;
15730 default:
15731 return t;
15735 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15736 This can only be applied to objects of a sizetype. */
15738 tree
15739 round_up (tree value, int divisor)
15741 tree div = NULL_TREE;
15743 gcc_assert (divisor > 0);
15744 if (divisor == 1)
15745 return value;
15747 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15748 have to do anything. Only do this when we are not given a const,
15749 because in that case, this check is more expensive than just
15750 doing it. */
15751 if (TREE_CODE (value) != INTEGER_CST)
15753 div = build_int_cst (TREE_TYPE (value), divisor);
15755 if (multiple_of_p (TREE_TYPE (value), value, div))
15756 return value;
15759 /* If divisor is a power of two, simplify this to bit manipulation. */
15760 if (divisor == (divisor & -divisor))
15762 if (TREE_CODE (value) == INTEGER_CST)
15764 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15765 unsigned HOST_WIDE_INT high;
15766 bool overflow_p;
15768 if ((low & (divisor - 1)) == 0)
15769 return value;
15771 overflow_p = TREE_OVERFLOW (value);
15772 high = TREE_INT_CST_HIGH (value);
15773 low &= ~(divisor - 1);
15774 low += divisor;
15775 if (low == 0)
15777 high++;
15778 if (high == 0)
15779 overflow_p = true;
15782 return force_fit_type_double (TREE_TYPE (value), low, high,
15783 -1, overflow_p);
15785 else
15787 tree t;
15789 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15790 value = size_binop (PLUS_EXPR, value, t);
15791 t = build_int_cst (TREE_TYPE (value), -divisor);
15792 value = size_binop (BIT_AND_EXPR, value, t);
15795 else
15797 if (!div)
15798 div = build_int_cst (TREE_TYPE (value), divisor);
15799 value = size_binop (CEIL_DIV_EXPR, value, div);
15800 value = size_binop (MULT_EXPR, value, div);
15803 return value;
15806 /* Likewise, but round down. */
15808 tree
15809 round_down (tree value, int divisor)
15811 tree div = NULL_TREE;
15813 gcc_assert (divisor > 0);
15814 if (divisor == 1)
15815 return value;
15817 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15818 have to do anything. Only do this when we are not given a const,
15819 because in that case, this check is more expensive than just
15820 doing it. */
15821 if (TREE_CODE (value) != INTEGER_CST)
15823 div = build_int_cst (TREE_TYPE (value), divisor);
15825 if (multiple_of_p (TREE_TYPE (value), value, div))
15826 return value;
15829 /* If divisor is a power of two, simplify this to bit manipulation. */
15830 if (divisor == (divisor & -divisor))
15832 tree t;
15834 t = build_int_cst (TREE_TYPE (value), -divisor);
15835 value = size_binop (BIT_AND_EXPR, value, t);
15837 else
15839 if (!div)
15840 div = build_int_cst (TREE_TYPE (value), divisor);
15841 value = size_binop (FLOOR_DIV_EXPR, value, div);
15842 value = size_binop (MULT_EXPR, value, div);
15845 return value;
15848 /* Returns the pointer to the base of the object addressed by EXP and
15849 extracts the information about the offset of the access, storing it
15850 to PBITPOS and POFFSET. */
15852 static tree
15853 split_address_to_core_and_offset (tree exp,
15854 HOST_WIDE_INT *pbitpos, tree *poffset)
15856 tree core;
15857 enum machine_mode mode;
15858 int unsignedp, volatilep;
15859 HOST_WIDE_INT bitsize;
15861 if (TREE_CODE (exp) == ADDR_EXPR)
15863 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15864 poffset, &mode, &unsignedp, &volatilep,
15865 false);
15866 core = fold_addr_expr (core);
15868 else
15870 core = exp;
15871 *pbitpos = 0;
15872 *poffset = NULL_TREE;
15875 return core;
15878 /* Returns true if addresses of E1 and E2 differ by a constant, false
15879 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15881 bool
15882 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15884 tree core1, core2;
15885 HOST_WIDE_INT bitpos1, bitpos2;
15886 tree toffset1, toffset2, tdiff, type;
15888 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15889 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15891 if (bitpos1 % BITS_PER_UNIT != 0
15892 || bitpos2 % BITS_PER_UNIT != 0
15893 || !operand_equal_p (core1, core2, 0))
15894 return false;
15896 if (toffset1 && toffset2)
15898 type = TREE_TYPE (toffset1);
15899 if (type != TREE_TYPE (toffset2))
15900 toffset2 = fold_convert (type, toffset2);
15902 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15903 if (!cst_and_fits_in_hwi (tdiff))
15904 return false;
15906 *diff = int_cst_value (tdiff);
15908 else if (toffset1 || toffset2)
15910 /* If only one of the offsets is non-constant, the difference cannot
15911 be a constant. */
15912 return false;
15914 else
15915 *diff = 0;
15917 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15918 return true;
15921 /* Simplify the floating point expression EXP when the sign of the
15922 result is not significant. Return NULL_TREE if no simplification
15923 is possible. */
15925 tree
15926 fold_strip_sign_ops (tree exp)
15928 tree arg0, arg1;
15930 switch (TREE_CODE (exp))
15932 case ABS_EXPR:
15933 case NEGATE_EXPR:
15934 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15935 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15937 case MULT_EXPR:
15938 case RDIV_EXPR:
15939 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15940 return NULL_TREE;
15941 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15942 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15943 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15944 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15945 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15946 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15947 break;
15949 case COMPOUND_EXPR:
15950 arg0 = TREE_OPERAND (exp, 0);
15951 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15952 if (arg1)
15953 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15954 break;
15956 case COND_EXPR:
15957 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15958 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15959 if (arg0 || arg1)
15960 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15961 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15962 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15963 break;
15965 case CALL_EXPR:
15967 const enum built_in_function fcode = builtin_mathfn_code (exp);
15968 switch (fcode)
15970 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15971 /* Strip copysign function call, return the 1st argument. */
15972 arg0 = CALL_EXPR_ARG (exp, 0);
15973 arg1 = CALL_EXPR_ARG (exp, 1);
15974 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15976 default:
15977 /* Strip sign ops from the argument of "odd" math functions. */
15978 if (negate_mathfn_p (fcode))
15980 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15981 if (arg0)
15982 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15984 break;
15987 break;
15989 default:
15990 break;
15992 return NULL_TREE;