re PR rtl-optimization/39110 (Revision 143939 breaks bootstrap on Linux/ia64)
[official-gcc.git] / gcc / fold-const.c
blobda4d50d5e9496e4931d239c7631362d0405f662e
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "target.h"
62 #include "toplev.h"
63 #include "intl.h"
64 #include "ggc.h"
65 #include "hashtab.h"
66 #include "langhooks.h"
67 #include "md5.h"
68 #include "gimple.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
71 otherwise. */
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
78 COMPCODE_FALSE = 0,
79 COMPCODE_LT = 1,
80 COMPCODE_EQ = 2,
81 COMPCODE_LE = 3,
82 COMPCODE_GT = 4,
83 COMPCODE_LTGT = 5,
84 COMPCODE_GE = 6,
85 COMPCODE_ORD = 7,
86 COMPCODE_UNORD = 8,
87 COMPCODE_UNLT = 9,
88 COMPCODE_UNEQ = 10,
89 COMPCODE_UNLE = 11,
90 COMPCODE_UNGT = 12,
91 COMPCODE_NE = 13,
92 COMPCODE_UNGE = 14,
93 COMPCODE_TRUE = 15
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
114 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
115 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
116 enum machine_mode *, int *, int *,
117 tree *, tree *);
118 static int all_ones_mask_p (const_tree, int);
119 static tree sign_bit_p (tree, const_tree);
120 static int simple_operand_p (const_tree);
121 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
122 static tree range_predecessor (tree);
123 static tree range_successor (tree);
124 static tree make_range (tree, int *, tree *, tree *, bool *);
125 static tree build_range_check (tree, tree, int, tree, tree);
126 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 tree);
128 static tree fold_range_test (enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
130 static tree unextend (tree, int, int, tree);
131 static tree fold_truthop (enum tree_code, tree, tree, tree);
132 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
133 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
134 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
135 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 tree, tree,
137 tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
151 addition.
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 sign. */
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
163 #define LOWPART(x) \
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
173 static void
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 static void
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
188 HOST_WIDE_INT *hi)
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
205 unsigned int prec;
206 int sign_extended_type;
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
210 prec = POINTER_SIZE;
211 else
212 prec = TYPE_PRECISION (type);
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 else
226 h1 = 0;
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
243 else if (prec == HOST_BITS_PER_WIDE_INT)
245 if ((HOST_WIDE_INT)l1 < 0)
246 h1 = -1;
248 else
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
253 h1 = -1;
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
258 *lv = l1;
259 *hv = h1;
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
280 tree
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
283 bool overflowed)
285 int sign_extended_type;
286 bool overflow;
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
293 overflow = fit_double_type (low, high, &low, &high, type);
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
298 if (overflowed
299 || overflowable < 0
300 || (overflowable > 0 && sign_extended_type))
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
307 return t;
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 bool unsigned_p)
327 unsigned HOST_WIDE_INT l;
328 HOST_WIDE_INT h;
330 l = l1 + l2;
331 h = h1 + h2 + (l < l1);
333 *lv = l;
334 *hv = h;
336 if (unsigned_p)
337 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 else
339 return OVERFLOW_SUM_SIGN (h1, h2, h);
342 /* Negate a doubleword integer with doubleword result.
343 Return nonzero if the operation overflows, assuming it's signed.
344 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
345 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
348 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
349 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
351 if (l1 == 0)
353 *lv = 0;
354 *hv = - h1;
355 return (*hv & h1) < 0;
357 else
359 *lv = -l1;
360 *hv = ~h1;
361 return 0;
365 /* Multiply two doubleword integers with doubleword result.
366 Return nonzero if the operation overflows according to UNSIGNED_P.
367 Each argument is given as two `HOST_WIDE_INT' pieces.
368 One argument is L1 and H1; the other, L2 and H2.
369 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
372 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
373 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
374 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
375 bool unsigned_p)
377 HOST_WIDE_INT arg1[4];
378 HOST_WIDE_INT arg2[4];
379 HOST_WIDE_INT prod[4 * 2];
380 unsigned HOST_WIDE_INT carry;
381 int i, j, k;
382 unsigned HOST_WIDE_INT toplow, neglow;
383 HOST_WIDE_INT tophigh, neghigh;
385 encode (arg1, l1, h1);
386 encode (arg2, l2, h2);
388 memset (prod, 0, sizeof prod);
390 for (i = 0; i < 4; i++)
392 carry = 0;
393 for (j = 0; j < 4; j++)
395 k = i + j;
396 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
397 carry += arg1[i] * arg2[j];
398 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 carry += prod[k];
400 prod[k] = LOWPART (carry);
401 carry = HIGHPART (carry);
403 prod[i + 4] = carry;
406 decode (prod, lv, hv);
407 decode (prod + 4, &toplow, &tophigh);
409 /* Unsigned overflow is immediate. */
410 if (unsigned_p)
411 return (toplow | tophigh) != 0;
413 /* Check for signed overflow by calculating the signed representation of the
414 top half of the result; it should agree with the low half's sign bit. */
415 if (h1 < 0)
417 neg_double (l2, h2, &neglow, &neghigh);
418 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
420 if (h2 < 0)
422 neg_double (l1, h1, &neglow, &neghigh);
423 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
425 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
428 /* Shift the doubleword integer in L1, H1 left by COUNT places
429 keeping only PREC bits of result.
430 Shift right if COUNT is negative.
431 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434 void
435 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
436 HOST_WIDE_INT count, unsigned int prec,
437 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
439 unsigned HOST_WIDE_INT signmask;
441 if (count < 0)
443 rshift_double (l1, h1, -count, prec, lv, hv, arith);
444 return;
447 if (SHIFT_COUNT_TRUNCATED)
448 count %= prec;
450 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
452 /* Shifting by the host word size is undefined according to the
453 ANSI standard, so we must handle this as a special case. */
454 *hv = 0;
455 *lv = 0;
457 else if (count >= HOST_BITS_PER_WIDE_INT)
459 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
460 *lv = 0;
462 else
464 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
465 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
466 *lv = l1 << count;
469 /* Sign extend all bits that are beyond the precision. */
471 signmask = -((prec > HOST_BITS_PER_WIDE_INT
472 ? ((unsigned HOST_WIDE_INT) *hv
473 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
474 : (*lv >> (prec - 1))) & 1);
476 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
478 else if (prec >= HOST_BITS_PER_WIDE_INT)
480 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
481 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
483 else
485 *hv = signmask;
486 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
487 *lv |= signmask << prec;
491 /* Shift the doubleword integer in L1, H1 right by COUNT places
492 keeping only PREC bits of result. COUNT must be positive.
493 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
494 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
496 void
497 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
498 HOST_WIDE_INT count, unsigned int prec,
499 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
500 int arith)
502 unsigned HOST_WIDE_INT signmask;
504 signmask = (arith
505 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
506 : 0);
508 if (SHIFT_COUNT_TRUNCATED)
509 count %= prec;
511 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
513 /* Shifting by the host word size is undefined according to the
514 ANSI standard, so we must handle this as a special case. */
515 *hv = 0;
516 *lv = 0;
518 else if (count >= HOST_BITS_PER_WIDE_INT)
520 *hv = 0;
521 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
523 else
525 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
526 *lv = ((l1 >> count)
527 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
530 /* Zero / sign extend all bits that are beyond the precision. */
532 if (count >= (HOST_WIDE_INT)prec)
534 *hv = signmask;
535 *lv = signmask;
537 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
539 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
541 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
542 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
544 else
546 *hv = signmask;
547 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
548 *lv |= signmask << (prec - count);
552 /* Rotate the doubleword integer in L1, H1 left by COUNT places
553 keeping only PREC bits of result.
554 Rotate right if COUNT is negative.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 void
558 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
565 count %= prec;
566 if (count < 0)
567 count += prec;
569 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
571 *lv = s1l | s2l;
572 *hv = s1h | s2h;
575 /* Rotate the doubleword integer in L1, H1 left by COUNT places
576 keeping only PREC bits of result. COUNT must be positive.
577 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
579 void
580 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
581 HOST_WIDE_INT count, unsigned int prec,
582 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
584 unsigned HOST_WIDE_INT s1l, s2l;
585 HOST_WIDE_INT s1h, s2h;
587 count %= prec;
588 if (count < 0)
589 count += prec;
591 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
592 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
593 *lv = s1l | s2l;
594 *hv = s1h | s2h;
597 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
598 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
599 CODE is a tree code for a kind of division, one of
600 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 or EXACT_DIV_EXPR
602 It controls how the quotient is rounded to an integer.
603 Return nonzero if the operation overflows.
604 UNS nonzero says do unsigned division. */
607 div_and_round_double (enum tree_code code, int uns,
608 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
609 HOST_WIDE_INT hnum_orig,
610 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
611 HOST_WIDE_INT hden_orig,
612 unsigned HOST_WIDE_INT *lquo,
613 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
614 HOST_WIDE_INT *hrem)
616 int quo_neg = 0;
617 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
618 HOST_WIDE_INT den[4], quo[4];
619 int i, j;
620 unsigned HOST_WIDE_INT work;
621 unsigned HOST_WIDE_INT carry = 0;
622 unsigned HOST_WIDE_INT lnum = lnum_orig;
623 HOST_WIDE_INT hnum = hnum_orig;
624 unsigned HOST_WIDE_INT lden = lden_orig;
625 HOST_WIDE_INT hden = hden_orig;
626 int overflow = 0;
628 if (hden == 0 && lden == 0)
629 overflow = 1, lden = 1;
631 /* Calculate quotient sign and convert operands to unsigned. */
632 if (!uns)
634 if (hnum < 0)
636 quo_neg = ~ quo_neg;
637 /* (minimum integer) / (-1) is the only overflow case. */
638 if (neg_double (lnum, hnum, &lnum, &hnum)
639 && ((HOST_WIDE_INT) lden & hden) == -1)
640 overflow = 1;
642 if (hden < 0)
644 quo_neg = ~ quo_neg;
645 neg_double (lden, hden, &lden, &hden);
649 if (hnum == 0 && hden == 0)
650 { /* single precision */
651 *hquo = *hrem = 0;
652 /* This unsigned division rounds toward zero. */
653 *lquo = lnum / lden;
654 goto finish_up;
657 if (hnum == 0)
658 { /* trivial case: dividend < divisor */
659 /* hden != 0 already checked. */
660 *hquo = *lquo = 0;
661 *hrem = hnum;
662 *lrem = lnum;
663 goto finish_up;
666 memset (quo, 0, sizeof quo);
668 memset (num, 0, sizeof num); /* to zero 9th element */
669 memset (den, 0, sizeof den);
671 encode (num, lnum, hnum);
672 encode (den, lden, hden);
674 /* Special code for when the divisor < BASE. */
675 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
677 /* hnum != 0 already checked. */
678 for (i = 4 - 1; i >= 0; i--)
680 work = num[i] + carry * BASE;
681 quo[i] = work / lden;
682 carry = work % lden;
685 else
687 /* Full double precision division,
688 with thanks to Don Knuth's "Seminumerical Algorithms". */
689 int num_hi_sig, den_hi_sig;
690 unsigned HOST_WIDE_INT quo_est, scale;
692 /* Find the highest nonzero divisor digit. */
693 for (i = 4 - 1;; i--)
694 if (den[i] != 0)
696 den_hi_sig = i;
697 break;
700 /* Insure that the first digit of the divisor is at least BASE/2.
701 This is required by the quotient digit estimation algorithm. */
703 scale = BASE / (den[den_hi_sig] + 1);
704 if (scale > 1)
705 { /* scale divisor and dividend */
706 carry = 0;
707 for (i = 0; i <= 4 - 1; i++)
709 work = (num[i] * scale) + carry;
710 num[i] = LOWPART (work);
711 carry = HIGHPART (work);
714 num[4] = carry;
715 carry = 0;
716 for (i = 0; i <= 4 - 1; i++)
718 work = (den[i] * scale) + carry;
719 den[i] = LOWPART (work);
720 carry = HIGHPART (work);
721 if (den[i] != 0) den_hi_sig = i;
725 num_hi_sig = 4;
727 /* Main loop */
728 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
730 /* Guess the next quotient digit, quo_est, by dividing the first
731 two remaining dividend digits by the high order quotient digit.
732 quo_est is never low and is at most 2 high. */
733 unsigned HOST_WIDE_INT tmp;
735 num_hi_sig = i + den_hi_sig + 1;
736 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
737 if (num[num_hi_sig] != den[den_hi_sig])
738 quo_est = work / den[den_hi_sig];
739 else
740 quo_est = BASE - 1;
742 /* Refine quo_est so it's usually correct, and at most one high. */
743 tmp = work - quo_est * den[den_hi_sig];
744 if (tmp < BASE
745 && (den[den_hi_sig - 1] * quo_est
746 > (tmp * BASE + num[num_hi_sig - 2])))
747 quo_est--;
749 /* Try QUO_EST as the quotient digit, by multiplying the
750 divisor by QUO_EST and subtracting from the remaining dividend.
751 Keep in mind that QUO_EST is the I - 1st digit. */
753 carry = 0;
754 for (j = 0; j <= den_hi_sig; j++)
756 work = quo_est * den[j] + carry;
757 carry = HIGHPART (work);
758 work = num[i + j] - LOWPART (work);
759 num[i + j] = LOWPART (work);
760 carry += HIGHPART (work) != 0;
763 /* If quo_est was high by one, then num[i] went negative and
764 we need to correct things. */
765 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
767 quo_est--;
768 carry = 0; /* add divisor back in */
769 for (j = 0; j <= den_hi_sig; j++)
771 work = num[i + j] + den[j] + carry;
772 carry = HIGHPART (work);
773 num[i + j] = LOWPART (work);
776 num [num_hi_sig] += carry;
779 /* Store the quotient digit. */
780 quo[i] = quo_est;
784 decode (quo, lquo, hquo);
786 finish_up:
787 /* If result is negative, make it so. */
788 if (quo_neg)
789 neg_double (*lquo, *hquo, lquo, hquo);
791 /* Compute trial remainder: rem = num - (quo * den) */
792 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
793 neg_double (*lrem, *hrem, lrem, hrem);
794 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
796 switch (code)
798 case TRUNC_DIV_EXPR:
799 case TRUNC_MOD_EXPR: /* round toward zero */
800 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
801 return overflow;
803 case FLOOR_DIV_EXPR:
804 case FLOOR_MOD_EXPR: /* round toward negative infinity */
805 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
807 /* quo = quo - 1; */
808 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
809 lquo, hquo);
811 else
812 return overflow;
813 break;
815 case CEIL_DIV_EXPR:
816 case CEIL_MOD_EXPR: /* round toward positive infinity */
817 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
820 lquo, hquo);
822 else
823 return overflow;
824 break;
826 case ROUND_DIV_EXPR:
827 case ROUND_MOD_EXPR: /* round to closest integer */
829 unsigned HOST_WIDE_INT labs_rem = *lrem;
830 HOST_WIDE_INT habs_rem = *hrem;
831 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
832 HOST_WIDE_INT habs_den = hden, htwice;
834 /* Get absolute values. */
835 if (*hrem < 0)
836 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 if (hden < 0)
838 neg_double (lden, hden, &labs_den, &habs_den);
840 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
841 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
842 labs_rem, habs_rem, &ltwice, &htwice);
844 if (((unsigned HOST_WIDE_INT) habs_den
845 < (unsigned HOST_WIDE_INT) htwice)
846 || (((unsigned HOST_WIDE_INT) habs_den
847 == (unsigned HOST_WIDE_INT) htwice)
848 && (labs_den <= ltwice)))
850 if (*hquo < 0)
851 /* quo = quo - 1; */
852 add_double (*lquo, *hquo,
853 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
854 else
855 /* quo = quo + 1; */
856 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
857 lquo, hquo);
859 else
860 return overflow;
862 break;
864 default:
865 gcc_unreachable ();
868 /* Compute true remainder: rem = num - (quo * den) */
869 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
870 neg_double (*lrem, *hrem, lrem, hrem);
871 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
872 return overflow;
875 /* If ARG2 divides ARG1 with zero remainder, carries out the division
876 of type CODE and returns the quotient.
877 Otherwise returns NULL_TREE. */
879 static tree
880 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
882 unsigned HOST_WIDE_INT int1l, int2l;
883 HOST_WIDE_INT int1h, int2h;
884 unsigned HOST_WIDE_INT quol, reml;
885 HOST_WIDE_INT quoh, remh;
886 tree type = TREE_TYPE (arg1);
887 int uns = TYPE_UNSIGNED (type);
889 int1l = TREE_INT_CST_LOW (arg1);
890 int1h = TREE_INT_CST_HIGH (arg1);
891 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
892 &obj[some_exotic_number]. */
893 if (POINTER_TYPE_P (type))
895 uns = false;
896 type = signed_type_for (type);
897 fit_double_type (int1l, int1h, &int1l, &int1h,
898 type);
900 else
901 fit_double_type (int1l, int1h, &int1l, &int1h, type);
902 int2l = TREE_INT_CST_LOW (arg2);
903 int2h = TREE_INT_CST_HIGH (arg2);
905 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
906 &quol, &quoh, &reml, &remh);
907 if (remh != 0 || reml != 0)
908 return NULL_TREE;
910 return build_int_cst_wide (type, quol, quoh);
913 /* This is nonzero if we should defer warnings about undefined
914 overflow. This facility exists because these warnings are a
915 special case. The code to estimate loop iterations does not want
916 to issue any warnings, since it works with expressions which do not
917 occur in user code. Various bits of cleanup code call fold(), but
918 only use the result if it has certain characteristics (e.g., is a
919 constant); that code only wants to issue a warning if the result is
920 used. */
922 static int fold_deferring_overflow_warnings;
924 /* If a warning about undefined overflow is deferred, this is the
925 warning. Note that this may cause us to turn two warnings into
926 one, but that is fine since it is sufficient to only give one
927 warning per expression. */
929 static const char* fold_deferred_overflow_warning;
931 /* If a warning about undefined overflow is deferred, this is the
932 level at which the warning should be emitted. */
934 static enum warn_strict_overflow_code fold_deferred_overflow_code;
936 /* Start deferring overflow warnings. We could use a stack here to
937 permit nested calls, but at present it is not necessary. */
939 void
940 fold_defer_overflow_warnings (void)
942 ++fold_deferring_overflow_warnings;
945 /* Stop deferring overflow warnings. If there is a pending warning,
946 and ISSUE is true, then issue the warning if appropriate. STMT is
947 the statement with which the warning should be associated (used for
948 location information); STMT may be NULL. CODE is the level of the
949 warning--a warn_strict_overflow_code value. This function will use
950 the smaller of CODE and the deferred code when deciding whether to
951 issue the warning. CODE may be zero to mean to always use the
952 deferred code. */
954 void
955 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
957 const char *warnmsg;
958 location_t locus;
960 gcc_assert (fold_deferring_overflow_warnings > 0);
961 --fold_deferring_overflow_warnings;
962 if (fold_deferring_overflow_warnings > 0)
964 if (fold_deferred_overflow_warning != NULL
965 && code != 0
966 && code < (int) fold_deferred_overflow_code)
967 fold_deferred_overflow_code = code;
968 return;
971 warnmsg = fold_deferred_overflow_warning;
972 fold_deferred_overflow_warning = NULL;
974 if (!issue || warnmsg == NULL)
975 return;
977 if (gimple_no_warning_p (stmt))
978 return;
980 /* Use the smallest code level when deciding to issue the
981 warning. */
982 if (code == 0 || code > (int) fold_deferred_overflow_code)
983 code = fold_deferred_overflow_code;
985 if (!issue_strict_overflow_warning (code))
986 return;
988 if (stmt == NULL)
989 locus = input_location;
990 else
991 locus = gimple_location (stmt);
992 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
995 /* Stop deferring overflow warnings, ignoring any deferred
996 warnings. */
998 void
999 fold_undefer_and_ignore_overflow_warnings (void)
1001 fold_undefer_overflow_warnings (false, NULL, 0);
1004 /* Whether we are deferring overflow warnings. */
1006 bool
1007 fold_deferring_overflow_warnings_p (void)
1009 return fold_deferring_overflow_warnings > 0;
1012 /* This is called when we fold something based on the fact that signed
1013 overflow is undefined. */
1015 static void
1016 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1018 if (fold_deferring_overflow_warnings > 0)
1020 if (fold_deferred_overflow_warning == NULL
1021 || wc < fold_deferred_overflow_code)
1023 fold_deferred_overflow_warning = gmsgid;
1024 fold_deferred_overflow_code = wc;
1027 else if (issue_strict_overflow_warning (wc))
1028 warning (OPT_Wstrict_overflow, gmsgid);
1031 /* Return true if the built-in mathematical function specified by CODE
1032 is odd, i.e. -f(x) == f(-x). */
1034 static bool
1035 negate_mathfn_p (enum built_in_function code)
1037 switch (code)
1039 CASE_FLT_FN (BUILT_IN_ASIN):
1040 CASE_FLT_FN (BUILT_IN_ASINH):
1041 CASE_FLT_FN (BUILT_IN_ATAN):
1042 CASE_FLT_FN (BUILT_IN_ATANH):
1043 CASE_FLT_FN (BUILT_IN_CASIN):
1044 CASE_FLT_FN (BUILT_IN_CASINH):
1045 CASE_FLT_FN (BUILT_IN_CATAN):
1046 CASE_FLT_FN (BUILT_IN_CATANH):
1047 CASE_FLT_FN (BUILT_IN_CBRT):
1048 CASE_FLT_FN (BUILT_IN_CPROJ):
1049 CASE_FLT_FN (BUILT_IN_CSIN):
1050 CASE_FLT_FN (BUILT_IN_CSINH):
1051 CASE_FLT_FN (BUILT_IN_CTAN):
1052 CASE_FLT_FN (BUILT_IN_CTANH):
1053 CASE_FLT_FN (BUILT_IN_ERF):
1054 CASE_FLT_FN (BUILT_IN_LLROUND):
1055 CASE_FLT_FN (BUILT_IN_LROUND):
1056 CASE_FLT_FN (BUILT_IN_ROUND):
1057 CASE_FLT_FN (BUILT_IN_SIN):
1058 CASE_FLT_FN (BUILT_IN_SINH):
1059 CASE_FLT_FN (BUILT_IN_TAN):
1060 CASE_FLT_FN (BUILT_IN_TANH):
1061 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 return true;
1064 CASE_FLT_FN (BUILT_IN_LLRINT):
1065 CASE_FLT_FN (BUILT_IN_LRINT):
1066 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1067 CASE_FLT_FN (BUILT_IN_RINT):
1068 return !flag_rounding_math;
1070 default:
1071 break;
1073 return false;
1076 /* Check whether we may negate an integer constant T without causing
1077 overflow. */
1079 bool
1080 may_negate_without_overflow_p (const_tree t)
1082 unsigned HOST_WIDE_INT val;
1083 unsigned int prec;
1084 tree type;
1086 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1088 type = TREE_TYPE (t);
1089 if (TYPE_UNSIGNED (type))
1090 return false;
1092 prec = TYPE_PRECISION (type);
1093 if (prec > HOST_BITS_PER_WIDE_INT)
1095 if (TREE_INT_CST_LOW (t) != 0)
1096 return true;
1097 prec -= HOST_BITS_PER_WIDE_INT;
1098 val = TREE_INT_CST_HIGH (t);
1100 else
1101 val = TREE_INT_CST_LOW (t);
1102 if (prec < HOST_BITS_PER_WIDE_INT)
1103 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1104 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1107 /* Determine whether an expression T can be cheaply negated using
1108 the function negate_expr without introducing undefined overflow. */
1110 static bool
1111 negate_expr_p (tree t)
1113 tree type;
1115 if (t == 0)
1116 return false;
1118 type = TREE_TYPE (t);
1120 STRIP_SIGN_NOPS (t);
1121 switch (TREE_CODE (t))
1123 case INTEGER_CST:
1124 if (TYPE_OVERFLOW_WRAPS (type))
1125 return true;
1127 /* Check that -CST will not overflow type. */
1128 return may_negate_without_overflow_p (t);
1129 case BIT_NOT_EXPR:
1130 return (INTEGRAL_TYPE_P (type)
1131 && TYPE_OVERFLOW_WRAPS (type));
1133 case FIXED_CST:
1134 case REAL_CST:
1135 case NEGATE_EXPR:
1136 return true;
1138 case COMPLEX_CST:
1139 return negate_expr_p (TREE_REALPART (t))
1140 && negate_expr_p (TREE_IMAGPART (t));
1142 case COMPLEX_EXPR:
1143 return negate_expr_p (TREE_OPERAND (t, 0))
1144 && negate_expr_p (TREE_OPERAND (t, 1));
1146 case CONJ_EXPR:
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1149 case PLUS_EXPR:
1150 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1151 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1152 return false;
1153 /* -(A + B) -> (-B) - A. */
1154 if (negate_expr_p (TREE_OPERAND (t, 1))
1155 && reorder_operands_p (TREE_OPERAND (t, 0),
1156 TREE_OPERAND (t, 1)))
1157 return true;
1158 /* -(A + B) -> (-A) - B. */
1159 return negate_expr_p (TREE_OPERAND (t, 0));
1161 case MINUS_EXPR:
1162 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1163 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1164 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1165 && reorder_operands_p (TREE_OPERAND (t, 0),
1166 TREE_OPERAND (t, 1));
1168 case MULT_EXPR:
1169 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1170 break;
1172 /* Fall through. */
1174 case RDIV_EXPR:
1175 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1176 return negate_expr_p (TREE_OPERAND (t, 1))
1177 || negate_expr_p (TREE_OPERAND (t, 0));
1178 break;
1180 case TRUNC_DIV_EXPR:
1181 case ROUND_DIV_EXPR:
1182 case FLOOR_DIV_EXPR:
1183 case CEIL_DIV_EXPR:
1184 case EXACT_DIV_EXPR:
1185 /* In general we can't negate A / B, because if A is INT_MIN and
1186 B is 1, we may turn this into INT_MIN / -1 which is undefined
1187 and actually traps on some architectures. But if overflow is
1188 undefined, we can negate, because - (INT_MIN / 1) is an
1189 overflow. */
1190 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1191 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1192 break;
1193 return negate_expr_p (TREE_OPERAND (t, 1))
1194 || negate_expr_p (TREE_OPERAND (t, 0));
1196 case NOP_EXPR:
1197 /* Negate -((double)float) as (double)(-float). */
1198 if (TREE_CODE (type) == REAL_TYPE)
1200 tree tem = strip_float_extensions (t);
1201 if (tem != t)
1202 return negate_expr_p (tem);
1204 break;
1206 case CALL_EXPR:
1207 /* Negate -f(x) as f(-x). */
1208 if (negate_mathfn_p (builtin_mathfn_code (t)))
1209 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1210 break;
1212 case RSHIFT_EXPR:
1213 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1214 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1216 tree op1 = TREE_OPERAND (t, 1);
1217 if (TREE_INT_CST_HIGH (op1) == 0
1218 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1219 == TREE_INT_CST_LOW (op1))
1220 return true;
1222 break;
1224 default:
1225 break;
1227 return false;
1230 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1231 simplification is possible.
1232 If negate_expr_p would return true for T, NULL_TREE will never be
1233 returned. */
1235 static tree
1236 fold_negate_expr (tree t)
1238 tree type = TREE_TYPE (t);
1239 tree tem;
1241 switch (TREE_CODE (t))
1243 /* Convert - (~A) to A + 1. */
1244 case BIT_NOT_EXPR:
1245 if (INTEGRAL_TYPE_P (type))
1246 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1247 build_int_cst (type, 1));
1248 break;
1250 case INTEGER_CST:
1251 tem = fold_negate_const (t, type);
1252 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1253 || !TYPE_OVERFLOW_TRAPS (type))
1254 return tem;
1255 break;
1257 case REAL_CST:
1258 tem = fold_negate_const (t, type);
1259 /* Two's complement FP formats, such as c4x, may overflow. */
1260 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1261 return tem;
1262 break;
1264 case FIXED_CST:
1265 tem = fold_negate_const (t, type);
1266 return tem;
1268 case COMPLEX_CST:
1270 tree rpart = negate_expr (TREE_REALPART (t));
1271 tree ipart = negate_expr (TREE_IMAGPART (t));
1273 if ((TREE_CODE (rpart) == REAL_CST
1274 && TREE_CODE (ipart) == REAL_CST)
1275 || (TREE_CODE (rpart) == INTEGER_CST
1276 && TREE_CODE (ipart) == INTEGER_CST))
1277 return build_complex (type, rpart, ipart);
1279 break;
1281 case COMPLEX_EXPR:
1282 if (negate_expr_p (t))
1283 return fold_build2 (COMPLEX_EXPR, type,
1284 fold_negate_expr (TREE_OPERAND (t, 0)),
1285 fold_negate_expr (TREE_OPERAND (t, 1)));
1286 break;
1288 case CONJ_EXPR:
1289 if (negate_expr_p (t))
1290 return fold_build1 (CONJ_EXPR, type,
1291 fold_negate_expr (TREE_OPERAND (t, 0)));
1292 break;
1294 case NEGATE_EXPR:
1295 return TREE_OPERAND (t, 0);
1297 case PLUS_EXPR:
1298 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1299 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1301 /* -(A + B) -> (-B) - A. */
1302 if (negate_expr_p (TREE_OPERAND (t, 1))
1303 && reorder_operands_p (TREE_OPERAND (t, 0),
1304 TREE_OPERAND (t, 1)))
1306 tem = negate_expr (TREE_OPERAND (t, 1));
1307 return fold_build2 (MINUS_EXPR, type,
1308 tem, TREE_OPERAND (t, 0));
1311 /* -(A + B) -> (-A) - B. */
1312 if (negate_expr_p (TREE_OPERAND (t, 0)))
1314 tem = negate_expr (TREE_OPERAND (t, 0));
1315 return fold_build2 (MINUS_EXPR, type,
1316 tem, TREE_OPERAND (t, 1));
1319 break;
1321 case MINUS_EXPR:
1322 /* - (A - B) -> B - A */
1323 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1324 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1325 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1326 return fold_build2 (MINUS_EXPR, type,
1327 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1328 break;
1330 case MULT_EXPR:
1331 if (TYPE_UNSIGNED (type))
1332 break;
1334 /* Fall through. */
1336 case RDIV_EXPR:
1337 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1339 tem = TREE_OPERAND (t, 1);
1340 if (negate_expr_p (tem))
1341 return fold_build2 (TREE_CODE (t), type,
1342 TREE_OPERAND (t, 0), negate_expr (tem));
1343 tem = TREE_OPERAND (t, 0);
1344 if (negate_expr_p (tem))
1345 return fold_build2 (TREE_CODE (t), type,
1346 negate_expr (tem), TREE_OPERAND (t, 1));
1348 break;
1350 case TRUNC_DIV_EXPR:
1351 case ROUND_DIV_EXPR:
1352 case FLOOR_DIV_EXPR:
1353 case CEIL_DIV_EXPR:
1354 case EXACT_DIV_EXPR:
1355 /* In general we can't negate A / B, because if A is INT_MIN and
1356 B is 1, we may turn this into INT_MIN / -1 which is undefined
1357 and actually traps on some architectures. But if overflow is
1358 undefined, we can negate, because - (INT_MIN / 1) is an
1359 overflow. */
1360 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1362 const char * const warnmsg = G_("assuming signed overflow does not "
1363 "occur when negating a division");
1364 tem = TREE_OPERAND (t, 1);
1365 if (negate_expr_p (tem))
1367 if (INTEGRAL_TYPE_P (type)
1368 && (TREE_CODE (tem) != INTEGER_CST
1369 || integer_onep (tem)))
1370 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1371 return fold_build2 (TREE_CODE (t), type,
1372 TREE_OPERAND (t, 0), negate_expr (tem));
1374 tem = TREE_OPERAND (t, 0);
1375 if (negate_expr_p (tem))
1377 if (INTEGRAL_TYPE_P (type)
1378 && (TREE_CODE (tem) != INTEGER_CST
1379 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1380 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1381 return fold_build2 (TREE_CODE (t), type,
1382 negate_expr (tem), TREE_OPERAND (t, 1));
1385 break;
1387 case NOP_EXPR:
1388 /* Convert -((double)float) into (double)(-float). */
1389 if (TREE_CODE (type) == REAL_TYPE)
1391 tem = strip_float_extensions (t);
1392 if (tem != t && negate_expr_p (tem))
1393 return fold_convert (type, negate_expr (tem));
1395 break;
1397 case CALL_EXPR:
1398 /* Negate -f(x) as f(-x). */
1399 if (negate_mathfn_p (builtin_mathfn_code (t))
1400 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1402 tree fndecl, arg;
1404 fndecl = get_callee_fndecl (t);
1405 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1406 return build_call_expr (fndecl, 1, arg);
1408 break;
1410 case RSHIFT_EXPR:
1411 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1412 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1414 tree op1 = TREE_OPERAND (t, 1);
1415 if (TREE_INT_CST_HIGH (op1) == 0
1416 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1417 == TREE_INT_CST_LOW (op1))
1419 tree ntype = TYPE_UNSIGNED (type)
1420 ? signed_type_for (type)
1421 : unsigned_type_for (type);
1422 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1423 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1424 return fold_convert (type, temp);
1427 break;
1429 default:
1430 break;
1433 return NULL_TREE;
1436 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1437 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1438 return NULL_TREE. */
1440 static tree
1441 negate_expr (tree t)
1443 tree type, tem;
1445 if (t == NULL_TREE)
1446 return NULL_TREE;
1448 type = TREE_TYPE (t);
1449 STRIP_SIGN_NOPS (t);
1451 tem = fold_negate_expr (t);
1452 if (!tem)
1453 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1454 return fold_convert (type, tem);
1457 /* Split a tree IN into a constant, literal and variable parts that could be
1458 combined with CODE to make IN. "constant" means an expression with
1459 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1460 commutative arithmetic operation. Store the constant part into *CONP,
1461 the literal in *LITP and return the variable part. If a part isn't
1462 present, set it to null. If the tree does not decompose in this way,
1463 return the entire tree as the variable part and the other parts as null.
1465 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1466 case, we negate an operand that was subtracted. Except if it is a
1467 literal for which we use *MINUS_LITP instead.
1469 If NEGATE_P is true, we are negating all of IN, again except a literal
1470 for which we use *MINUS_LITP instead.
1472 If IN is itself a literal or constant, return it as appropriate.
1474 Note that we do not guarantee that any of the three values will be the
1475 same type as IN, but they will have the same signedness and mode. */
1477 static tree
1478 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1479 tree *minus_litp, int negate_p)
1481 tree var = 0;
1483 *conp = 0;
1484 *litp = 0;
1485 *minus_litp = 0;
1487 /* Strip any conversions that don't change the machine mode or signedness. */
1488 STRIP_SIGN_NOPS (in);
1490 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1491 || TREE_CODE (in) == FIXED_CST)
1492 *litp = in;
1493 else if (TREE_CODE (in) == code
1494 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1495 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1496 /* We can associate addition and subtraction together (even
1497 though the C standard doesn't say so) for integers because
1498 the value is not affected. For reals, the value might be
1499 affected, so we can't. */
1500 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1501 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1503 tree op0 = TREE_OPERAND (in, 0);
1504 tree op1 = TREE_OPERAND (in, 1);
1505 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1506 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1508 /* First see if either of the operands is a literal, then a constant. */
1509 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1510 || TREE_CODE (op0) == FIXED_CST)
1511 *litp = op0, op0 = 0;
1512 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1513 || TREE_CODE (op1) == FIXED_CST)
1514 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1516 if (op0 != 0 && TREE_CONSTANT (op0))
1517 *conp = op0, op0 = 0;
1518 else if (op1 != 0 && TREE_CONSTANT (op1))
1519 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1521 /* If we haven't dealt with either operand, this is not a case we can
1522 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1523 if (op0 != 0 && op1 != 0)
1524 var = in;
1525 else if (op0 != 0)
1526 var = op0;
1527 else
1528 var = op1, neg_var_p = neg1_p;
1530 /* Now do any needed negations. */
1531 if (neg_litp_p)
1532 *minus_litp = *litp, *litp = 0;
1533 if (neg_conp_p)
1534 *conp = negate_expr (*conp);
1535 if (neg_var_p)
1536 var = negate_expr (var);
1538 else if (TREE_CONSTANT (in))
1539 *conp = in;
1540 else
1541 var = in;
1543 if (negate_p)
1545 if (*litp)
1546 *minus_litp = *litp, *litp = 0;
1547 else if (*minus_litp)
1548 *litp = *minus_litp, *minus_litp = 0;
1549 *conp = negate_expr (*conp);
1550 var = negate_expr (var);
1553 return var;
1556 /* Re-associate trees split by the above function. T1 and T2 are either
1557 expressions to associate or null. Return the new expression, if any. If
1558 we build an operation, do it in TYPE and with CODE. */
1560 static tree
1561 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1563 if (t1 == 0)
1564 return t2;
1565 else if (t2 == 0)
1566 return t1;
1568 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1569 try to fold this since we will have infinite recursion. But do
1570 deal with any NEGATE_EXPRs. */
1571 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1572 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1574 if (code == PLUS_EXPR)
1576 if (TREE_CODE (t1) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1578 fold_convert (type, TREE_OPERAND (t1, 0)));
1579 else if (TREE_CODE (t2) == NEGATE_EXPR)
1580 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1581 fold_convert (type, TREE_OPERAND (t2, 0)));
1582 else if (integer_zerop (t2))
1583 return fold_convert (type, t1);
1585 else if (code == MINUS_EXPR)
1587 if (integer_zerop (t2))
1588 return fold_convert (type, t1);
1591 return build2 (code, type, fold_convert (type, t1),
1592 fold_convert (type, t2));
1595 return fold_build2 (code, type, fold_convert (type, t1),
1596 fold_convert (type, t2));
1599 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1600 for use in int_const_binop, size_binop and size_diffop. */
1602 static bool
1603 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1605 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1606 return false;
1607 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1608 return false;
1610 switch (code)
1612 case LSHIFT_EXPR:
1613 case RSHIFT_EXPR:
1614 case LROTATE_EXPR:
1615 case RROTATE_EXPR:
1616 return true;
1618 default:
1619 break;
1622 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1623 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1624 && TYPE_MODE (type1) == TYPE_MODE (type2);
1628 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1629 to produce a new constant. Return NULL_TREE if we don't know how
1630 to evaluate CODE at compile-time.
1632 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1634 tree
1635 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1637 unsigned HOST_WIDE_INT int1l, int2l;
1638 HOST_WIDE_INT int1h, int2h;
1639 unsigned HOST_WIDE_INT low;
1640 HOST_WIDE_INT hi;
1641 unsigned HOST_WIDE_INT garbagel;
1642 HOST_WIDE_INT garbageh;
1643 tree t;
1644 tree type = TREE_TYPE (arg1);
1645 int uns = TYPE_UNSIGNED (type);
1646 int is_sizetype
1647 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1648 int overflow = 0;
1650 int1l = TREE_INT_CST_LOW (arg1);
1651 int1h = TREE_INT_CST_HIGH (arg1);
1652 int2l = TREE_INT_CST_LOW (arg2);
1653 int2h = TREE_INT_CST_HIGH (arg2);
1655 switch (code)
1657 case BIT_IOR_EXPR:
1658 low = int1l | int2l, hi = int1h | int2h;
1659 break;
1661 case BIT_XOR_EXPR:
1662 low = int1l ^ int2l, hi = int1h ^ int2h;
1663 break;
1665 case BIT_AND_EXPR:
1666 low = int1l & int2l, hi = int1h & int2h;
1667 break;
1669 case RSHIFT_EXPR:
1670 int2l = -int2l;
1671 case LSHIFT_EXPR:
1672 /* It's unclear from the C standard whether shifts can overflow.
1673 The following code ignores overflow; perhaps a C standard
1674 interpretation ruling is needed. */
1675 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1676 &low, &hi, !uns);
1677 break;
1679 case RROTATE_EXPR:
1680 int2l = - int2l;
1681 case LROTATE_EXPR:
1682 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1683 &low, &hi);
1684 break;
1686 case PLUS_EXPR:
1687 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1688 break;
1690 case MINUS_EXPR:
1691 neg_double (int2l, int2h, &low, &hi);
1692 add_double (int1l, int1h, low, hi, &low, &hi);
1693 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1694 break;
1696 case MULT_EXPR:
1697 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1698 break;
1700 case TRUNC_DIV_EXPR:
1701 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1702 case EXACT_DIV_EXPR:
1703 /* This is a shortcut for a common special case. */
1704 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1705 && !TREE_OVERFLOW (arg1)
1706 && !TREE_OVERFLOW (arg2)
1707 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1709 if (code == CEIL_DIV_EXPR)
1710 int1l += int2l - 1;
1712 low = int1l / int2l, hi = 0;
1713 break;
1716 /* ... fall through ... */
1718 case ROUND_DIV_EXPR:
1719 if (int2h == 0 && int2l == 0)
1720 return NULL_TREE;
1721 if (int2h == 0 && int2l == 1)
1723 low = int1l, hi = int1h;
1724 break;
1726 if (int1l == int2l && int1h == int2h
1727 && ! (int1l == 0 && int1h == 0))
1729 low = 1, hi = 0;
1730 break;
1732 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1733 &low, &hi, &garbagel, &garbageh);
1734 break;
1736 case TRUNC_MOD_EXPR:
1737 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1738 /* This is a shortcut for a common special case. */
1739 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1740 && !TREE_OVERFLOW (arg1)
1741 && !TREE_OVERFLOW (arg2)
1742 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1744 if (code == CEIL_MOD_EXPR)
1745 int1l += int2l - 1;
1746 low = int1l % int2l, hi = 0;
1747 break;
1750 /* ... fall through ... */
1752 case ROUND_MOD_EXPR:
1753 if (int2h == 0 && int2l == 0)
1754 return NULL_TREE;
1755 overflow = div_and_round_double (code, uns,
1756 int1l, int1h, int2l, int2h,
1757 &garbagel, &garbageh, &low, &hi);
1758 break;
1760 case MIN_EXPR:
1761 case MAX_EXPR:
1762 if (uns)
1763 low = (((unsigned HOST_WIDE_INT) int1h
1764 < (unsigned HOST_WIDE_INT) int2h)
1765 || (((unsigned HOST_WIDE_INT) int1h
1766 == (unsigned HOST_WIDE_INT) int2h)
1767 && int1l < int2l));
1768 else
1769 low = (int1h < int2h
1770 || (int1h == int2h && int1l < int2l));
1772 if (low == (code == MIN_EXPR))
1773 low = int1l, hi = int1h;
1774 else
1775 low = int2l, hi = int2h;
1776 break;
1778 default:
1779 return NULL_TREE;
1782 if (notrunc)
1784 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1786 /* Propagate overflow flags ourselves. */
1787 if (((!uns || is_sizetype) && overflow)
1788 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1790 t = copy_node (t);
1791 TREE_OVERFLOW (t) = 1;
1794 else
1795 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1796 ((!uns || is_sizetype) && overflow)
1797 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1799 return t;
1802 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1803 constant. We assume ARG1 and ARG2 have the same data type, or at least
1804 are the same kind of constant and the same machine mode. Return zero if
1805 combining the constants is not allowed in the current operating mode.
1807 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1809 static tree
1810 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1812 /* Sanity check for the recursive cases. */
1813 if (!arg1 || !arg2)
1814 return NULL_TREE;
1816 STRIP_NOPS (arg1);
1817 STRIP_NOPS (arg2);
1819 if (TREE_CODE (arg1) == INTEGER_CST)
1820 return int_const_binop (code, arg1, arg2, notrunc);
1822 if (TREE_CODE (arg1) == REAL_CST)
1824 enum machine_mode mode;
1825 REAL_VALUE_TYPE d1;
1826 REAL_VALUE_TYPE d2;
1827 REAL_VALUE_TYPE value;
1828 REAL_VALUE_TYPE result;
1829 bool inexact;
1830 tree t, type;
1832 /* The following codes are handled by real_arithmetic. */
1833 switch (code)
1835 case PLUS_EXPR:
1836 case MINUS_EXPR:
1837 case MULT_EXPR:
1838 case RDIV_EXPR:
1839 case MIN_EXPR:
1840 case MAX_EXPR:
1841 break;
1843 default:
1844 return NULL_TREE;
1847 d1 = TREE_REAL_CST (arg1);
1848 d2 = TREE_REAL_CST (arg2);
1850 type = TREE_TYPE (arg1);
1851 mode = TYPE_MODE (type);
1853 /* Don't perform operation if we honor signaling NaNs and
1854 either operand is a NaN. */
1855 if (HONOR_SNANS (mode)
1856 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1857 return NULL_TREE;
1859 /* Don't perform operation if it would raise a division
1860 by zero exception. */
1861 if (code == RDIV_EXPR
1862 && REAL_VALUES_EQUAL (d2, dconst0)
1863 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1864 return NULL_TREE;
1866 /* If either operand is a NaN, just return it. Otherwise, set up
1867 for floating-point trap; we return an overflow. */
1868 if (REAL_VALUE_ISNAN (d1))
1869 return arg1;
1870 else if (REAL_VALUE_ISNAN (d2))
1871 return arg2;
1873 inexact = real_arithmetic (&value, code, &d1, &d2);
1874 real_convert (&result, mode, &value);
1876 /* Don't constant fold this floating point operation if
1877 the result has overflowed and flag_trapping_math. */
1878 if (flag_trapping_math
1879 && MODE_HAS_INFINITIES (mode)
1880 && REAL_VALUE_ISINF (result)
1881 && !REAL_VALUE_ISINF (d1)
1882 && !REAL_VALUE_ISINF (d2))
1883 return NULL_TREE;
1885 /* Don't constant fold this floating point operation if the
1886 result may dependent upon the run-time rounding mode and
1887 flag_rounding_math is set, or if GCC's software emulation
1888 is unable to accurately represent the result. */
1889 if ((flag_rounding_math
1890 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1891 && (inexact || !real_identical (&result, &value)))
1892 return NULL_TREE;
1894 t = build_real (type, result);
1896 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1897 return t;
1900 if (TREE_CODE (arg1) == FIXED_CST)
1902 FIXED_VALUE_TYPE f1;
1903 FIXED_VALUE_TYPE f2;
1904 FIXED_VALUE_TYPE result;
1905 tree t, type;
1906 int sat_p;
1907 bool overflow_p;
1909 /* The following codes are handled by fixed_arithmetic. */
1910 switch (code)
1912 case PLUS_EXPR:
1913 case MINUS_EXPR:
1914 case MULT_EXPR:
1915 case TRUNC_DIV_EXPR:
1916 f2 = TREE_FIXED_CST (arg2);
1917 break;
1919 case LSHIFT_EXPR:
1920 case RSHIFT_EXPR:
1921 f2.data.high = TREE_INT_CST_HIGH (arg2);
1922 f2.data.low = TREE_INT_CST_LOW (arg2);
1923 f2.mode = SImode;
1924 break;
1926 default:
1927 return NULL_TREE;
1930 f1 = TREE_FIXED_CST (arg1);
1931 type = TREE_TYPE (arg1);
1932 sat_p = TYPE_SATURATING (type);
1933 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1934 t = build_fixed (type, result);
1935 /* Propagate overflow flags. */
1936 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1938 TREE_OVERFLOW (t) = 1;
1939 TREE_CONSTANT_OVERFLOW (t) = 1;
1941 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1942 TREE_CONSTANT_OVERFLOW (t) = 1;
1943 return t;
1946 if (TREE_CODE (arg1) == COMPLEX_CST)
1948 tree type = TREE_TYPE (arg1);
1949 tree r1 = TREE_REALPART (arg1);
1950 tree i1 = TREE_IMAGPART (arg1);
1951 tree r2 = TREE_REALPART (arg2);
1952 tree i2 = TREE_IMAGPART (arg2);
1953 tree real, imag;
1955 switch (code)
1957 case PLUS_EXPR:
1958 case MINUS_EXPR:
1959 real = const_binop (code, r1, r2, notrunc);
1960 imag = const_binop (code, i1, i2, notrunc);
1961 break;
1963 case MULT_EXPR:
1964 real = const_binop (MINUS_EXPR,
1965 const_binop (MULT_EXPR, r1, r2, notrunc),
1966 const_binop (MULT_EXPR, i1, i2, notrunc),
1967 notrunc);
1968 imag = const_binop (PLUS_EXPR,
1969 const_binop (MULT_EXPR, r1, i2, notrunc),
1970 const_binop (MULT_EXPR, i1, r2, notrunc),
1971 notrunc);
1972 break;
1974 case RDIV_EXPR:
1976 tree magsquared
1977 = const_binop (PLUS_EXPR,
1978 const_binop (MULT_EXPR, r2, r2, notrunc),
1979 const_binop (MULT_EXPR, i2, i2, notrunc),
1980 notrunc);
1981 tree t1
1982 = const_binop (PLUS_EXPR,
1983 const_binop (MULT_EXPR, r1, r2, notrunc),
1984 const_binop (MULT_EXPR, i1, i2, notrunc),
1985 notrunc);
1986 tree t2
1987 = const_binop (MINUS_EXPR,
1988 const_binop (MULT_EXPR, i1, r2, notrunc),
1989 const_binop (MULT_EXPR, r1, i2, notrunc),
1990 notrunc);
1992 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1993 code = TRUNC_DIV_EXPR;
1995 real = const_binop (code, t1, magsquared, notrunc);
1996 imag = const_binop (code, t2, magsquared, notrunc);
1998 break;
2000 default:
2001 return NULL_TREE;
2004 if (real && imag)
2005 return build_complex (type, real, imag);
2008 return NULL_TREE;
2011 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2012 indicates which particular sizetype to create. */
2014 tree
2015 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2017 return build_int_cst (sizetype_tab[(int) kind], number);
2020 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2021 is a tree code. The type of the result is taken from the operands.
2022 Both must be equivalent integer types, ala int_binop_types_match_p.
2023 If the operands are constant, so is the result. */
2025 tree
2026 size_binop (enum tree_code code, tree arg0, tree arg1)
2028 tree type = TREE_TYPE (arg0);
2030 if (arg0 == error_mark_node || arg1 == error_mark_node)
2031 return error_mark_node;
2033 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2034 TREE_TYPE (arg1)));
2036 /* Handle the special case of two integer constants faster. */
2037 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2039 /* And some specific cases even faster than that. */
2040 if (code == PLUS_EXPR)
2042 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2043 return arg1;
2044 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2045 return arg0;
2047 else if (code == MINUS_EXPR)
2049 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2050 return arg0;
2052 else if (code == MULT_EXPR)
2054 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2055 return arg1;
2058 /* Handle general case of two integer constants. */
2059 return int_const_binop (code, arg0, arg1, 0);
2062 return fold_build2 (code, type, arg0, arg1);
2065 /* Given two values, either both of sizetype or both of bitsizetype,
2066 compute the difference between the two values. Return the value
2067 in signed type corresponding to the type of the operands. */
2069 tree
2070 size_diffop (tree arg0, tree arg1)
2072 tree type = TREE_TYPE (arg0);
2073 tree ctype;
2075 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2076 TREE_TYPE (arg1)));
2078 /* If the type is already signed, just do the simple thing. */
2079 if (!TYPE_UNSIGNED (type))
2080 return size_binop (MINUS_EXPR, arg0, arg1);
2082 if (type == sizetype)
2083 ctype = ssizetype;
2084 else if (type == bitsizetype)
2085 ctype = sbitsizetype;
2086 else
2087 ctype = signed_type_for (type);
2089 /* If either operand is not a constant, do the conversions to the signed
2090 type and subtract. The hardware will do the right thing with any
2091 overflow in the subtraction. */
2092 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2093 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2094 fold_convert (ctype, arg1));
2096 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2097 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2098 overflow) and negate (which can't either). Special-case a result
2099 of zero while we're here. */
2100 if (tree_int_cst_equal (arg0, arg1))
2101 return build_int_cst (ctype, 0);
2102 else if (tree_int_cst_lt (arg1, arg0))
2103 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2104 else
2105 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2106 fold_convert (ctype, size_binop (MINUS_EXPR,
2107 arg1, arg0)));
2110 /* A subroutine of fold_convert_const handling conversions of an
2111 INTEGER_CST to another integer type. */
2113 static tree
2114 fold_convert_const_int_from_int (tree type, const_tree arg1)
2116 tree t;
2118 /* Given an integer constant, make new constant with new type,
2119 appropriately sign-extended or truncated. */
2120 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2121 TREE_INT_CST_HIGH (arg1),
2122 /* Don't set the overflow when
2123 converting from a pointer, */
2124 !POINTER_TYPE_P (TREE_TYPE (arg1))
2125 /* or to a sizetype with same signedness
2126 and the precision is unchanged.
2127 ??? sizetype is always sign-extended,
2128 but its signedness depends on the
2129 frontend. Thus we see spurious overflows
2130 here if we do not check this. */
2131 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2132 == TYPE_PRECISION (type))
2133 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2134 == TYPE_UNSIGNED (type))
2135 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2137 || (TREE_CODE (type) == INTEGER_TYPE
2138 && TYPE_IS_SIZETYPE (type)))),
2139 (TREE_INT_CST_HIGH (arg1) < 0
2140 && (TYPE_UNSIGNED (type)
2141 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2142 | TREE_OVERFLOW (arg1));
2144 return t;
2147 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2148 to an integer type. */
2150 static tree
2151 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2153 int overflow = 0;
2154 tree t;
2156 /* The following code implements the floating point to integer
2157 conversion rules required by the Java Language Specification,
2158 that IEEE NaNs are mapped to zero and values that overflow
2159 the target precision saturate, i.e. values greater than
2160 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2161 are mapped to INT_MIN. These semantics are allowed by the
2162 C and C++ standards that simply state that the behavior of
2163 FP-to-integer conversion is unspecified upon overflow. */
2165 HOST_WIDE_INT high, low;
2166 REAL_VALUE_TYPE r;
2167 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2169 switch (code)
2171 case FIX_TRUNC_EXPR:
2172 real_trunc (&r, VOIDmode, &x);
2173 break;
2175 default:
2176 gcc_unreachable ();
2179 /* If R is NaN, return zero and show we have an overflow. */
2180 if (REAL_VALUE_ISNAN (r))
2182 overflow = 1;
2183 high = 0;
2184 low = 0;
2187 /* See if R is less than the lower bound or greater than the
2188 upper bound. */
2190 if (! overflow)
2192 tree lt = TYPE_MIN_VALUE (type);
2193 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2194 if (REAL_VALUES_LESS (r, l))
2196 overflow = 1;
2197 high = TREE_INT_CST_HIGH (lt);
2198 low = TREE_INT_CST_LOW (lt);
2202 if (! overflow)
2204 tree ut = TYPE_MAX_VALUE (type);
2205 if (ut)
2207 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2208 if (REAL_VALUES_LESS (u, r))
2210 overflow = 1;
2211 high = TREE_INT_CST_HIGH (ut);
2212 low = TREE_INT_CST_LOW (ut);
2217 if (! overflow)
2218 REAL_VALUE_TO_INT (&low, &high, r);
2220 t = force_fit_type_double (type, low, high, -1,
2221 overflow | TREE_OVERFLOW (arg1));
2222 return t;
2225 /* A subroutine of fold_convert_const handling conversions of a
2226 FIXED_CST to an integer type. */
2228 static tree
2229 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2231 tree t;
2232 double_int temp, temp_trunc;
2233 unsigned int mode;
2235 /* Right shift FIXED_CST to temp by fbit. */
2236 temp = TREE_FIXED_CST (arg1).data;
2237 mode = TREE_FIXED_CST (arg1).mode;
2238 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2240 lshift_double (temp.low, temp.high,
2241 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2242 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2244 /* Left shift temp to temp_trunc by fbit. */
2245 lshift_double (temp.low, temp.high,
2246 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2247 &temp_trunc.low, &temp_trunc.high,
2248 SIGNED_FIXED_POINT_MODE_P (mode));
2250 else
2252 temp.low = 0;
2253 temp.high = 0;
2254 temp_trunc.low = 0;
2255 temp_trunc.high = 0;
2258 /* If FIXED_CST is negative, we need to round the value toward 0.
2259 By checking if the fractional bits are not zero to add 1 to temp. */
2260 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2261 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2263 double_int one;
2264 one.low = 1;
2265 one.high = 0;
2266 temp = double_int_add (temp, one);
2269 /* Given a fixed-point constant, make new constant with new type,
2270 appropriately sign-extended or truncated. */
2271 t = force_fit_type_double (type, temp.low, temp.high, -1,
2272 (temp.high < 0
2273 && (TYPE_UNSIGNED (type)
2274 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2275 | TREE_OVERFLOW (arg1));
2277 return t;
2280 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2281 to another floating point type. */
2283 static tree
2284 fold_convert_const_real_from_real (tree type, const_tree arg1)
2286 REAL_VALUE_TYPE value;
2287 tree t;
2289 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2290 t = build_real (type, value);
2292 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2293 return t;
2296 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2297 to a floating point type. */
2299 static tree
2300 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2302 REAL_VALUE_TYPE value;
2303 tree t;
2305 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2306 t = build_real (type, value);
2308 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2309 TREE_CONSTANT_OVERFLOW (t)
2310 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2311 return t;
2314 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2315 to another fixed-point type. */
2317 static tree
2318 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2320 FIXED_VALUE_TYPE value;
2321 tree t;
2322 bool overflow_p;
2324 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2325 TYPE_SATURATING (type));
2326 t = build_fixed (type, value);
2328 /* Propagate overflow flags. */
2329 if (overflow_p | TREE_OVERFLOW (arg1))
2331 TREE_OVERFLOW (t) = 1;
2332 TREE_CONSTANT_OVERFLOW (t) = 1;
2334 else if (TREE_CONSTANT_OVERFLOW (arg1))
2335 TREE_CONSTANT_OVERFLOW (t) = 1;
2336 return t;
2339 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2340 to a fixed-point type. */
2342 static tree
2343 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2345 FIXED_VALUE_TYPE value;
2346 tree t;
2347 bool overflow_p;
2349 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2350 TREE_INT_CST (arg1),
2351 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2352 TYPE_SATURATING (type));
2353 t = build_fixed (type, value);
2355 /* Propagate overflow flags. */
2356 if (overflow_p | TREE_OVERFLOW (arg1))
2358 TREE_OVERFLOW (t) = 1;
2359 TREE_CONSTANT_OVERFLOW (t) = 1;
2361 else if (TREE_CONSTANT_OVERFLOW (arg1))
2362 TREE_CONSTANT_OVERFLOW (t) = 1;
2363 return t;
2366 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2367 to a fixed-point type. */
2369 static tree
2370 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2372 FIXED_VALUE_TYPE value;
2373 tree t;
2374 bool overflow_p;
2376 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2377 &TREE_REAL_CST (arg1),
2378 TYPE_SATURATING (type));
2379 t = build_fixed (type, value);
2381 /* Propagate overflow flags. */
2382 if (overflow_p | TREE_OVERFLOW (arg1))
2384 TREE_OVERFLOW (t) = 1;
2385 TREE_CONSTANT_OVERFLOW (t) = 1;
2387 else if (TREE_CONSTANT_OVERFLOW (arg1))
2388 TREE_CONSTANT_OVERFLOW (t) = 1;
2389 return t;
2392 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2393 type TYPE. If no simplification can be done return NULL_TREE. */
2395 static tree
2396 fold_convert_const (enum tree_code code, tree type, tree arg1)
2398 if (TREE_TYPE (arg1) == type)
2399 return arg1;
2401 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2402 || TREE_CODE (type) == OFFSET_TYPE)
2404 if (TREE_CODE (arg1) == INTEGER_CST)
2405 return fold_convert_const_int_from_int (type, arg1);
2406 else if (TREE_CODE (arg1) == REAL_CST)
2407 return fold_convert_const_int_from_real (code, type, arg1);
2408 else if (TREE_CODE (arg1) == FIXED_CST)
2409 return fold_convert_const_int_from_fixed (type, arg1);
2411 else if (TREE_CODE (type) == REAL_TYPE)
2413 if (TREE_CODE (arg1) == INTEGER_CST)
2414 return build_real_from_int_cst (type, arg1);
2415 else if (TREE_CODE (arg1) == REAL_CST)
2416 return fold_convert_const_real_from_real (type, arg1);
2417 else if (TREE_CODE (arg1) == FIXED_CST)
2418 return fold_convert_const_real_from_fixed (type, arg1);
2420 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2422 if (TREE_CODE (arg1) == FIXED_CST)
2423 return fold_convert_const_fixed_from_fixed (type, arg1);
2424 else if (TREE_CODE (arg1) == INTEGER_CST)
2425 return fold_convert_const_fixed_from_int (type, arg1);
2426 else if (TREE_CODE (arg1) == REAL_CST)
2427 return fold_convert_const_fixed_from_real (type, arg1);
2429 return NULL_TREE;
2432 /* Construct a vector of zero elements of vector type TYPE. */
2434 static tree
2435 build_zero_vector (tree type)
2437 tree elem, list;
2438 int i, units;
2440 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2441 units = TYPE_VECTOR_SUBPARTS (type);
2443 list = NULL_TREE;
2444 for (i = 0; i < units; i++)
2445 list = tree_cons (NULL_TREE, elem, list);
2446 return build_vector (type, list);
2449 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2451 bool
2452 fold_convertible_p (const_tree type, const_tree arg)
2454 tree orig = TREE_TYPE (arg);
2456 if (type == orig)
2457 return true;
2459 if (TREE_CODE (arg) == ERROR_MARK
2460 || TREE_CODE (type) == ERROR_MARK
2461 || TREE_CODE (orig) == ERROR_MARK)
2462 return false;
2464 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2465 return true;
2467 switch (TREE_CODE (type))
2469 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2470 case POINTER_TYPE: case REFERENCE_TYPE:
2471 case OFFSET_TYPE:
2472 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2473 || TREE_CODE (orig) == OFFSET_TYPE)
2474 return true;
2475 return (TREE_CODE (orig) == VECTOR_TYPE
2476 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2478 case REAL_TYPE:
2479 case FIXED_POINT_TYPE:
2480 case COMPLEX_TYPE:
2481 case VECTOR_TYPE:
2482 case VOID_TYPE:
2483 return TREE_CODE (type) == TREE_CODE (orig);
2485 default:
2486 return false;
2490 /* Convert expression ARG to type TYPE. Used by the middle-end for
2491 simple conversions in preference to calling the front-end's convert. */
2493 tree
2494 fold_convert (tree type, tree arg)
2496 tree orig = TREE_TYPE (arg);
2497 tree tem;
2499 if (type == orig)
2500 return arg;
2502 if (TREE_CODE (arg) == ERROR_MARK
2503 || TREE_CODE (type) == ERROR_MARK
2504 || TREE_CODE (orig) == ERROR_MARK)
2505 return error_mark_node;
2507 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2508 return fold_build1 (NOP_EXPR, type, arg);
2510 switch (TREE_CODE (type))
2512 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2513 case POINTER_TYPE: case REFERENCE_TYPE:
2514 case OFFSET_TYPE:
2515 if (TREE_CODE (arg) == INTEGER_CST)
2517 tem = fold_convert_const (NOP_EXPR, type, arg);
2518 if (tem != NULL_TREE)
2519 return tem;
2521 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2522 || TREE_CODE (orig) == OFFSET_TYPE)
2523 return fold_build1 (NOP_EXPR, type, arg);
2524 if (TREE_CODE (orig) == COMPLEX_TYPE)
2526 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2527 return fold_convert (type, tem);
2529 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2530 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2531 return fold_build1 (NOP_EXPR, type, arg);
2533 case REAL_TYPE:
2534 if (TREE_CODE (arg) == INTEGER_CST)
2536 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2537 if (tem != NULL_TREE)
2538 return tem;
2540 else if (TREE_CODE (arg) == REAL_CST)
2542 tem = fold_convert_const (NOP_EXPR, type, arg);
2543 if (tem != NULL_TREE)
2544 return tem;
2546 else if (TREE_CODE (arg) == FIXED_CST)
2548 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2549 if (tem != NULL_TREE)
2550 return tem;
2553 switch (TREE_CODE (orig))
2555 case INTEGER_TYPE:
2556 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2557 case POINTER_TYPE: case REFERENCE_TYPE:
2558 return fold_build1 (FLOAT_EXPR, type, arg);
2560 case REAL_TYPE:
2561 return fold_build1 (NOP_EXPR, type, arg);
2563 case FIXED_POINT_TYPE:
2564 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2566 case COMPLEX_TYPE:
2567 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2568 return fold_convert (type, tem);
2570 default:
2571 gcc_unreachable ();
2574 case FIXED_POINT_TYPE:
2575 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2576 || TREE_CODE (arg) == REAL_CST)
2578 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2579 if (tem != NULL_TREE)
2580 return tem;
2583 switch (TREE_CODE (orig))
2585 case FIXED_POINT_TYPE:
2586 case INTEGER_TYPE:
2587 case ENUMERAL_TYPE:
2588 case BOOLEAN_TYPE:
2589 case REAL_TYPE:
2590 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2592 case COMPLEX_TYPE:
2593 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2594 return fold_convert (type, tem);
2596 default:
2597 gcc_unreachable ();
2600 case COMPLEX_TYPE:
2601 switch (TREE_CODE (orig))
2603 case INTEGER_TYPE:
2604 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2605 case POINTER_TYPE: case REFERENCE_TYPE:
2606 case REAL_TYPE:
2607 case FIXED_POINT_TYPE:
2608 return build2 (COMPLEX_EXPR, type,
2609 fold_convert (TREE_TYPE (type), arg),
2610 fold_convert (TREE_TYPE (type), integer_zero_node));
2611 case COMPLEX_TYPE:
2613 tree rpart, ipart;
2615 if (TREE_CODE (arg) == COMPLEX_EXPR)
2617 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2618 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2619 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2622 arg = save_expr (arg);
2623 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2624 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2625 rpart = fold_convert (TREE_TYPE (type), rpart);
2626 ipart = fold_convert (TREE_TYPE (type), ipart);
2627 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2630 default:
2631 gcc_unreachable ();
2634 case VECTOR_TYPE:
2635 if (integer_zerop (arg))
2636 return build_zero_vector (type);
2637 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2638 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2639 || TREE_CODE (orig) == VECTOR_TYPE);
2640 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2642 case VOID_TYPE:
2643 tem = fold_ignored_result (arg);
2644 if (TREE_CODE (tem) == MODIFY_EXPR)
2645 return tem;
2646 return fold_build1 (NOP_EXPR, type, tem);
2648 default:
2649 gcc_unreachable ();
2653 /* Return false if expr can be assumed not to be an lvalue, true
2654 otherwise. */
2656 static bool
2657 maybe_lvalue_p (const_tree x)
2659 /* We only need to wrap lvalue tree codes. */
2660 switch (TREE_CODE (x))
2662 case VAR_DECL:
2663 case PARM_DECL:
2664 case RESULT_DECL:
2665 case LABEL_DECL:
2666 case FUNCTION_DECL:
2667 case SSA_NAME:
2669 case COMPONENT_REF:
2670 case INDIRECT_REF:
2671 case ALIGN_INDIRECT_REF:
2672 case MISALIGNED_INDIRECT_REF:
2673 case ARRAY_REF:
2674 case ARRAY_RANGE_REF:
2675 case BIT_FIELD_REF:
2676 case OBJ_TYPE_REF:
2678 case REALPART_EXPR:
2679 case IMAGPART_EXPR:
2680 case PREINCREMENT_EXPR:
2681 case PREDECREMENT_EXPR:
2682 case SAVE_EXPR:
2683 case TRY_CATCH_EXPR:
2684 case WITH_CLEANUP_EXPR:
2685 case COMPOUND_EXPR:
2686 case MODIFY_EXPR:
2687 case TARGET_EXPR:
2688 case COND_EXPR:
2689 case BIND_EXPR:
2690 case MIN_EXPR:
2691 case MAX_EXPR:
2692 break;
2694 default:
2695 /* Assume the worst for front-end tree codes. */
2696 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2697 break;
2698 return false;
2701 return true;
2704 /* Return an expr equal to X but certainly not valid as an lvalue. */
2706 tree
2707 non_lvalue (tree x)
2709 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2710 us. */
2711 if (in_gimple_form)
2712 return x;
2714 if (! maybe_lvalue_p (x))
2715 return x;
2716 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2719 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2720 Zero means allow extended lvalues. */
2722 int pedantic_lvalues;
2724 /* When pedantic, return an expr equal to X but certainly not valid as a
2725 pedantic lvalue. Otherwise, return X. */
2727 static tree
2728 pedantic_non_lvalue (tree x)
2730 if (pedantic_lvalues)
2731 return non_lvalue (x);
2732 else
2733 return x;
2736 /* Given a tree comparison code, return the code that is the logical inverse
2737 of the given code. It is not safe to do this for floating-point
2738 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2739 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2741 enum tree_code
2742 invert_tree_comparison (enum tree_code code, bool honor_nans)
2744 if (honor_nans && flag_trapping_math)
2745 return ERROR_MARK;
2747 switch (code)
2749 case EQ_EXPR:
2750 return NE_EXPR;
2751 case NE_EXPR:
2752 return EQ_EXPR;
2753 case GT_EXPR:
2754 return honor_nans ? UNLE_EXPR : LE_EXPR;
2755 case GE_EXPR:
2756 return honor_nans ? UNLT_EXPR : LT_EXPR;
2757 case LT_EXPR:
2758 return honor_nans ? UNGE_EXPR : GE_EXPR;
2759 case LE_EXPR:
2760 return honor_nans ? UNGT_EXPR : GT_EXPR;
2761 case LTGT_EXPR:
2762 return UNEQ_EXPR;
2763 case UNEQ_EXPR:
2764 return LTGT_EXPR;
2765 case UNGT_EXPR:
2766 return LE_EXPR;
2767 case UNGE_EXPR:
2768 return LT_EXPR;
2769 case UNLT_EXPR:
2770 return GE_EXPR;
2771 case UNLE_EXPR:
2772 return GT_EXPR;
2773 case ORDERED_EXPR:
2774 return UNORDERED_EXPR;
2775 case UNORDERED_EXPR:
2776 return ORDERED_EXPR;
2777 default:
2778 gcc_unreachable ();
2782 /* Similar, but return the comparison that results if the operands are
2783 swapped. This is safe for floating-point. */
2785 enum tree_code
2786 swap_tree_comparison (enum tree_code code)
2788 switch (code)
2790 case EQ_EXPR:
2791 case NE_EXPR:
2792 case ORDERED_EXPR:
2793 case UNORDERED_EXPR:
2794 case LTGT_EXPR:
2795 case UNEQ_EXPR:
2796 return code;
2797 case GT_EXPR:
2798 return LT_EXPR;
2799 case GE_EXPR:
2800 return LE_EXPR;
2801 case LT_EXPR:
2802 return GT_EXPR;
2803 case LE_EXPR:
2804 return GE_EXPR;
2805 case UNGT_EXPR:
2806 return UNLT_EXPR;
2807 case UNGE_EXPR:
2808 return UNLE_EXPR;
2809 case UNLT_EXPR:
2810 return UNGT_EXPR;
2811 case UNLE_EXPR:
2812 return UNGE_EXPR;
2813 default:
2814 gcc_unreachable ();
2819 /* Convert a comparison tree code from an enum tree_code representation
2820 into a compcode bit-based encoding. This function is the inverse of
2821 compcode_to_comparison. */
2823 static enum comparison_code
2824 comparison_to_compcode (enum tree_code code)
2826 switch (code)
2828 case LT_EXPR:
2829 return COMPCODE_LT;
2830 case EQ_EXPR:
2831 return COMPCODE_EQ;
2832 case LE_EXPR:
2833 return COMPCODE_LE;
2834 case GT_EXPR:
2835 return COMPCODE_GT;
2836 case NE_EXPR:
2837 return COMPCODE_NE;
2838 case GE_EXPR:
2839 return COMPCODE_GE;
2840 case ORDERED_EXPR:
2841 return COMPCODE_ORD;
2842 case UNORDERED_EXPR:
2843 return COMPCODE_UNORD;
2844 case UNLT_EXPR:
2845 return COMPCODE_UNLT;
2846 case UNEQ_EXPR:
2847 return COMPCODE_UNEQ;
2848 case UNLE_EXPR:
2849 return COMPCODE_UNLE;
2850 case UNGT_EXPR:
2851 return COMPCODE_UNGT;
2852 case LTGT_EXPR:
2853 return COMPCODE_LTGT;
2854 case UNGE_EXPR:
2855 return COMPCODE_UNGE;
2856 default:
2857 gcc_unreachable ();
2861 /* Convert a compcode bit-based encoding of a comparison operator back
2862 to GCC's enum tree_code representation. This function is the
2863 inverse of comparison_to_compcode. */
2865 static enum tree_code
2866 compcode_to_comparison (enum comparison_code code)
2868 switch (code)
2870 case COMPCODE_LT:
2871 return LT_EXPR;
2872 case COMPCODE_EQ:
2873 return EQ_EXPR;
2874 case COMPCODE_LE:
2875 return LE_EXPR;
2876 case COMPCODE_GT:
2877 return GT_EXPR;
2878 case COMPCODE_NE:
2879 return NE_EXPR;
2880 case COMPCODE_GE:
2881 return GE_EXPR;
2882 case COMPCODE_ORD:
2883 return ORDERED_EXPR;
2884 case COMPCODE_UNORD:
2885 return UNORDERED_EXPR;
2886 case COMPCODE_UNLT:
2887 return UNLT_EXPR;
2888 case COMPCODE_UNEQ:
2889 return UNEQ_EXPR;
2890 case COMPCODE_UNLE:
2891 return UNLE_EXPR;
2892 case COMPCODE_UNGT:
2893 return UNGT_EXPR;
2894 case COMPCODE_LTGT:
2895 return LTGT_EXPR;
2896 case COMPCODE_UNGE:
2897 return UNGE_EXPR;
2898 default:
2899 gcc_unreachable ();
2903 /* Return a tree for the comparison which is the combination of
2904 doing the AND or OR (depending on CODE) of the two operations LCODE
2905 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2906 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2907 if this makes the transformation invalid. */
2909 tree
2910 combine_comparisons (enum tree_code code, enum tree_code lcode,
2911 enum tree_code rcode, tree truth_type,
2912 tree ll_arg, tree lr_arg)
2914 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2915 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2916 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2917 enum comparison_code compcode;
2919 switch (code)
2921 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2922 compcode = lcompcode & rcompcode;
2923 break;
2925 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2926 compcode = lcompcode | rcompcode;
2927 break;
2929 default:
2930 return NULL_TREE;
2933 if (!honor_nans)
2935 /* Eliminate unordered comparisons, as well as LTGT and ORD
2936 which are not used unless the mode has NaNs. */
2937 compcode &= ~COMPCODE_UNORD;
2938 if (compcode == COMPCODE_LTGT)
2939 compcode = COMPCODE_NE;
2940 else if (compcode == COMPCODE_ORD)
2941 compcode = COMPCODE_TRUE;
2943 else if (flag_trapping_math)
2945 /* Check that the original operation and the optimized ones will trap
2946 under the same condition. */
2947 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2948 && (lcompcode != COMPCODE_EQ)
2949 && (lcompcode != COMPCODE_ORD);
2950 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2951 && (rcompcode != COMPCODE_EQ)
2952 && (rcompcode != COMPCODE_ORD);
2953 bool trap = (compcode & COMPCODE_UNORD) == 0
2954 && (compcode != COMPCODE_EQ)
2955 && (compcode != COMPCODE_ORD);
2957 /* In a short-circuited boolean expression the LHS might be
2958 such that the RHS, if evaluated, will never trap. For
2959 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2960 if neither x nor y is NaN. (This is a mixed blessing: for
2961 example, the expression above will never trap, hence
2962 optimizing it to x < y would be invalid). */
2963 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2964 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2965 rtrap = false;
2967 /* If the comparison was short-circuited, and only the RHS
2968 trapped, we may now generate a spurious trap. */
2969 if (rtrap && !ltrap
2970 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2971 return NULL_TREE;
2973 /* If we changed the conditions that cause a trap, we lose. */
2974 if ((ltrap || rtrap) != trap)
2975 return NULL_TREE;
2978 if (compcode == COMPCODE_TRUE)
2979 return constant_boolean_node (true, truth_type);
2980 else if (compcode == COMPCODE_FALSE)
2981 return constant_boolean_node (false, truth_type);
2982 else
2983 return fold_build2 (compcode_to_comparison (compcode),
2984 truth_type, ll_arg, lr_arg);
2987 /* Return nonzero if two operands (typically of the same tree node)
2988 are necessarily equal. If either argument has side-effects this
2989 function returns zero. FLAGS modifies behavior as follows:
2991 If OEP_ONLY_CONST is set, only return nonzero for constants.
2992 This function tests whether the operands are indistinguishable;
2993 it does not test whether they are equal using C's == operation.
2994 The distinction is important for IEEE floating point, because
2995 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2996 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2998 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2999 even though it may hold multiple values during a function.
3000 This is because a GCC tree node guarantees that nothing else is
3001 executed between the evaluation of its "operands" (which may often
3002 be evaluated in arbitrary order). Hence if the operands themselves
3003 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3004 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3005 unset means assuming isochronic (or instantaneous) tree equivalence.
3006 Unless comparing arbitrary expression trees, such as from different
3007 statements, this flag can usually be left unset.
3009 If OEP_PURE_SAME is set, then pure functions with identical arguments
3010 are considered the same. It is used when the caller has other ways
3011 to ensure that global memory is unchanged in between. */
3014 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3016 /* If either is ERROR_MARK, they aren't equal. */
3017 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3018 return 0;
3020 /* Check equality of integer constants before bailing out due to
3021 precision differences. */
3022 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3023 return tree_int_cst_equal (arg0, arg1);
3025 /* If both types don't have the same signedness, then we can't consider
3026 them equal. We must check this before the STRIP_NOPS calls
3027 because they may change the signedness of the arguments. As pointers
3028 strictly don't have a signedness, require either two pointers or
3029 two non-pointers as well. */
3030 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3031 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3032 return 0;
3034 /* If both types don't have the same precision, then it is not safe
3035 to strip NOPs. */
3036 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3037 return 0;
3039 STRIP_NOPS (arg0);
3040 STRIP_NOPS (arg1);
3042 /* In case both args are comparisons but with different comparison
3043 code, try to swap the comparison operands of one arg to produce
3044 a match and compare that variant. */
3045 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3046 && COMPARISON_CLASS_P (arg0)
3047 && COMPARISON_CLASS_P (arg1))
3049 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3051 if (TREE_CODE (arg0) == swap_code)
3052 return operand_equal_p (TREE_OPERAND (arg0, 0),
3053 TREE_OPERAND (arg1, 1), flags)
3054 && operand_equal_p (TREE_OPERAND (arg0, 1),
3055 TREE_OPERAND (arg1, 0), flags);
3058 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3059 /* This is needed for conversions and for COMPONENT_REF.
3060 Might as well play it safe and always test this. */
3061 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3062 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3063 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3064 return 0;
3066 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3067 We don't care about side effects in that case because the SAVE_EXPR
3068 takes care of that for us. In all other cases, two expressions are
3069 equal if they have no side effects. If we have two identical
3070 expressions with side effects that should be treated the same due
3071 to the only side effects being identical SAVE_EXPR's, that will
3072 be detected in the recursive calls below. */
3073 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3074 && (TREE_CODE (arg0) == SAVE_EXPR
3075 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3076 return 1;
3078 /* Next handle constant cases, those for which we can return 1 even
3079 if ONLY_CONST is set. */
3080 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3081 switch (TREE_CODE (arg0))
3083 case INTEGER_CST:
3084 return tree_int_cst_equal (arg0, arg1);
3086 case FIXED_CST:
3087 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3088 TREE_FIXED_CST (arg1));
3090 case REAL_CST:
3091 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3092 TREE_REAL_CST (arg1)))
3093 return 1;
3096 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3098 /* If we do not distinguish between signed and unsigned zero,
3099 consider them equal. */
3100 if (real_zerop (arg0) && real_zerop (arg1))
3101 return 1;
3103 return 0;
3105 case VECTOR_CST:
3107 tree v1, v2;
3109 v1 = TREE_VECTOR_CST_ELTS (arg0);
3110 v2 = TREE_VECTOR_CST_ELTS (arg1);
3111 while (v1 && v2)
3113 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3114 flags))
3115 return 0;
3116 v1 = TREE_CHAIN (v1);
3117 v2 = TREE_CHAIN (v2);
3120 return v1 == v2;
3123 case COMPLEX_CST:
3124 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3125 flags)
3126 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3127 flags));
3129 case STRING_CST:
3130 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3131 && ! memcmp (TREE_STRING_POINTER (arg0),
3132 TREE_STRING_POINTER (arg1),
3133 TREE_STRING_LENGTH (arg0)));
3135 case ADDR_EXPR:
3136 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3138 default:
3139 break;
3142 if (flags & OEP_ONLY_CONST)
3143 return 0;
3145 /* Define macros to test an operand from arg0 and arg1 for equality and a
3146 variant that allows null and views null as being different from any
3147 non-null value. In the latter case, if either is null, the both
3148 must be; otherwise, do the normal comparison. */
3149 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3150 TREE_OPERAND (arg1, N), flags)
3152 #define OP_SAME_WITH_NULL(N) \
3153 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3154 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3156 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3158 case tcc_unary:
3159 /* Two conversions are equal only if signedness and modes match. */
3160 switch (TREE_CODE (arg0))
3162 CASE_CONVERT:
3163 case FIX_TRUNC_EXPR:
3164 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3165 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3166 return 0;
3167 break;
3168 default:
3169 break;
3172 return OP_SAME (0);
3175 case tcc_comparison:
3176 case tcc_binary:
3177 if (OP_SAME (0) && OP_SAME (1))
3178 return 1;
3180 /* For commutative ops, allow the other order. */
3181 return (commutative_tree_code (TREE_CODE (arg0))
3182 && operand_equal_p (TREE_OPERAND (arg0, 0),
3183 TREE_OPERAND (arg1, 1), flags)
3184 && operand_equal_p (TREE_OPERAND (arg0, 1),
3185 TREE_OPERAND (arg1, 0), flags));
3187 case tcc_reference:
3188 /* If either of the pointer (or reference) expressions we are
3189 dereferencing contain a side effect, these cannot be equal. */
3190 if (TREE_SIDE_EFFECTS (arg0)
3191 || TREE_SIDE_EFFECTS (arg1))
3192 return 0;
3194 switch (TREE_CODE (arg0))
3196 case INDIRECT_REF:
3197 case ALIGN_INDIRECT_REF:
3198 case MISALIGNED_INDIRECT_REF:
3199 case REALPART_EXPR:
3200 case IMAGPART_EXPR:
3201 return OP_SAME (0);
3203 case ARRAY_REF:
3204 case ARRAY_RANGE_REF:
3205 /* Operands 2 and 3 may be null.
3206 Compare the array index by value if it is constant first as we
3207 may have different types but same value here. */
3208 return (OP_SAME (0)
3209 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 1))
3211 || OP_SAME (1))
3212 && OP_SAME_WITH_NULL (2)
3213 && OP_SAME_WITH_NULL (3));
3215 case COMPONENT_REF:
3216 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3217 may be NULL when we're called to compare MEM_EXPRs. */
3218 return OP_SAME_WITH_NULL (0)
3219 && OP_SAME (1)
3220 && OP_SAME_WITH_NULL (2);
3222 case BIT_FIELD_REF:
3223 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3225 default:
3226 return 0;
3229 case tcc_expression:
3230 switch (TREE_CODE (arg0))
3232 case ADDR_EXPR:
3233 case TRUTH_NOT_EXPR:
3234 return OP_SAME (0);
3236 case TRUTH_ANDIF_EXPR:
3237 case TRUTH_ORIF_EXPR:
3238 return OP_SAME (0) && OP_SAME (1);
3240 case TRUTH_AND_EXPR:
3241 case TRUTH_OR_EXPR:
3242 case TRUTH_XOR_EXPR:
3243 if (OP_SAME (0) && OP_SAME (1))
3244 return 1;
3246 /* Otherwise take into account this is a commutative operation. */
3247 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3248 TREE_OPERAND (arg1, 1), flags)
3249 && operand_equal_p (TREE_OPERAND (arg0, 1),
3250 TREE_OPERAND (arg1, 0), flags));
3252 case COND_EXPR:
3253 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3255 default:
3256 return 0;
3259 case tcc_vl_exp:
3260 switch (TREE_CODE (arg0))
3262 case CALL_EXPR:
3263 /* If the CALL_EXPRs call different functions, then they
3264 clearly can not be equal. */
3265 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3266 flags))
3267 return 0;
3270 unsigned int cef = call_expr_flags (arg0);
3271 if (flags & OEP_PURE_SAME)
3272 cef &= ECF_CONST | ECF_PURE;
3273 else
3274 cef &= ECF_CONST;
3275 if (!cef)
3276 return 0;
3279 /* Now see if all the arguments are the same. */
3281 const_call_expr_arg_iterator iter0, iter1;
3282 const_tree a0, a1;
3283 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3284 a1 = first_const_call_expr_arg (arg1, &iter1);
3285 a0 && a1;
3286 a0 = next_const_call_expr_arg (&iter0),
3287 a1 = next_const_call_expr_arg (&iter1))
3288 if (! operand_equal_p (a0, a1, flags))
3289 return 0;
3291 /* If we get here and both argument lists are exhausted
3292 then the CALL_EXPRs are equal. */
3293 return ! (a0 || a1);
3295 default:
3296 return 0;
3299 case tcc_declaration:
3300 /* Consider __builtin_sqrt equal to sqrt. */
3301 return (TREE_CODE (arg0) == FUNCTION_DECL
3302 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3303 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3304 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3306 default:
3307 return 0;
3310 #undef OP_SAME
3311 #undef OP_SAME_WITH_NULL
3314 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3315 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3317 When in doubt, return 0. */
3319 static int
3320 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3322 int unsignedp1, unsignedpo;
3323 tree primarg0, primarg1, primother;
3324 unsigned int correct_width;
3326 if (operand_equal_p (arg0, arg1, 0))
3327 return 1;
3329 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3330 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3331 return 0;
3333 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3334 and see if the inner values are the same. This removes any
3335 signedness comparison, which doesn't matter here. */
3336 primarg0 = arg0, primarg1 = arg1;
3337 STRIP_NOPS (primarg0);
3338 STRIP_NOPS (primarg1);
3339 if (operand_equal_p (primarg0, primarg1, 0))
3340 return 1;
3342 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3343 actual comparison operand, ARG0.
3345 First throw away any conversions to wider types
3346 already present in the operands. */
3348 primarg1 = get_narrower (arg1, &unsignedp1);
3349 primother = get_narrower (other, &unsignedpo);
3351 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3352 if (unsignedp1 == unsignedpo
3353 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3354 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3356 tree type = TREE_TYPE (arg0);
3358 /* Make sure shorter operand is extended the right way
3359 to match the longer operand. */
3360 primarg1 = fold_convert (signed_or_unsigned_type_for
3361 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3363 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3364 return 1;
3367 return 0;
3370 /* See if ARG is an expression that is either a comparison or is performing
3371 arithmetic on comparisons. The comparisons must only be comparing
3372 two different values, which will be stored in *CVAL1 and *CVAL2; if
3373 they are nonzero it means that some operands have already been found.
3374 No variables may be used anywhere else in the expression except in the
3375 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3376 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3378 If this is true, return 1. Otherwise, return zero. */
3380 static int
3381 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3383 enum tree_code code = TREE_CODE (arg);
3384 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3386 /* We can handle some of the tcc_expression cases here. */
3387 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3388 tclass = tcc_unary;
3389 else if (tclass == tcc_expression
3390 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3391 || code == COMPOUND_EXPR))
3392 tclass = tcc_binary;
3394 else if (tclass == tcc_expression && code == SAVE_EXPR
3395 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3397 /* If we've already found a CVAL1 or CVAL2, this expression is
3398 two complex to handle. */
3399 if (*cval1 || *cval2)
3400 return 0;
3402 tclass = tcc_unary;
3403 *save_p = 1;
3406 switch (tclass)
3408 case tcc_unary:
3409 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3411 case tcc_binary:
3412 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3413 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3414 cval1, cval2, save_p));
3416 case tcc_constant:
3417 return 1;
3419 case tcc_expression:
3420 if (code == COND_EXPR)
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3422 cval1, cval2, save_p)
3423 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3424 cval1, cval2, save_p)
3425 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3426 cval1, cval2, save_p));
3427 return 0;
3429 case tcc_comparison:
3430 /* First see if we can handle the first operand, then the second. For
3431 the second operand, we know *CVAL1 can't be zero. It must be that
3432 one side of the comparison is each of the values; test for the
3433 case where this isn't true by failing if the two operands
3434 are the same. */
3436 if (operand_equal_p (TREE_OPERAND (arg, 0),
3437 TREE_OPERAND (arg, 1), 0))
3438 return 0;
3440 if (*cval1 == 0)
3441 *cval1 = TREE_OPERAND (arg, 0);
3442 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3444 else if (*cval2 == 0)
3445 *cval2 = TREE_OPERAND (arg, 0);
3446 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3448 else
3449 return 0;
3451 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 1);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3457 else
3458 return 0;
3460 return 1;
3462 default:
3463 return 0;
3467 /* ARG is a tree that is known to contain just arithmetic operations and
3468 comparisons. Evaluate the operations in the tree substituting NEW0 for
3469 any occurrence of OLD0 as an operand of a comparison and likewise for
3470 NEW1 and OLD1. */
3472 static tree
3473 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3475 tree type = TREE_TYPE (arg);
3476 enum tree_code code = TREE_CODE (arg);
3477 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3479 /* We can handle some of the tcc_expression cases here. */
3480 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3481 tclass = tcc_unary;
3482 else if (tclass == tcc_expression
3483 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3484 tclass = tcc_binary;
3486 switch (tclass)
3488 case tcc_unary:
3489 return fold_build1 (code, type,
3490 eval_subst (TREE_OPERAND (arg, 0),
3491 old0, new0, old1, new1));
3493 case tcc_binary:
3494 return fold_build2 (code, type,
3495 eval_subst (TREE_OPERAND (arg, 0),
3496 old0, new0, old1, new1),
3497 eval_subst (TREE_OPERAND (arg, 1),
3498 old0, new0, old1, new1));
3500 case tcc_expression:
3501 switch (code)
3503 case SAVE_EXPR:
3504 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3506 case COMPOUND_EXPR:
3507 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3509 case COND_EXPR:
3510 return fold_build3 (code, type,
3511 eval_subst (TREE_OPERAND (arg, 0),
3512 old0, new0, old1, new1),
3513 eval_subst (TREE_OPERAND (arg, 1),
3514 old0, new0, old1, new1),
3515 eval_subst (TREE_OPERAND (arg, 2),
3516 old0, new0, old1, new1));
3517 default:
3518 break;
3520 /* Fall through - ??? */
3522 case tcc_comparison:
3524 tree arg0 = TREE_OPERAND (arg, 0);
3525 tree arg1 = TREE_OPERAND (arg, 1);
3527 /* We need to check both for exact equality and tree equality. The
3528 former will be true if the operand has a side-effect. In that
3529 case, we know the operand occurred exactly once. */
3531 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3532 arg0 = new0;
3533 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3534 arg0 = new1;
3536 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3537 arg1 = new0;
3538 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3539 arg1 = new1;
3541 return fold_build2 (code, type, arg0, arg1);
3544 default:
3545 return arg;
3549 /* Return a tree for the case when the result of an expression is RESULT
3550 converted to TYPE and OMITTED was previously an operand of the expression
3551 but is now not needed (e.g., we folded OMITTED * 0).
3553 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3554 the conversion of RESULT to TYPE. */
3556 tree
3557 omit_one_operand (tree type, tree result, tree omitted)
3559 tree t = fold_convert (type, result);
3561 /* If the resulting operand is an empty statement, just return the omitted
3562 statement casted to void. */
3563 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3564 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3566 if (TREE_SIDE_EFFECTS (omitted))
3567 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3569 return non_lvalue (t);
3572 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3574 static tree
3575 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3577 tree t = fold_convert (type, result);
3579 /* If the resulting operand is an empty statement, just return the omitted
3580 statement casted to void. */
3581 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3582 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3584 if (TREE_SIDE_EFFECTS (omitted))
3585 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3587 return pedantic_non_lvalue (t);
3590 /* Return a tree for the case when the result of an expression is RESULT
3591 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3592 of the expression but are now not needed.
3594 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3595 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3596 evaluated before OMITTED2. Otherwise, if neither has side effects,
3597 just do the conversion of RESULT to TYPE. */
3599 tree
3600 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3602 tree t = fold_convert (type, result);
3604 if (TREE_SIDE_EFFECTS (omitted2))
3605 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3606 if (TREE_SIDE_EFFECTS (omitted1))
3607 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3609 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3613 /* Return a simplified tree node for the truth-negation of ARG. This
3614 never alters ARG itself. We assume that ARG is an operation that
3615 returns a truth value (0 or 1).
3617 FIXME: one would think we would fold the result, but it causes
3618 problems with the dominator optimizer. */
3620 tree
3621 fold_truth_not_expr (tree arg)
3623 tree type = TREE_TYPE (arg);
3624 enum tree_code code = TREE_CODE (arg);
3626 /* If this is a comparison, we can simply invert it, except for
3627 floating-point non-equality comparisons, in which case we just
3628 enclose a TRUTH_NOT_EXPR around what we have. */
3630 if (TREE_CODE_CLASS (code) == tcc_comparison)
3632 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3633 if (FLOAT_TYPE_P (op_type)
3634 && flag_trapping_math
3635 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3636 && code != NE_EXPR && code != EQ_EXPR)
3637 return NULL_TREE;
3638 else
3640 code = invert_tree_comparison (code,
3641 HONOR_NANS (TYPE_MODE (op_type)));
3642 if (code == ERROR_MARK)
3643 return NULL_TREE;
3644 else
3645 return build2 (code, type,
3646 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3650 switch (code)
3652 case INTEGER_CST:
3653 return constant_boolean_node (integer_zerop (arg), type);
3655 case TRUTH_AND_EXPR:
3656 return build2 (TRUTH_OR_EXPR, type,
3657 invert_truthvalue (TREE_OPERAND (arg, 0)),
3658 invert_truthvalue (TREE_OPERAND (arg, 1)));
3660 case TRUTH_OR_EXPR:
3661 return build2 (TRUTH_AND_EXPR, type,
3662 invert_truthvalue (TREE_OPERAND (arg, 0)),
3663 invert_truthvalue (TREE_OPERAND (arg, 1)));
3665 case TRUTH_XOR_EXPR:
3666 /* Here we can invert either operand. We invert the first operand
3667 unless the second operand is a TRUTH_NOT_EXPR in which case our
3668 result is the XOR of the first operand with the inside of the
3669 negation of the second operand. */
3671 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3672 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3673 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3674 else
3675 return build2 (TRUTH_XOR_EXPR, type,
3676 invert_truthvalue (TREE_OPERAND (arg, 0)),
3677 TREE_OPERAND (arg, 1));
3679 case TRUTH_ANDIF_EXPR:
3680 return build2 (TRUTH_ORIF_EXPR, type,
3681 invert_truthvalue (TREE_OPERAND (arg, 0)),
3682 invert_truthvalue (TREE_OPERAND (arg, 1)));
3684 case TRUTH_ORIF_EXPR:
3685 return build2 (TRUTH_ANDIF_EXPR, type,
3686 invert_truthvalue (TREE_OPERAND (arg, 0)),
3687 invert_truthvalue (TREE_OPERAND (arg, 1)));
3689 case TRUTH_NOT_EXPR:
3690 return TREE_OPERAND (arg, 0);
3692 case COND_EXPR:
3694 tree arg1 = TREE_OPERAND (arg, 1);
3695 tree arg2 = TREE_OPERAND (arg, 2);
3696 /* A COND_EXPR may have a throw as one operand, which
3697 then has void type. Just leave void operands
3698 as they are. */
3699 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3700 VOID_TYPE_P (TREE_TYPE (arg1))
3701 ? arg1 : invert_truthvalue (arg1),
3702 VOID_TYPE_P (TREE_TYPE (arg2))
3703 ? arg2 : invert_truthvalue (arg2));
3706 case COMPOUND_EXPR:
3707 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3708 invert_truthvalue (TREE_OPERAND (arg, 1)));
3710 case NON_LVALUE_EXPR:
3711 return invert_truthvalue (TREE_OPERAND (arg, 0));
3713 case NOP_EXPR:
3714 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3715 return build1 (TRUTH_NOT_EXPR, type, arg);
3717 case CONVERT_EXPR:
3718 case FLOAT_EXPR:
3719 return build1 (TREE_CODE (arg), type,
3720 invert_truthvalue (TREE_OPERAND (arg, 0)));
3722 case BIT_AND_EXPR:
3723 if (!integer_onep (TREE_OPERAND (arg, 1)))
3724 break;
3725 return build2 (EQ_EXPR, type, arg,
3726 build_int_cst (type, 0));
3728 case SAVE_EXPR:
3729 return build1 (TRUTH_NOT_EXPR, type, arg);
3731 case CLEANUP_POINT_EXPR:
3732 return build1 (CLEANUP_POINT_EXPR, type,
3733 invert_truthvalue (TREE_OPERAND (arg, 0)));
3735 default:
3736 break;
3739 return NULL_TREE;
3742 /* Return a simplified tree node for the truth-negation of ARG. This
3743 never alters ARG itself. We assume that ARG is an operation that
3744 returns a truth value (0 or 1).
3746 FIXME: one would think we would fold the result, but it causes
3747 problems with the dominator optimizer. */
3749 tree
3750 invert_truthvalue (tree arg)
3752 tree tem;
3754 if (TREE_CODE (arg) == ERROR_MARK)
3755 return arg;
3757 tem = fold_truth_not_expr (arg);
3758 if (!tem)
3759 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3761 return tem;
3764 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3765 operands are another bit-wise operation with a common input. If so,
3766 distribute the bit operations to save an operation and possibly two if
3767 constants are involved. For example, convert
3768 (A | B) & (A | C) into A | (B & C)
3769 Further simplification will occur if B and C are constants.
3771 If this optimization cannot be done, 0 will be returned. */
3773 static tree
3774 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3776 tree common;
3777 tree left, right;
3779 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3780 || TREE_CODE (arg0) == code
3781 || (TREE_CODE (arg0) != BIT_AND_EXPR
3782 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3783 return 0;
3785 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3787 common = TREE_OPERAND (arg0, 0);
3788 left = TREE_OPERAND (arg0, 1);
3789 right = TREE_OPERAND (arg1, 1);
3791 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3793 common = TREE_OPERAND (arg0, 0);
3794 left = TREE_OPERAND (arg0, 1);
3795 right = TREE_OPERAND (arg1, 0);
3797 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3799 common = TREE_OPERAND (arg0, 1);
3800 left = TREE_OPERAND (arg0, 0);
3801 right = TREE_OPERAND (arg1, 1);
3803 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3805 common = TREE_OPERAND (arg0, 1);
3806 left = TREE_OPERAND (arg0, 0);
3807 right = TREE_OPERAND (arg1, 0);
3809 else
3810 return 0;
3812 common = fold_convert (type, common);
3813 left = fold_convert (type, left);
3814 right = fold_convert (type, right);
3815 return fold_build2 (TREE_CODE (arg0), type, common,
3816 fold_build2 (code, type, left, right));
3819 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3820 with code CODE. This optimization is unsafe. */
3821 static tree
3822 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3824 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3825 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3827 /* (A / C) +- (B / C) -> (A +- B) / C. */
3828 if (mul0 == mul1
3829 && operand_equal_p (TREE_OPERAND (arg0, 1),
3830 TREE_OPERAND (arg1, 1), 0))
3831 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3832 fold_build2 (code, type,
3833 TREE_OPERAND (arg0, 0),
3834 TREE_OPERAND (arg1, 0)),
3835 TREE_OPERAND (arg0, 1));
3837 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3838 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3839 TREE_OPERAND (arg1, 0), 0)
3840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3841 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3843 REAL_VALUE_TYPE r0, r1;
3844 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3845 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3846 if (!mul0)
3847 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3848 if (!mul1)
3849 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3850 real_arithmetic (&r0, code, &r0, &r1);
3851 return fold_build2 (MULT_EXPR, type,
3852 TREE_OPERAND (arg0, 0),
3853 build_real (type, r0));
3856 return NULL_TREE;
3859 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3860 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3862 static tree
3863 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3864 HOST_WIDE_INT bitpos, int unsignedp)
3866 tree result, bftype;
3868 if (bitpos == 0)
3870 tree size = TYPE_SIZE (TREE_TYPE (inner));
3871 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3872 || POINTER_TYPE_P (TREE_TYPE (inner)))
3873 && host_integerp (size, 0)
3874 && tree_low_cst (size, 0) == bitsize)
3875 return fold_convert (type, inner);
3878 bftype = type;
3879 if (TYPE_PRECISION (bftype) != bitsize
3880 || TYPE_UNSIGNED (bftype) == !unsignedp)
3881 bftype = build_nonstandard_integer_type (bitsize, 0);
3883 result = build3 (BIT_FIELD_REF, bftype, inner,
3884 size_int (bitsize), bitsize_int (bitpos));
3886 if (bftype != type)
3887 result = fold_convert (type, result);
3889 return result;
3892 /* Optimize a bit-field compare.
3894 There are two cases: First is a compare against a constant and the
3895 second is a comparison of two items where the fields are at the same
3896 bit position relative to the start of a chunk (byte, halfword, word)
3897 large enough to contain it. In these cases we can avoid the shift
3898 implicit in bitfield extractions.
3900 For constants, we emit a compare of the shifted constant with the
3901 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3902 compared. For two fields at the same position, we do the ANDs with the
3903 similar mask and compare the result of the ANDs.
3905 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3906 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3907 are the left and right operands of the comparison, respectively.
3909 If the optimization described above can be done, we return the resulting
3910 tree. Otherwise we return zero. */
3912 static tree
3913 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3914 tree lhs, tree rhs)
3916 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3917 tree type = TREE_TYPE (lhs);
3918 tree signed_type, unsigned_type;
3919 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3920 enum machine_mode lmode, rmode, nmode;
3921 int lunsignedp, runsignedp;
3922 int lvolatilep = 0, rvolatilep = 0;
3923 tree linner, rinner = NULL_TREE;
3924 tree mask;
3925 tree offset;
3927 /* Get all the information about the extractions being done. If the bit size
3928 if the same as the size of the underlying object, we aren't doing an
3929 extraction at all and so can do nothing. We also don't want to
3930 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3931 then will no longer be able to replace it. */
3932 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3933 &lunsignedp, &lvolatilep, false);
3934 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3935 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3936 return 0;
3938 if (!const_p)
3940 /* If this is not a constant, we can only do something if bit positions,
3941 sizes, and signedness are the same. */
3942 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3943 &runsignedp, &rvolatilep, false);
3945 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3946 || lunsignedp != runsignedp || offset != 0
3947 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3948 return 0;
3951 /* See if we can find a mode to refer to this field. We should be able to,
3952 but fail if we can't. */
3953 nmode = get_best_mode (lbitsize, lbitpos,
3954 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3955 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3956 TYPE_ALIGN (TREE_TYPE (rinner))),
3957 word_mode, lvolatilep || rvolatilep);
3958 if (nmode == VOIDmode)
3959 return 0;
3961 /* Set signed and unsigned types of the precision of this mode for the
3962 shifts below. */
3963 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3964 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3966 /* Compute the bit position and size for the new reference and our offset
3967 within it. If the new reference is the same size as the original, we
3968 won't optimize anything, so return zero. */
3969 nbitsize = GET_MODE_BITSIZE (nmode);
3970 nbitpos = lbitpos & ~ (nbitsize - 1);
3971 lbitpos -= nbitpos;
3972 if (nbitsize == lbitsize)
3973 return 0;
3975 if (BYTES_BIG_ENDIAN)
3976 lbitpos = nbitsize - lbitsize - lbitpos;
3978 /* Make the mask to be used against the extracted field. */
3979 mask = build_int_cst_type (unsigned_type, -1);
3980 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3981 mask = const_binop (RSHIFT_EXPR, mask,
3982 size_int (nbitsize - lbitsize - lbitpos), 0);
3984 if (! const_p)
3985 /* If not comparing with constant, just rework the comparison
3986 and return. */
3987 return fold_build2 (code, compare_type,
3988 fold_build2 (BIT_AND_EXPR, unsigned_type,
3989 make_bit_field_ref (linner,
3990 unsigned_type,
3991 nbitsize, nbitpos,
3993 mask),
3994 fold_build2 (BIT_AND_EXPR, unsigned_type,
3995 make_bit_field_ref (rinner,
3996 unsigned_type,
3997 nbitsize, nbitpos,
3999 mask));
4001 /* Otherwise, we are handling the constant case. See if the constant is too
4002 big for the field. Warn and return a tree of for 0 (false) if so. We do
4003 this not only for its own sake, but to avoid having to test for this
4004 error case below. If we didn't, we might generate wrong code.
4006 For unsigned fields, the constant shifted right by the field length should
4007 be all zero. For signed fields, the high-order bits should agree with
4008 the sign bit. */
4010 if (lunsignedp)
4012 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4013 fold_convert (unsigned_type, rhs),
4014 size_int (lbitsize), 0)))
4016 warning (0, "comparison is always %d due to width of bit-field",
4017 code == NE_EXPR);
4018 return constant_boolean_node (code == NE_EXPR, compare_type);
4021 else
4023 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4024 size_int (lbitsize - 1), 0);
4025 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4027 warning (0, "comparison is always %d due to width of bit-field",
4028 code == NE_EXPR);
4029 return constant_boolean_node (code == NE_EXPR, compare_type);
4033 /* Single-bit compares should always be against zero. */
4034 if (lbitsize == 1 && ! integer_zerop (rhs))
4036 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4037 rhs = build_int_cst (type, 0);
4040 /* Make a new bitfield reference, shift the constant over the
4041 appropriate number of bits and mask it with the computed mask
4042 (in case this was a signed field). If we changed it, make a new one. */
4043 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4044 if (lvolatilep)
4046 TREE_SIDE_EFFECTS (lhs) = 1;
4047 TREE_THIS_VOLATILE (lhs) = 1;
4050 rhs = const_binop (BIT_AND_EXPR,
4051 const_binop (LSHIFT_EXPR,
4052 fold_convert (unsigned_type, rhs),
4053 size_int (lbitpos), 0),
4054 mask, 0);
4056 return build2 (code, compare_type,
4057 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4058 rhs);
4061 /* Subroutine for fold_truthop: decode a field reference.
4063 If EXP is a comparison reference, we return the innermost reference.
4065 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4066 set to the starting bit number.
4068 If the innermost field can be completely contained in a mode-sized
4069 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4071 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4072 otherwise it is not changed.
4074 *PUNSIGNEDP is set to the signedness of the field.
4076 *PMASK is set to the mask used. This is either contained in a
4077 BIT_AND_EXPR or derived from the width of the field.
4079 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4081 Return 0 if this is not a component reference or is one that we can't
4082 do anything with. */
4084 static tree
4085 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4086 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4087 int *punsignedp, int *pvolatilep,
4088 tree *pmask, tree *pand_mask)
4090 tree outer_type = 0;
4091 tree and_mask = 0;
4092 tree mask, inner, offset;
4093 tree unsigned_type;
4094 unsigned int precision;
4096 /* All the optimizations using this function assume integer fields.
4097 There are problems with FP fields since the type_for_size call
4098 below can fail for, e.g., XFmode. */
4099 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4100 return 0;
4102 /* We are interested in the bare arrangement of bits, so strip everything
4103 that doesn't affect the machine mode. However, record the type of the
4104 outermost expression if it may matter below. */
4105 if (CONVERT_EXPR_P (exp)
4106 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4107 outer_type = TREE_TYPE (exp);
4108 STRIP_NOPS (exp);
4110 if (TREE_CODE (exp) == BIT_AND_EXPR)
4112 and_mask = TREE_OPERAND (exp, 1);
4113 exp = TREE_OPERAND (exp, 0);
4114 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4115 if (TREE_CODE (and_mask) != INTEGER_CST)
4116 return 0;
4119 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4120 punsignedp, pvolatilep, false);
4121 if ((inner == exp && and_mask == 0)
4122 || *pbitsize < 0 || offset != 0
4123 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4124 return 0;
4126 /* If the number of bits in the reference is the same as the bitsize of
4127 the outer type, then the outer type gives the signedness. Otherwise
4128 (in case of a small bitfield) the signedness is unchanged. */
4129 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4130 *punsignedp = TYPE_UNSIGNED (outer_type);
4132 /* Compute the mask to access the bitfield. */
4133 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4134 precision = TYPE_PRECISION (unsigned_type);
4136 mask = build_int_cst_type (unsigned_type, -1);
4138 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4139 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4141 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4142 if (and_mask != 0)
4143 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4144 fold_convert (unsigned_type, and_mask), mask);
4146 *pmask = mask;
4147 *pand_mask = and_mask;
4148 return inner;
4151 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4152 bit positions. */
4154 static int
4155 all_ones_mask_p (const_tree mask, int size)
4157 tree type = TREE_TYPE (mask);
4158 unsigned int precision = TYPE_PRECISION (type);
4159 tree tmask;
4161 tmask = build_int_cst_type (signed_type_for (type), -1);
4163 return
4164 tree_int_cst_equal (mask,
4165 const_binop (RSHIFT_EXPR,
4166 const_binop (LSHIFT_EXPR, tmask,
4167 size_int (precision - size),
4169 size_int (precision - size), 0));
4172 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4173 represents the sign bit of EXP's type. If EXP represents a sign
4174 or zero extension, also test VAL against the unextended type.
4175 The return value is the (sub)expression whose sign bit is VAL,
4176 or NULL_TREE otherwise. */
4178 static tree
4179 sign_bit_p (tree exp, const_tree val)
4181 unsigned HOST_WIDE_INT mask_lo, lo;
4182 HOST_WIDE_INT mask_hi, hi;
4183 int width;
4184 tree t;
4186 /* Tree EXP must have an integral type. */
4187 t = TREE_TYPE (exp);
4188 if (! INTEGRAL_TYPE_P (t))
4189 return NULL_TREE;
4191 /* Tree VAL must be an integer constant. */
4192 if (TREE_CODE (val) != INTEGER_CST
4193 || TREE_OVERFLOW (val))
4194 return NULL_TREE;
4196 width = TYPE_PRECISION (t);
4197 if (width > HOST_BITS_PER_WIDE_INT)
4199 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4200 lo = 0;
4202 mask_hi = ((unsigned HOST_WIDE_INT) -1
4203 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4204 mask_lo = -1;
4206 else
4208 hi = 0;
4209 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4211 mask_hi = 0;
4212 mask_lo = ((unsigned HOST_WIDE_INT) -1
4213 >> (HOST_BITS_PER_WIDE_INT - width));
4216 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4217 treat VAL as if it were unsigned. */
4218 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4219 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4220 return exp;
4222 /* Handle extension from a narrower type. */
4223 if (TREE_CODE (exp) == NOP_EXPR
4224 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4225 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4227 return NULL_TREE;
4230 /* Subroutine for fold_truthop: determine if an operand is simple enough
4231 to be evaluated unconditionally. */
4233 static int
4234 simple_operand_p (const_tree exp)
4236 /* Strip any conversions that don't change the machine mode. */
4237 STRIP_NOPS (exp);
4239 return (CONSTANT_CLASS_P (exp)
4240 || TREE_CODE (exp) == SSA_NAME
4241 || (DECL_P (exp)
4242 && ! TREE_ADDRESSABLE (exp)
4243 && ! TREE_THIS_VOLATILE (exp)
4244 && ! DECL_NONLOCAL (exp)
4245 /* Don't regard global variables as simple. They may be
4246 allocated in ways unknown to the compiler (shared memory,
4247 #pragma weak, etc). */
4248 && ! TREE_PUBLIC (exp)
4249 && ! DECL_EXTERNAL (exp)
4250 /* Loading a static variable is unduly expensive, but global
4251 registers aren't expensive. */
4252 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4255 /* The following functions are subroutines to fold_range_test and allow it to
4256 try to change a logical combination of comparisons into a range test.
4258 For example, both
4259 X == 2 || X == 3 || X == 4 || X == 5
4261 X >= 2 && X <= 5
4262 are converted to
4263 (unsigned) (X - 2) <= 3
4265 We describe each set of comparisons as being either inside or outside
4266 a range, using a variable named like IN_P, and then describe the
4267 range with a lower and upper bound. If one of the bounds is omitted,
4268 it represents either the highest or lowest value of the type.
4270 In the comments below, we represent a range by two numbers in brackets
4271 preceded by a "+" to designate being inside that range, or a "-" to
4272 designate being outside that range, so the condition can be inverted by
4273 flipping the prefix. An omitted bound is represented by a "-". For
4274 example, "- [-, 10]" means being outside the range starting at the lowest
4275 possible value and ending at 10, in other words, being greater than 10.
4276 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4277 always false.
4279 We set up things so that the missing bounds are handled in a consistent
4280 manner so neither a missing bound nor "true" and "false" need to be
4281 handled using a special case. */
4283 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4284 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4285 and UPPER1_P are nonzero if the respective argument is an upper bound
4286 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4287 must be specified for a comparison. ARG1 will be converted to ARG0's
4288 type if both are specified. */
4290 static tree
4291 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4292 tree arg1, int upper1_p)
4294 tree tem;
4295 int result;
4296 int sgn0, sgn1;
4298 /* If neither arg represents infinity, do the normal operation.
4299 Else, if not a comparison, return infinity. Else handle the special
4300 comparison rules. Note that most of the cases below won't occur, but
4301 are handled for consistency. */
4303 if (arg0 != 0 && arg1 != 0)
4305 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4306 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4307 STRIP_NOPS (tem);
4308 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4311 if (TREE_CODE_CLASS (code) != tcc_comparison)
4312 return 0;
4314 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4315 for neither. In real maths, we cannot assume open ended ranges are
4316 the same. But, this is computer arithmetic, where numbers are finite.
4317 We can therefore make the transformation of any unbounded range with
4318 the value Z, Z being greater than any representable number. This permits
4319 us to treat unbounded ranges as equal. */
4320 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4321 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4322 switch (code)
4324 case EQ_EXPR:
4325 result = sgn0 == sgn1;
4326 break;
4327 case NE_EXPR:
4328 result = sgn0 != sgn1;
4329 break;
4330 case LT_EXPR:
4331 result = sgn0 < sgn1;
4332 break;
4333 case LE_EXPR:
4334 result = sgn0 <= sgn1;
4335 break;
4336 case GT_EXPR:
4337 result = sgn0 > sgn1;
4338 break;
4339 case GE_EXPR:
4340 result = sgn0 >= sgn1;
4341 break;
4342 default:
4343 gcc_unreachable ();
4346 return constant_boolean_node (result, type);
4349 /* Given EXP, a logical expression, set the range it is testing into
4350 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4351 actually being tested. *PLOW and *PHIGH will be made of the same
4352 type as the returned expression. If EXP is not a comparison, we
4353 will most likely not be returning a useful value and range. Set
4354 *STRICT_OVERFLOW_P to true if the return value is only valid
4355 because signed overflow is undefined; otherwise, do not change
4356 *STRICT_OVERFLOW_P. */
4358 static tree
4359 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4360 bool *strict_overflow_p)
4362 enum tree_code code;
4363 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4364 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4365 int in_p, n_in_p;
4366 tree low, high, n_low, n_high;
4368 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4369 and see if we can refine the range. Some of the cases below may not
4370 happen, but it doesn't seem worth worrying about this. We "continue"
4371 the outer loop when we've changed something; otherwise we "break"
4372 the switch, which will "break" the while. */
4374 in_p = 0;
4375 low = high = build_int_cst (TREE_TYPE (exp), 0);
4377 while (1)
4379 code = TREE_CODE (exp);
4380 exp_type = TREE_TYPE (exp);
4382 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4384 if (TREE_OPERAND_LENGTH (exp) > 0)
4385 arg0 = TREE_OPERAND (exp, 0);
4386 if (TREE_CODE_CLASS (code) == tcc_comparison
4387 || TREE_CODE_CLASS (code) == tcc_unary
4388 || TREE_CODE_CLASS (code) == tcc_binary)
4389 arg0_type = TREE_TYPE (arg0);
4390 if (TREE_CODE_CLASS (code) == tcc_binary
4391 || TREE_CODE_CLASS (code) == tcc_comparison
4392 || (TREE_CODE_CLASS (code) == tcc_expression
4393 && TREE_OPERAND_LENGTH (exp) > 1))
4394 arg1 = TREE_OPERAND (exp, 1);
4397 switch (code)
4399 case TRUTH_NOT_EXPR:
4400 in_p = ! in_p, exp = arg0;
4401 continue;
4403 case EQ_EXPR: case NE_EXPR:
4404 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4405 /* We can only do something if the range is testing for zero
4406 and if the second operand is an integer constant. Note that
4407 saying something is "in" the range we make is done by
4408 complementing IN_P since it will set in the initial case of
4409 being not equal to zero; "out" is leaving it alone. */
4410 if (low == 0 || high == 0
4411 || ! integer_zerop (low) || ! integer_zerop (high)
4412 || TREE_CODE (arg1) != INTEGER_CST)
4413 break;
4415 switch (code)
4417 case NE_EXPR: /* - [c, c] */
4418 low = high = arg1;
4419 break;
4420 case EQ_EXPR: /* + [c, c] */
4421 in_p = ! in_p, low = high = arg1;
4422 break;
4423 case GT_EXPR: /* - [-, c] */
4424 low = 0, high = arg1;
4425 break;
4426 case GE_EXPR: /* + [c, -] */
4427 in_p = ! in_p, low = arg1, high = 0;
4428 break;
4429 case LT_EXPR: /* - [c, -] */
4430 low = arg1, high = 0;
4431 break;
4432 case LE_EXPR: /* + [-, c] */
4433 in_p = ! in_p, low = 0, high = arg1;
4434 break;
4435 default:
4436 gcc_unreachable ();
4439 /* If this is an unsigned comparison, we also know that EXP is
4440 greater than or equal to zero. We base the range tests we make
4441 on that fact, so we record it here so we can parse existing
4442 range tests. We test arg0_type since often the return type
4443 of, e.g. EQ_EXPR, is boolean. */
4444 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4446 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4447 in_p, low, high, 1,
4448 build_int_cst (arg0_type, 0),
4449 NULL_TREE))
4450 break;
4452 in_p = n_in_p, low = n_low, high = n_high;
4454 /* If the high bound is missing, but we have a nonzero low
4455 bound, reverse the range so it goes from zero to the low bound
4456 minus 1. */
4457 if (high == 0 && low && ! integer_zerop (low))
4459 in_p = ! in_p;
4460 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4461 integer_one_node, 0);
4462 low = build_int_cst (arg0_type, 0);
4466 exp = arg0;
4467 continue;
4469 case NEGATE_EXPR:
4470 /* (-x) IN [a,b] -> x in [-b, -a] */
4471 n_low = range_binop (MINUS_EXPR, exp_type,
4472 build_int_cst (exp_type, 0),
4473 0, high, 1);
4474 n_high = range_binop (MINUS_EXPR, exp_type,
4475 build_int_cst (exp_type, 0),
4476 0, low, 0);
4477 low = n_low, high = n_high;
4478 exp = arg0;
4479 continue;
4481 case BIT_NOT_EXPR:
4482 /* ~ X -> -X - 1 */
4483 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4484 build_int_cst (exp_type, 1));
4485 continue;
4487 case PLUS_EXPR: case MINUS_EXPR:
4488 if (TREE_CODE (arg1) != INTEGER_CST)
4489 break;
4491 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4492 move a constant to the other side. */
4493 if (!TYPE_UNSIGNED (arg0_type)
4494 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4495 break;
4497 /* If EXP is signed, any overflow in the computation is undefined,
4498 so we don't worry about it so long as our computations on
4499 the bounds don't overflow. For unsigned, overflow is defined
4500 and this is exactly the right thing. */
4501 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4502 arg0_type, low, 0, arg1, 0);
4503 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4504 arg0_type, high, 1, arg1, 0);
4505 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4506 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4507 break;
4509 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4510 *strict_overflow_p = true;
4512 /* Check for an unsigned range which has wrapped around the maximum
4513 value thus making n_high < n_low, and normalize it. */
4514 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4516 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4517 integer_one_node, 0);
4518 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4519 integer_one_node, 0);
4521 /* If the range is of the form +/- [ x+1, x ], we won't
4522 be able to normalize it. But then, it represents the
4523 whole range or the empty set, so make it
4524 +/- [ -, - ]. */
4525 if (tree_int_cst_equal (n_low, low)
4526 && tree_int_cst_equal (n_high, high))
4527 low = high = 0;
4528 else
4529 in_p = ! in_p;
4531 else
4532 low = n_low, high = n_high;
4534 exp = arg0;
4535 continue;
4537 CASE_CONVERT: case NON_LVALUE_EXPR:
4538 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4539 break;
4541 if (! INTEGRAL_TYPE_P (arg0_type)
4542 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4543 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4544 break;
4546 n_low = low, n_high = high;
4548 if (n_low != 0)
4549 n_low = fold_convert (arg0_type, n_low);
4551 if (n_high != 0)
4552 n_high = fold_convert (arg0_type, n_high);
4555 /* If we're converting arg0 from an unsigned type, to exp,
4556 a signed type, we will be doing the comparison as unsigned.
4557 The tests above have already verified that LOW and HIGH
4558 are both positive.
4560 So we have to ensure that we will handle large unsigned
4561 values the same way that the current signed bounds treat
4562 negative values. */
4564 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4566 tree high_positive;
4567 tree equiv_type;
4568 /* For fixed-point modes, we need to pass the saturating flag
4569 as the 2nd parameter. */
4570 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4571 equiv_type = lang_hooks.types.type_for_mode
4572 (TYPE_MODE (arg0_type),
4573 TYPE_SATURATING (arg0_type));
4574 else
4575 equiv_type = lang_hooks.types.type_for_mode
4576 (TYPE_MODE (arg0_type), 1);
4578 /* A range without an upper bound is, naturally, unbounded.
4579 Since convert would have cropped a very large value, use
4580 the max value for the destination type. */
4581 high_positive
4582 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4583 : TYPE_MAX_VALUE (arg0_type);
4585 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4586 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4587 fold_convert (arg0_type,
4588 high_positive),
4589 build_int_cst (arg0_type, 1));
4591 /* If the low bound is specified, "and" the range with the
4592 range for which the original unsigned value will be
4593 positive. */
4594 if (low != 0)
4596 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4597 1, n_low, n_high, 1,
4598 fold_convert (arg0_type,
4599 integer_zero_node),
4600 high_positive))
4601 break;
4603 in_p = (n_in_p == in_p);
4605 else
4607 /* Otherwise, "or" the range with the range of the input
4608 that will be interpreted as negative. */
4609 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4610 0, n_low, n_high, 1,
4611 fold_convert (arg0_type,
4612 integer_zero_node),
4613 high_positive))
4614 break;
4616 in_p = (in_p != n_in_p);
4620 exp = arg0;
4621 low = n_low, high = n_high;
4622 continue;
4624 default:
4625 break;
4628 break;
4631 /* If EXP is a constant, we can evaluate whether this is true or false. */
4632 if (TREE_CODE (exp) == INTEGER_CST)
4634 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4635 exp, 0, low, 0))
4636 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4637 exp, 1, high, 1)));
4638 low = high = 0;
4639 exp = 0;
4642 *pin_p = in_p, *plow = low, *phigh = high;
4643 return exp;
4646 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4647 type, TYPE, return an expression to test if EXP is in (or out of, depending
4648 on IN_P) the range. Return 0 if the test couldn't be created. */
4650 static tree
4651 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4653 tree etype = TREE_TYPE (exp);
4654 tree value;
4656 #ifdef HAVE_canonicalize_funcptr_for_compare
4657 /* Disable this optimization for function pointer expressions
4658 on targets that require function pointer canonicalization. */
4659 if (HAVE_canonicalize_funcptr_for_compare
4660 && TREE_CODE (etype) == POINTER_TYPE
4661 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4662 return NULL_TREE;
4663 #endif
4665 if (! in_p)
4667 value = build_range_check (type, exp, 1, low, high);
4668 if (value != 0)
4669 return invert_truthvalue (value);
4671 return 0;
4674 if (low == 0 && high == 0)
4675 return build_int_cst (type, 1);
4677 if (low == 0)
4678 return fold_build2 (LE_EXPR, type, exp,
4679 fold_convert (etype, high));
4681 if (high == 0)
4682 return fold_build2 (GE_EXPR, type, exp,
4683 fold_convert (etype, low));
4685 if (operand_equal_p (low, high, 0))
4686 return fold_build2 (EQ_EXPR, type, exp,
4687 fold_convert (etype, low));
4689 if (integer_zerop (low))
4691 if (! TYPE_UNSIGNED (etype))
4693 etype = unsigned_type_for (etype);
4694 high = fold_convert (etype, high);
4695 exp = fold_convert (etype, exp);
4697 return build_range_check (type, exp, 1, 0, high);
4700 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4701 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4703 unsigned HOST_WIDE_INT lo;
4704 HOST_WIDE_INT hi;
4705 int prec;
4707 prec = TYPE_PRECISION (etype);
4708 if (prec <= HOST_BITS_PER_WIDE_INT)
4710 hi = 0;
4711 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4713 else
4715 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4716 lo = (unsigned HOST_WIDE_INT) -1;
4719 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4721 if (TYPE_UNSIGNED (etype))
4723 tree signed_etype = signed_type_for (etype);
4724 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4725 etype
4726 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4727 else
4728 etype = signed_etype;
4729 exp = fold_convert (etype, exp);
4731 return fold_build2 (GT_EXPR, type, exp,
4732 build_int_cst (etype, 0));
4736 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4737 This requires wrap-around arithmetics for the type of the expression. */
4738 switch (TREE_CODE (etype))
4740 case INTEGER_TYPE:
4741 /* There is no requirement that LOW be within the range of ETYPE
4742 if the latter is a subtype. It must, however, be within the base
4743 type of ETYPE. So be sure we do the subtraction in that type. */
4744 if (TREE_TYPE (etype))
4745 etype = TREE_TYPE (etype);
4746 break;
4748 case ENUMERAL_TYPE:
4749 case BOOLEAN_TYPE:
4750 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4751 TYPE_UNSIGNED (etype));
4752 break;
4754 default:
4755 break;
4758 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4759 if (TREE_CODE (etype) == INTEGER_TYPE
4760 && !TYPE_OVERFLOW_WRAPS (etype))
4762 tree utype, minv, maxv;
4764 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4765 for the type in question, as we rely on this here. */
4766 utype = unsigned_type_for (etype);
4767 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4768 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4769 integer_one_node, 1);
4770 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4772 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4773 minv, 1, maxv, 1)))
4774 etype = utype;
4775 else
4776 return 0;
4779 high = fold_convert (etype, high);
4780 low = fold_convert (etype, low);
4781 exp = fold_convert (etype, exp);
4783 value = const_binop (MINUS_EXPR, high, low, 0);
4786 if (POINTER_TYPE_P (etype))
4788 if (value != 0 && !TREE_OVERFLOW (value))
4790 low = fold_convert (sizetype, low);
4791 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4792 return build_range_check (type,
4793 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4794 1, build_int_cst (etype, 0), value);
4796 return 0;
4799 if (value != 0 && !TREE_OVERFLOW (value))
4800 return build_range_check (type,
4801 fold_build2 (MINUS_EXPR, etype, exp, low),
4802 1, build_int_cst (etype, 0), value);
4804 return 0;
4807 /* Return the predecessor of VAL in its type, handling the infinite case. */
4809 static tree
4810 range_predecessor (tree val)
4812 tree type = TREE_TYPE (val);
4814 if (INTEGRAL_TYPE_P (type)
4815 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4816 return 0;
4817 else
4818 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4821 /* Return the successor of VAL in its type, handling the infinite case. */
4823 static tree
4824 range_successor (tree val)
4826 tree type = TREE_TYPE (val);
4828 if (INTEGRAL_TYPE_P (type)
4829 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4830 return 0;
4831 else
4832 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4835 /* Given two ranges, see if we can merge them into one. Return 1 if we
4836 can, 0 if we can't. Set the output range into the specified parameters. */
4838 static int
4839 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4840 tree high0, int in1_p, tree low1, tree high1)
4842 int no_overlap;
4843 int subset;
4844 int temp;
4845 tree tem;
4846 int in_p;
4847 tree low, high;
4848 int lowequal = ((low0 == 0 && low1 == 0)
4849 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4850 low0, 0, low1, 0)));
4851 int highequal = ((high0 == 0 && high1 == 0)
4852 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4853 high0, 1, high1, 1)));
4855 /* Make range 0 be the range that starts first, or ends last if they
4856 start at the same value. Swap them if it isn't. */
4857 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4858 low0, 0, low1, 0))
4859 || (lowequal
4860 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4861 high1, 1, high0, 1))))
4863 temp = in0_p, in0_p = in1_p, in1_p = temp;
4864 tem = low0, low0 = low1, low1 = tem;
4865 tem = high0, high0 = high1, high1 = tem;
4868 /* Now flag two cases, whether the ranges are disjoint or whether the
4869 second range is totally subsumed in the first. Note that the tests
4870 below are simplified by the ones above. */
4871 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4872 high0, 1, low1, 0));
4873 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4874 high1, 1, high0, 1));
4876 /* We now have four cases, depending on whether we are including or
4877 excluding the two ranges. */
4878 if (in0_p && in1_p)
4880 /* If they don't overlap, the result is false. If the second range
4881 is a subset it is the result. Otherwise, the range is from the start
4882 of the second to the end of the first. */
4883 if (no_overlap)
4884 in_p = 0, low = high = 0;
4885 else if (subset)
4886 in_p = 1, low = low1, high = high1;
4887 else
4888 in_p = 1, low = low1, high = high0;
4891 else if (in0_p && ! in1_p)
4893 /* If they don't overlap, the result is the first range. If they are
4894 equal, the result is false. If the second range is a subset of the
4895 first, and the ranges begin at the same place, we go from just after
4896 the end of the second range to the end of the first. If the second
4897 range is not a subset of the first, or if it is a subset and both
4898 ranges end at the same place, the range starts at the start of the
4899 first range and ends just before the second range.
4900 Otherwise, we can't describe this as a single range. */
4901 if (no_overlap)
4902 in_p = 1, low = low0, high = high0;
4903 else if (lowequal && highequal)
4904 in_p = 0, low = high = 0;
4905 else if (subset && lowequal)
4907 low = range_successor (high1);
4908 high = high0;
4909 in_p = 1;
4910 if (low == 0)
4912 /* We are in the weird situation where high0 > high1 but
4913 high1 has no successor. Punt. */
4914 return 0;
4917 else if (! subset || highequal)
4919 low = low0;
4920 high = range_predecessor (low1);
4921 in_p = 1;
4922 if (high == 0)
4924 /* low0 < low1 but low1 has no predecessor. Punt. */
4925 return 0;
4928 else
4929 return 0;
4932 else if (! in0_p && in1_p)
4934 /* If they don't overlap, the result is the second range. If the second
4935 is a subset of the first, the result is false. Otherwise,
4936 the range starts just after the first range and ends at the
4937 end of the second. */
4938 if (no_overlap)
4939 in_p = 1, low = low1, high = high1;
4940 else if (subset || highequal)
4941 in_p = 0, low = high = 0;
4942 else
4944 low = range_successor (high0);
4945 high = high1;
4946 in_p = 1;
4947 if (low == 0)
4949 /* high1 > high0 but high0 has no successor. Punt. */
4950 return 0;
4955 else
4957 /* The case where we are excluding both ranges. Here the complex case
4958 is if they don't overlap. In that case, the only time we have a
4959 range is if they are adjacent. If the second is a subset of the
4960 first, the result is the first. Otherwise, the range to exclude
4961 starts at the beginning of the first range and ends at the end of the
4962 second. */
4963 if (no_overlap)
4965 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4966 range_successor (high0),
4967 1, low1, 0)))
4968 in_p = 0, low = low0, high = high1;
4969 else
4971 /* Canonicalize - [min, x] into - [-, x]. */
4972 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4973 switch (TREE_CODE (TREE_TYPE (low0)))
4975 case ENUMERAL_TYPE:
4976 if (TYPE_PRECISION (TREE_TYPE (low0))
4977 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4978 break;
4979 /* FALLTHROUGH */
4980 case INTEGER_TYPE:
4981 if (tree_int_cst_equal (low0,
4982 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4983 low0 = 0;
4984 break;
4985 case POINTER_TYPE:
4986 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4987 && integer_zerop (low0))
4988 low0 = 0;
4989 break;
4990 default:
4991 break;
4994 /* Canonicalize - [x, max] into - [x, -]. */
4995 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4996 switch (TREE_CODE (TREE_TYPE (high1)))
4998 case ENUMERAL_TYPE:
4999 if (TYPE_PRECISION (TREE_TYPE (high1))
5000 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5001 break;
5002 /* FALLTHROUGH */
5003 case INTEGER_TYPE:
5004 if (tree_int_cst_equal (high1,
5005 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5006 high1 = 0;
5007 break;
5008 case POINTER_TYPE:
5009 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5010 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5011 high1, 1,
5012 integer_one_node, 1)))
5013 high1 = 0;
5014 break;
5015 default:
5016 break;
5019 /* The ranges might be also adjacent between the maximum and
5020 minimum values of the given type. For
5021 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5022 return + [x + 1, y - 1]. */
5023 if (low0 == 0 && high1 == 0)
5025 low = range_successor (high0);
5026 high = range_predecessor (low1);
5027 if (low == 0 || high == 0)
5028 return 0;
5030 in_p = 1;
5032 else
5033 return 0;
5036 else if (subset)
5037 in_p = 0, low = low0, high = high0;
5038 else
5039 in_p = 0, low = low0, high = high1;
5042 *pin_p = in_p, *plow = low, *phigh = high;
5043 return 1;
5047 /* Subroutine of fold, looking inside expressions of the form
5048 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5049 of the COND_EXPR. This function is being used also to optimize
5050 A op B ? C : A, by reversing the comparison first.
5052 Return a folded expression whose code is not a COND_EXPR
5053 anymore, or NULL_TREE if no folding opportunity is found. */
5055 static tree
5056 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5058 enum tree_code comp_code = TREE_CODE (arg0);
5059 tree arg00 = TREE_OPERAND (arg0, 0);
5060 tree arg01 = TREE_OPERAND (arg0, 1);
5061 tree arg1_type = TREE_TYPE (arg1);
5062 tree tem;
5064 STRIP_NOPS (arg1);
5065 STRIP_NOPS (arg2);
5067 /* If we have A op 0 ? A : -A, consider applying the following
5068 transformations:
5070 A == 0? A : -A same as -A
5071 A != 0? A : -A same as A
5072 A >= 0? A : -A same as abs (A)
5073 A > 0? A : -A same as abs (A)
5074 A <= 0? A : -A same as -abs (A)
5075 A < 0? A : -A same as -abs (A)
5077 None of these transformations work for modes with signed
5078 zeros. If A is +/-0, the first two transformations will
5079 change the sign of the result (from +0 to -0, or vice
5080 versa). The last four will fix the sign of the result,
5081 even though the original expressions could be positive or
5082 negative, depending on the sign of A.
5084 Note that all these transformations are correct if A is
5085 NaN, since the two alternatives (A and -A) are also NaNs. */
5086 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5087 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5088 ? real_zerop (arg01)
5089 : integer_zerop (arg01))
5090 && ((TREE_CODE (arg2) == NEGATE_EXPR
5091 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5092 /* In the case that A is of the form X-Y, '-A' (arg2) may
5093 have already been folded to Y-X, check for that. */
5094 || (TREE_CODE (arg1) == MINUS_EXPR
5095 && TREE_CODE (arg2) == MINUS_EXPR
5096 && operand_equal_p (TREE_OPERAND (arg1, 0),
5097 TREE_OPERAND (arg2, 1), 0)
5098 && operand_equal_p (TREE_OPERAND (arg1, 1),
5099 TREE_OPERAND (arg2, 0), 0))))
5100 switch (comp_code)
5102 case EQ_EXPR:
5103 case UNEQ_EXPR:
5104 tem = fold_convert (arg1_type, arg1);
5105 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5106 case NE_EXPR:
5107 case LTGT_EXPR:
5108 return pedantic_non_lvalue (fold_convert (type, arg1));
5109 case UNGE_EXPR:
5110 case UNGT_EXPR:
5111 if (flag_trapping_math)
5112 break;
5113 /* Fall through. */
5114 case GE_EXPR:
5115 case GT_EXPR:
5116 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5117 arg1 = fold_convert (signed_type_for
5118 (TREE_TYPE (arg1)), arg1);
5119 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5120 return pedantic_non_lvalue (fold_convert (type, tem));
5121 case UNLE_EXPR:
5122 case UNLT_EXPR:
5123 if (flag_trapping_math)
5124 break;
5125 case LE_EXPR:
5126 case LT_EXPR:
5127 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5128 arg1 = fold_convert (signed_type_for
5129 (TREE_TYPE (arg1)), arg1);
5130 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5131 return negate_expr (fold_convert (type, tem));
5132 default:
5133 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5134 break;
5137 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5138 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5139 both transformations are correct when A is NaN: A != 0
5140 is then true, and A == 0 is false. */
5142 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5143 && integer_zerop (arg01) && integer_zerop (arg2))
5145 if (comp_code == NE_EXPR)
5146 return pedantic_non_lvalue (fold_convert (type, arg1));
5147 else if (comp_code == EQ_EXPR)
5148 return build_int_cst (type, 0);
5151 /* Try some transformations of A op B ? A : B.
5153 A == B? A : B same as B
5154 A != B? A : B same as A
5155 A >= B? A : B same as max (A, B)
5156 A > B? A : B same as max (B, A)
5157 A <= B? A : B same as min (A, B)
5158 A < B? A : B same as min (B, A)
5160 As above, these transformations don't work in the presence
5161 of signed zeros. For example, if A and B are zeros of
5162 opposite sign, the first two transformations will change
5163 the sign of the result. In the last four, the original
5164 expressions give different results for (A=+0, B=-0) and
5165 (A=-0, B=+0), but the transformed expressions do not.
5167 The first two transformations are correct if either A or B
5168 is a NaN. In the first transformation, the condition will
5169 be false, and B will indeed be chosen. In the case of the
5170 second transformation, the condition A != B will be true,
5171 and A will be chosen.
5173 The conversions to max() and min() are not correct if B is
5174 a number and A is not. The conditions in the original
5175 expressions will be false, so all four give B. The min()
5176 and max() versions would give a NaN instead. */
5177 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5178 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5179 /* Avoid these transformations if the COND_EXPR may be used
5180 as an lvalue in the C++ front-end. PR c++/19199. */
5181 && (in_gimple_form
5182 || (strcmp (lang_hooks.name, "GNU C++") != 0
5183 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5184 || ! maybe_lvalue_p (arg1)
5185 || ! maybe_lvalue_p (arg2)))
5187 tree comp_op0 = arg00;
5188 tree comp_op1 = arg01;
5189 tree comp_type = TREE_TYPE (comp_op0);
5191 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5192 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5194 comp_type = type;
5195 comp_op0 = arg1;
5196 comp_op1 = arg2;
5199 switch (comp_code)
5201 case EQ_EXPR:
5202 return pedantic_non_lvalue (fold_convert (type, arg2));
5203 case NE_EXPR:
5204 return pedantic_non_lvalue (fold_convert (type, arg1));
5205 case LE_EXPR:
5206 case LT_EXPR:
5207 case UNLE_EXPR:
5208 case UNLT_EXPR:
5209 /* In C++ a ?: expression can be an lvalue, so put the
5210 operand which will be used if they are equal first
5211 so that we can convert this back to the
5212 corresponding COND_EXPR. */
5213 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5215 comp_op0 = fold_convert (comp_type, comp_op0);
5216 comp_op1 = fold_convert (comp_type, comp_op1);
5217 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5218 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5219 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5220 return pedantic_non_lvalue (fold_convert (type, tem));
5222 break;
5223 case GE_EXPR:
5224 case GT_EXPR:
5225 case UNGE_EXPR:
5226 case UNGT_EXPR:
5227 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5229 comp_op0 = fold_convert (comp_type, comp_op0);
5230 comp_op1 = fold_convert (comp_type, comp_op1);
5231 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5232 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5233 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5234 return pedantic_non_lvalue (fold_convert (type, tem));
5236 break;
5237 case UNEQ_EXPR:
5238 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5239 return pedantic_non_lvalue (fold_convert (type, arg2));
5240 break;
5241 case LTGT_EXPR:
5242 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5243 return pedantic_non_lvalue (fold_convert (type, arg1));
5244 break;
5245 default:
5246 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5247 break;
5251 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5252 we might still be able to simplify this. For example,
5253 if C1 is one less or one more than C2, this might have started
5254 out as a MIN or MAX and been transformed by this function.
5255 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5257 if (INTEGRAL_TYPE_P (type)
5258 && TREE_CODE (arg01) == INTEGER_CST
5259 && TREE_CODE (arg2) == INTEGER_CST)
5260 switch (comp_code)
5262 case EQ_EXPR:
5263 /* We can replace A with C1 in this case. */
5264 arg1 = fold_convert (type, arg01);
5265 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5267 case LT_EXPR:
5268 /* If C1 is C2 + 1, this is min(A, C2). */
5269 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5270 OEP_ONLY_CONST)
5271 && operand_equal_p (arg01,
5272 const_binop (PLUS_EXPR, arg2,
5273 build_int_cst (type, 1), 0),
5274 OEP_ONLY_CONST))
5275 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5276 type,
5277 fold_convert (type, arg1),
5278 arg2));
5279 break;
5281 case LE_EXPR:
5282 /* If C1 is C2 - 1, this is min(A, C2). */
5283 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5284 OEP_ONLY_CONST)
5285 && operand_equal_p (arg01,
5286 const_binop (MINUS_EXPR, arg2,
5287 build_int_cst (type, 1), 0),
5288 OEP_ONLY_CONST))
5289 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5290 type,
5291 fold_convert (type, arg1),
5292 arg2));
5293 break;
5295 case GT_EXPR:
5296 /* If C1 is C2 - 1, this is max(A, C2). */
5297 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5298 OEP_ONLY_CONST)
5299 && operand_equal_p (arg01,
5300 const_binop (MINUS_EXPR, arg2,
5301 build_int_cst (type, 1), 0),
5302 OEP_ONLY_CONST))
5303 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5304 type,
5305 fold_convert (type, arg1),
5306 arg2));
5307 break;
5309 case GE_EXPR:
5310 /* If C1 is C2 + 1, this is max(A, C2). */
5311 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5312 OEP_ONLY_CONST)
5313 && operand_equal_p (arg01,
5314 const_binop (PLUS_EXPR, arg2,
5315 build_int_cst (type, 1), 0),
5316 OEP_ONLY_CONST))
5317 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5318 type,
5319 fold_convert (type, arg1),
5320 arg2));
5321 break;
5322 case NE_EXPR:
5323 break;
5324 default:
5325 gcc_unreachable ();
5328 return NULL_TREE;
5333 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5334 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5335 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5336 false) >= 2)
5337 #endif
5339 /* EXP is some logical combination of boolean tests. See if we can
5340 merge it into some range test. Return the new tree if so. */
5342 static tree
5343 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5345 int or_op = (code == TRUTH_ORIF_EXPR
5346 || code == TRUTH_OR_EXPR);
5347 int in0_p, in1_p, in_p;
5348 tree low0, low1, low, high0, high1, high;
5349 bool strict_overflow_p = false;
5350 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5351 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5352 tree tem;
5353 const char * const warnmsg = G_("assuming signed overflow does not occur "
5354 "when simplifying range test");
5356 /* If this is an OR operation, invert both sides; we will invert
5357 again at the end. */
5358 if (or_op)
5359 in0_p = ! in0_p, in1_p = ! in1_p;
5361 /* If both expressions are the same, if we can merge the ranges, and we
5362 can build the range test, return it or it inverted. If one of the
5363 ranges is always true or always false, consider it to be the same
5364 expression as the other. */
5365 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5366 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5367 in1_p, low1, high1)
5368 && 0 != (tem = (build_range_check (type,
5369 lhs != 0 ? lhs
5370 : rhs != 0 ? rhs : integer_zero_node,
5371 in_p, low, high))))
5373 if (strict_overflow_p)
5374 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5375 return or_op ? invert_truthvalue (tem) : tem;
5378 /* On machines where the branch cost is expensive, if this is a
5379 short-circuited branch and the underlying object on both sides
5380 is the same, make a non-short-circuit operation. */
5381 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5382 && lhs != 0 && rhs != 0
5383 && (code == TRUTH_ANDIF_EXPR
5384 || code == TRUTH_ORIF_EXPR)
5385 && operand_equal_p (lhs, rhs, 0))
5387 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5388 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5389 which cases we can't do this. */
5390 if (simple_operand_p (lhs))
5391 return build2 (code == TRUTH_ANDIF_EXPR
5392 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5393 type, op0, op1);
5395 else if (lang_hooks.decls.global_bindings_p () == 0
5396 && ! CONTAINS_PLACEHOLDER_P (lhs))
5398 tree common = save_expr (lhs);
5400 if (0 != (lhs = build_range_check (type, common,
5401 or_op ? ! in0_p : in0_p,
5402 low0, high0))
5403 && (0 != (rhs = build_range_check (type, common,
5404 or_op ? ! in1_p : in1_p,
5405 low1, high1))))
5407 if (strict_overflow_p)
5408 fold_overflow_warning (warnmsg,
5409 WARN_STRICT_OVERFLOW_COMPARISON);
5410 return build2 (code == TRUTH_ANDIF_EXPR
5411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5412 type, lhs, rhs);
5417 return 0;
5420 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5421 bit value. Arrange things so the extra bits will be set to zero if and
5422 only if C is signed-extended to its full width. If MASK is nonzero,
5423 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5425 static tree
5426 unextend (tree c, int p, int unsignedp, tree mask)
5428 tree type = TREE_TYPE (c);
5429 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5430 tree temp;
5432 if (p == modesize || unsignedp)
5433 return c;
5435 /* We work by getting just the sign bit into the low-order bit, then
5436 into the high-order bit, then sign-extend. We then XOR that value
5437 with C. */
5438 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5439 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5441 /* We must use a signed type in order to get an arithmetic right shift.
5442 However, we must also avoid introducing accidental overflows, so that
5443 a subsequent call to integer_zerop will work. Hence we must
5444 do the type conversion here. At this point, the constant is either
5445 zero or one, and the conversion to a signed type can never overflow.
5446 We could get an overflow if this conversion is done anywhere else. */
5447 if (TYPE_UNSIGNED (type))
5448 temp = fold_convert (signed_type_for (type), temp);
5450 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5451 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5452 if (mask != 0)
5453 temp = const_binop (BIT_AND_EXPR, temp,
5454 fold_convert (TREE_TYPE (c), mask), 0);
5455 /* If necessary, convert the type back to match the type of C. */
5456 if (TYPE_UNSIGNED (type))
5457 temp = fold_convert (type, temp);
5459 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5462 /* Find ways of folding logical expressions of LHS and RHS:
5463 Try to merge two comparisons to the same innermost item.
5464 Look for range tests like "ch >= '0' && ch <= '9'".
5465 Look for combinations of simple terms on machines with expensive branches
5466 and evaluate the RHS unconditionally.
5468 For example, if we have p->a == 2 && p->b == 4 and we can make an
5469 object large enough to span both A and B, we can do this with a comparison
5470 against the object ANDed with the a mask.
5472 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5473 operations to do this with one comparison.
5475 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5476 function and the one above.
5478 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5479 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5481 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5482 two operands.
5484 We return the simplified tree or 0 if no optimization is possible. */
5486 static tree
5487 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5489 /* If this is the "or" of two comparisons, we can do something if
5490 the comparisons are NE_EXPR. If this is the "and", we can do something
5491 if the comparisons are EQ_EXPR. I.e.,
5492 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5494 WANTED_CODE is this operation code. For single bit fields, we can
5495 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5496 comparison for one-bit fields. */
5498 enum tree_code wanted_code;
5499 enum tree_code lcode, rcode;
5500 tree ll_arg, lr_arg, rl_arg, rr_arg;
5501 tree ll_inner, lr_inner, rl_inner, rr_inner;
5502 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5503 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5504 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5505 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5506 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5507 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5508 enum machine_mode lnmode, rnmode;
5509 tree ll_mask, lr_mask, rl_mask, rr_mask;
5510 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5511 tree l_const, r_const;
5512 tree lntype, rntype, result;
5513 HOST_WIDE_INT first_bit, end_bit;
5514 int volatilep;
5515 tree orig_lhs = lhs, orig_rhs = rhs;
5516 enum tree_code orig_code = code;
5518 /* Start by getting the comparison codes. Fail if anything is volatile.
5519 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5520 it were surrounded with a NE_EXPR. */
5522 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5523 return 0;
5525 lcode = TREE_CODE (lhs);
5526 rcode = TREE_CODE (rhs);
5528 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5530 lhs = build2 (NE_EXPR, truth_type, lhs,
5531 build_int_cst (TREE_TYPE (lhs), 0));
5532 lcode = NE_EXPR;
5535 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5537 rhs = build2 (NE_EXPR, truth_type, rhs,
5538 build_int_cst (TREE_TYPE (rhs), 0));
5539 rcode = NE_EXPR;
5542 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5543 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5544 return 0;
5546 ll_arg = TREE_OPERAND (lhs, 0);
5547 lr_arg = TREE_OPERAND (lhs, 1);
5548 rl_arg = TREE_OPERAND (rhs, 0);
5549 rr_arg = TREE_OPERAND (rhs, 1);
5551 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5552 if (simple_operand_p (ll_arg)
5553 && simple_operand_p (lr_arg))
5555 tree result;
5556 if (operand_equal_p (ll_arg, rl_arg, 0)
5557 && operand_equal_p (lr_arg, rr_arg, 0))
5559 result = combine_comparisons (code, lcode, rcode,
5560 truth_type, ll_arg, lr_arg);
5561 if (result)
5562 return result;
5564 else if (operand_equal_p (ll_arg, rr_arg, 0)
5565 && operand_equal_p (lr_arg, rl_arg, 0))
5567 result = combine_comparisons (code, lcode,
5568 swap_tree_comparison (rcode),
5569 truth_type, ll_arg, lr_arg);
5570 if (result)
5571 return result;
5575 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5576 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5578 /* If the RHS can be evaluated unconditionally and its operands are
5579 simple, it wins to evaluate the RHS unconditionally on machines
5580 with expensive branches. In this case, this isn't a comparison
5581 that can be merged. Avoid doing this if the RHS is a floating-point
5582 comparison since those can trap. */
5584 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5585 false) >= 2
5586 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5587 && simple_operand_p (rl_arg)
5588 && simple_operand_p (rr_arg))
5590 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5591 if (code == TRUTH_OR_EXPR
5592 && lcode == NE_EXPR && integer_zerop (lr_arg)
5593 && rcode == NE_EXPR && integer_zerop (rr_arg)
5594 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5595 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5596 return build2 (NE_EXPR, truth_type,
5597 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5598 ll_arg, rl_arg),
5599 build_int_cst (TREE_TYPE (ll_arg), 0));
5601 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5602 if (code == TRUTH_AND_EXPR
5603 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5604 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5605 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5606 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5607 return build2 (EQ_EXPR, truth_type,
5608 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5609 ll_arg, rl_arg),
5610 build_int_cst (TREE_TYPE (ll_arg), 0));
5612 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5614 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5615 return build2 (code, truth_type, lhs, rhs);
5616 return NULL_TREE;
5620 /* See if the comparisons can be merged. Then get all the parameters for
5621 each side. */
5623 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5624 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5625 return 0;
5627 volatilep = 0;
5628 ll_inner = decode_field_reference (ll_arg,
5629 &ll_bitsize, &ll_bitpos, &ll_mode,
5630 &ll_unsignedp, &volatilep, &ll_mask,
5631 &ll_and_mask);
5632 lr_inner = decode_field_reference (lr_arg,
5633 &lr_bitsize, &lr_bitpos, &lr_mode,
5634 &lr_unsignedp, &volatilep, &lr_mask,
5635 &lr_and_mask);
5636 rl_inner = decode_field_reference (rl_arg,
5637 &rl_bitsize, &rl_bitpos, &rl_mode,
5638 &rl_unsignedp, &volatilep, &rl_mask,
5639 &rl_and_mask);
5640 rr_inner = decode_field_reference (rr_arg,
5641 &rr_bitsize, &rr_bitpos, &rr_mode,
5642 &rr_unsignedp, &volatilep, &rr_mask,
5643 &rr_and_mask);
5645 /* It must be true that the inner operation on the lhs of each
5646 comparison must be the same if we are to be able to do anything.
5647 Then see if we have constants. If not, the same must be true for
5648 the rhs's. */
5649 if (volatilep || ll_inner == 0 || rl_inner == 0
5650 || ! operand_equal_p (ll_inner, rl_inner, 0))
5651 return 0;
5653 if (TREE_CODE (lr_arg) == INTEGER_CST
5654 && TREE_CODE (rr_arg) == INTEGER_CST)
5655 l_const = lr_arg, r_const = rr_arg;
5656 else if (lr_inner == 0 || rr_inner == 0
5657 || ! operand_equal_p (lr_inner, rr_inner, 0))
5658 return 0;
5659 else
5660 l_const = r_const = 0;
5662 /* If either comparison code is not correct for our logical operation,
5663 fail. However, we can convert a one-bit comparison against zero into
5664 the opposite comparison against that bit being set in the field. */
5666 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5667 if (lcode != wanted_code)
5669 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5671 /* Make the left operand unsigned, since we are only interested
5672 in the value of one bit. Otherwise we are doing the wrong
5673 thing below. */
5674 ll_unsignedp = 1;
5675 l_const = ll_mask;
5677 else
5678 return 0;
5681 /* This is analogous to the code for l_const above. */
5682 if (rcode != wanted_code)
5684 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5686 rl_unsignedp = 1;
5687 r_const = rl_mask;
5689 else
5690 return 0;
5693 /* See if we can find a mode that contains both fields being compared on
5694 the left. If we can't, fail. Otherwise, update all constants and masks
5695 to be relative to a field of that size. */
5696 first_bit = MIN (ll_bitpos, rl_bitpos);
5697 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5698 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5699 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5700 volatilep);
5701 if (lnmode == VOIDmode)
5702 return 0;
5704 lnbitsize = GET_MODE_BITSIZE (lnmode);
5705 lnbitpos = first_bit & ~ (lnbitsize - 1);
5706 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5707 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5709 if (BYTES_BIG_ENDIAN)
5711 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5712 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5715 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5716 size_int (xll_bitpos), 0);
5717 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5718 size_int (xrl_bitpos), 0);
5720 if (l_const)
5722 l_const = fold_convert (lntype, l_const);
5723 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5724 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5725 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5726 fold_build1 (BIT_NOT_EXPR,
5727 lntype, ll_mask),
5728 0)))
5730 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5732 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5735 if (r_const)
5737 r_const = fold_convert (lntype, r_const);
5738 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5739 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5740 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5741 fold_build1 (BIT_NOT_EXPR,
5742 lntype, rl_mask),
5743 0)))
5745 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5747 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5751 /* If the right sides are not constant, do the same for it. Also,
5752 disallow this optimization if a size or signedness mismatch occurs
5753 between the left and right sides. */
5754 if (l_const == 0)
5756 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5757 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5758 /* Make sure the two fields on the right
5759 correspond to the left without being swapped. */
5760 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5761 return 0;
5763 first_bit = MIN (lr_bitpos, rr_bitpos);
5764 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5765 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5766 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5767 volatilep);
5768 if (rnmode == VOIDmode)
5769 return 0;
5771 rnbitsize = GET_MODE_BITSIZE (rnmode);
5772 rnbitpos = first_bit & ~ (rnbitsize - 1);
5773 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5774 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5776 if (BYTES_BIG_ENDIAN)
5778 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5779 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5782 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5783 size_int (xlr_bitpos), 0);
5784 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5785 size_int (xrr_bitpos), 0);
5787 /* Make a mask that corresponds to both fields being compared.
5788 Do this for both items being compared. If the operands are the
5789 same size and the bits being compared are in the same position
5790 then we can do this by masking both and comparing the masked
5791 results. */
5792 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5793 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5794 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5796 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5797 ll_unsignedp || rl_unsignedp);
5798 if (! all_ones_mask_p (ll_mask, lnbitsize))
5799 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5801 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5802 lr_unsignedp || rr_unsignedp);
5803 if (! all_ones_mask_p (lr_mask, rnbitsize))
5804 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5806 return build2 (wanted_code, truth_type, lhs, rhs);
5809 /* There is still another way we can do something: If both pairs of
5810 fields being compared are adjacent, we may be able to make a wider
5811 field containing them both.
5813 Note that we still must mask the lhs/rhs expressions. Furthermore,
5814 the mask must be shifted to account for the shift done by
5815 make_bit_field_ref. */
5816 if ((ll_bitsize + ll_bitpos == rl_bitpos
5817 && lr_bitsize + lr_bitpos == rr_bitpos)
5818 || (ll_bitpos == rl_bitpos + rl_bitsize
5819 && lr_bitpos == rr_bitpos + rr_bitsize))
5821 tree type;
5823 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5824 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5825 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5826 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5828 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5829 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5830 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5831 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5833 /* Convert to the smaller type before masking out unwanted bits. */
5834 type = lntype;
5835 if (lntype != rntype)
5837 if (lnbitsize > rnbitsize)
5839 lhs = fold_convert (rntype, lhs);
5840 ll_mask = fold_convert (rntype, ll_mask);
5841 type = rntype;
5843 else if (lnbitsize < rnbitsize)
5845 rhs = fold_convert (lntype, rhs);
5846 lr_mask = fold_convert (lntype, lr_mask);
5847 type = lntype;
5851 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5852 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5854 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5855 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5857 return build2 (wanted_code, truth_type, lhs, rhs);
5860 return 0;
5863 /* Handle the case of comparisons with constants. If there is something in
5864 common between the masks, those bits of the constants must be the same.
5865 If not, the condition is always false. Test for this to avoid generating
5866 incorrect code below. */
5867 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5868 if (! integer_zerop (result)
5869 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5870 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5872 if (wanted_code == NE_EXPR)
5874 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5875 return constant_boolean_node (true, truth_type);
5877 else
5879 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5880 return constant_boolean_node (false, truth_type);
5884 /* Construct the expression we will return. First get the component
5885 reference we will make. Unless the mask is all ones the width of
5886 that field, perform the mask operation. Then compare with the
5887 merged constant. */
5888 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5889 ll_unsignedp || rl_unsignedp);
5891 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5892 if (! all_ones_mask_p (ll_mask, lnbitsize))
5893 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5895 return build2 (wanted_code, truth_type, result,
5896 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5899 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5900 constant. */
5902 static tree
5903 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5905 tree arg0 = op0;
5906 enum tree_code op_code;
5907 tree comp_const;
5908 tree minmax_const;
5909 int consts_equal, consts_lt;
5910 tree inner;
5912 STRIP_SIGN_NOPS (arg0);
5914 op_code = TREE_CODE (arg0);
5915 minmax_const = TREE_OPERAND (arg0, 1);
5916 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5917 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5918 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5919 inner = TREE_OPERAND (arg0, 0);
5921 /* If something does not permit us to optimize, return the original tree. */
5922 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5923 || TREE_CODE (comp_const) != INTEGER_CST
5924 || TREE_OVERFLOW (comp_const)
5925 || TREE_CODE (minmax_const) != INTEGER_CST
5926 || TREE_OVERFLOW (minmax_const))
5927 return NULL_TREE;
5929 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5930 and GT_EXPR, doing the rest with recursive calls using logical
5931 simplifications. */
5932 switch (code)
5934 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5936 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5937 type, op0, op1);
5938 if (tem)
5939 return invert_truthvalue (tem);
5940 return NULL_TREE;
5943 case GE_EXPR:
5944 return
5945 fold_build2 (TRUTH_ORIF_EXPR, type,
5946 optimize_minmax_comparison
5947 (EQ_EXPR, type, arg0, comp_const),
5948 optimize_minmax_comparison
5949 (GT_EXPR, type, arg0, comp_const));
5951 case EQ_EXPR:
5952 if (op_code == MAX_EXPR && consts_equal)
5953 /* MAX (X, 0) == 0 -> X <= 0 */
5954 return fold_build2 (LE_EXPR, type, inner, comp_const);
5956 else if (op_code == MAX_EXPR && consts_lt)
5957 /* MAX (X, 0) == 5 -> X == 5 */
5958 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5960 else if (op_code == MAX_EXPR)
5961 /* MAX (X, 0) == -1 -> false */
5962 return omit_one_operand (type, integer_zero_node, inner);
5964 else if (consts_equal)
5965 /* MIN (X, 0) == 0 -> X >= 0 */
5966 return fold_build2 (GE_EXPR, type, inner, comp_const);
5968 else if (consts_lt)
5969 /* MIN (X, 0) == 5 -> false */
5970 return omit_one_operand (type, integer_zero_node, inner);
5972 else
5973 /* MIN (X, 0) == -1 -> X == -1 */
5974 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5976 case GT_EXPR:
5977 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5978 /* MAX (X, 0) > 0 -> X > 0
5979 MAX (X, 0) > 5 -> X > 5 */
5980 return fold_build2 (GT_EXPR, type, inner, comp_const);
5982 else if (op_code == MAX_EXPR)
5983 /* MAX (X, 0) > -1 -> true */
5984 return omit_one_operand (type, integer_one_node, inner);
5986 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5987 /* MIN (X, 0) > 0 -> false
5988 MIN (X, 0) > 5 -> false */
5989 return omit_one_operand (type, integer_zero_node, inner);
5991 else
5992 /* MIN (X, 0) > -1 -> X > -1 */
5993 return fold_build2 (GT_EXPR, type, inner, comp_const);
5995 default:
5996 return NULL_TREE;
6000 /* T is an integer expression that is being multiplied, divided, or taken a
6001 modulus (CODE says which and what kind of divide or modulus) by a
6002 constant C. See if we can eliminate that operation by folding it with
6003 other operations already in T. WIDE_TYPE, if non-null, is a type that
6004 should be used for the computation if wider than our type.
6006 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6007 (X * 2) + (Y * 4). We must, however, be assured that either the original
6008 expression would not overflow or that overflow is undefined for the type
6009 in the language in question.
6011 If we return a non-null expression, it is an equivalent form of the
6012 original computation, but need not be in the original type.
6014 We set *STRICT_OVERFLOW_P to true if the return values depends on
6015 signed overflow being undefined. Otherwise we do not change
6016 *STRICT_OVERFLOW_P. */
6018 static tree
6019 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6020 bool *strict_overflow_p)
6022 /* To avoid exponential search depth, refuse to allow recursion past
6023 three levels. Beyond that (1) it's highly unlikely that we'll find
6024 something interesting and (2) we've probably processed it before
6025 when we built the inner expression. */
6027 static int depth;
6028 tree ret;
6030 if (depth > 3)
6031 return NULL;
6033 depth++;
6034 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6035 depth--;
6037 return ret;
6040 static tree
6041 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6042 bool *strict_overflow_p)
6044 tree type = TREE_TYPE (t);
6045 enum tree_code tcode = TREE_CODE (t);
6046 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6047 > GET_MODE_SIZE (TYPE_MODE (type)))
6048 ? wide_type : type);
6049 tree t1, t2;
6050 int same_p = tcode == code;
6051 tree op0 = NULL_TREE, op1 = NULL_TREE;
6052 bool sub_strict_overflow_p;
6054 /* Don't deal with constants of zero here; they confuse the code below. */
6055 if (integer_zerop (c))
6056 return NULL_TREE;
6058 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6059 op0 = TREE_OPERAND (t, 0);
6061 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6062 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6064 /* Note that we need not handle conditional operations here since fold
6065 already handles those cases. So just do arithmetic here. */
6066 switch (tcode)
6068 case INTEGER_CST:
6069 /* For a constant, we can always simplify if we are a multiply
6070 or (for divide and modulus) if it is a multiple of our constant. */
6071 if (code == MULT_EXPR
6072 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6073 return const_binop (code, fold_convert (ctype, t),
6074 fold_convert (ctype, c), 0);
6075 break;
6077 CASE_CONVERT: case NON_LVALUE_EXPR:
6078 /* If op0 is an expression ... */
6079 if ((COMPARISON_CLASS_P (op0)
6080 || UNARY_CLASS_P (op0)
6081 || BINARY_CLASS_P (op0)
6082 || VL_EXP_CLASS_P (op0)
6083 || EXPRESSION_CLASS_P (op0))
6084 /* ... and has wrapping overflow, and its type is smaller
6085 than ctype, then we cannot pass through as widening. */
6086 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6087 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6088 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6089 && (TYPE_PRECISION (ctype)
6090 > TYPE_PRECISION (TREE_TYPE (op0))))
6091 /* ... or this is a truncation (t is narrower than op0),
6092 then we cannot pass through this narrowing. */
6093 || (TYPE_PRECISION (type)
6094 < TYPE_PRECISION (TREE_TYPE (op0)))
6095 /* ... or signedness changes for division or modulus,
6096 then we cannot pass through this conversion. */
6097 || (code != MULT_EXPR
6098 && (TYPE_UNSIGNED (ctype)
6099 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6100 /* ... or has undefined overflow while the converted to
6101 type has not, we cannot do the operation in the inner type
6102 as that would introduce undefined overflow. */
6103 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6104 && !TYPE_OVERFLOW_UNDEFINED (type))))
6105 break;
6107 /* Pass the constant down and see if we can make a simplification. If
6108 we can, replace this expression with the inner simplification for
6109 possible later conversion to our or some other type. */
6110 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6111 && TREE_CODE (t2) == INTEGER_CST
6112 && !TREE_OVERFLOW (t2)
6113 && (0 != (t1 = extract_muldiv (op0, t2, code,
6114 code == MULT_EXPR
6115 ? ctype : NULL_TREE,
6116 strict_overflow_p))))
6117 return t1;
6118 break;
6120 case ABS_EXPR:
6121 /* If widening the type changes it from signed to unsigned, then we
6122 must avoid building ABS_EXPR itself as unsigned. */
6123 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6125 tree cstype = (*signed_type_for) (ctype);
6126 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6127 != 0)
6129 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6130 return fold_convert (ctype, t1);
6132 break;
6134 /* If the constant is negative, we cannot simplify this. */
6135 if (tree_int_cst_sgn (c) == -1)
6136 break;
6137 /* FALLTHROUGH */
6138 case NEGATE_EXPR:
6139 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6140 != 0)
6141 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6142 break;
6144 case MIN_EXPR: case MAX_EXPR:
6145 /* If widening the type changes the signedness, then we can't perform
6146 this optimization as that changes the result. */
6147 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6148 break;
6150 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6151 sub_strict_overflow_p = false;
6152 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6153 &sub_strict_overflow_p)) != 0
6154 && (t2 = extract_muldiv (op1, c, code, wide_type,
6155 &sub_strict_overflow_p)) != 0)
6157 if (tree_int_cst_sgn (c) < 0)
6158 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6159 if (sub_strict_overflow_p)
6160 *strict_overflow_p = true;
6161 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6162 fold_convert (ctype, t2));
6164 break;
6166 case LSHIFT_EXPR: case RSHIFT_EXPR:
6167 /* If the second operand is constant, this is a multiplication
6168 or floor division, by a power of two, so we can treat it that
6169 way unless the multiplier or divisor overflows. Signed
6170 left-shift overflow is implementation-defined rather than
6171 undefined in C90, so do not convert signed left shift into
6172 multiplication. */
6173 if (TREE_CODE (op1) == INTEGER_CST
6174 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6175 /* const_binop may not detect overflow correctly,
6176 so check for it explicitly here. */
6177 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6178 && TREE_INT_CST_HIGH (op1) == 0
6179 && 0 != (t1 = fold_convert (ctype,
6180 const_binop (LSHIFT_EXPR,
6181 size_one_node,
6182 op1, 0)))
6183 && !TREE_OVERFLOW (t1))
6184 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6185 ? MULT_EXPR : FLOOR_DIV_EXPR,
6186 ctype, fold_convert (ctype, op0), t1),
6187 c, code, wide_type, strict_overflow_p);
6188 break;
6190 case PLUS_EXPR: case MINUS_EXPR:
6191 /* See if we can eliminate the operation on both sides. If we can, we
6192 can return a new PLUS or MINUS. If we can't, the only remaining
6193 cases where we can do anything are if the second operand is a
6194 constant. */
6195 sub_strict_overflow_p = false;
6196 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6197 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6198 if (t1 != 0 && t2 != 0
6199 && (code == MULT_EXPR
6200 /* If not multiplication, we can only do this if both operands
6201 are divisible by c. */
6202 || (multiple_of_p (ctype, op0, c)
6203 && multiple_of_p (ctype, op1, c))))
6205 if (sub_strict_overflow_p)
6206 *strict_overflow_p = true;
6207 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6208 fold_convert (ctype, t2));
6211 /* If this was a subtraction, negate OP1 and set it to be an addition.
6212 This simplifies the logic below. */
6213 if (tcode == MINUS_EXPR)
6214 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6216 if (TREE_CODE (op1) != INTEGER_CST)
6217 break;
6219 /* If either OP1 or C are negative, this optimization is not safe for
6220 some of the division and remainder types while for others we need
6221 to change the code. */
6222 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6224 if (code == CEIL_DIV_EXPR)
6225 code = FLOOR_DIV_EXPR;
6226 else if (code == FLOOR_DIV_EXPR)
6227 code = CEIL_DIV_EXPR;
6228 else if (code != MULT_EXPR
6229 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6230 break;
6233 /* If it's a multiply or a division/modulus operation of a multiple
6234 of our constant, do the operation and verify it doesn't overflow. */
6235 if (code == MULT_EXPR
6236 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6238 op1 = const_binop (code, fold_convert (ctype, op1),
6239 fold_convert (ctype, c), 0);
6240 /* We allow the constant to overflow with wrapping semantics. */
6241 if (op1 == 0
6242 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6243 break;
6245 else
6246 break;
6248 /* If we have an unsigned type is not a sizetype, we cannot widen
6249 the operation since it will change the result if the original
6250 computation overflowed. */
6251 if (TYPE_UNSIGNED (ctype)
6252 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6253 && ctype != type)
6254 break;
6256 /* If we were able to eliminate our operation from the first side,
6257 apply our operation to the second side and reform the PLUS. */
6258 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6259 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6261 /* The last case is if we are a multiply. In that case, we can
6262 apply the distributive law to commute the multiply and addition
6263 if the multiplication of the constants doesn't overflow. */
6264 if (code == MULT_EXPR)
6265 return fold_build2 (tcode, ctype,
6266 fold_build2 (code, ctype,
6267 fold_convert (ctype, op0),
6268 fold_convert (ctype, c)),
6269 op1);
6271 break;
6273 case MULT_EXPR:
6274 /* We have a special case here if we are doing something like
6275 (C * 8) % 4 since we know that's zero. */
6276 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6277 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6278 /* If the multiplication can overflow we cannot optimize this.
6279 ??? Until we can properly mark individual operations as
6280 not overflowing we need to treat sizetype special here as
6281 stor-layout relies on this opimization to make
6282 DECL_FIELD_BIT_OFFSET always a constant. */
6283 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6284 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6285 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6286 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6287 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6289 *strict_overflow_p = true;
6290 return omit_one_operand (type, integer_zero_node, op0);
6293 /* ... fall through ... */
6295 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6296 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6297 /* If we can extract our operation from the LHS, do so and return a
6298 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6299 do something only if the second operand is a constant. */
6300 if (same_p
6301 && (t1 = extract_muldiv (op0, c, code, wide_type,
6302 strict_overflow_p)) != 0)
6303 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6304 fold_convert (ctype, op1));
6305 else if (tcode == MULT_EXPR && code == MULT_EXPR
6306 && (t1 = extract_muldiv (op1, c, code, wide_type,
6307 strict_overflow_p)) != 0)
6308 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6309 fold_convert (ctype, t1));
6310 else if (TREE_CODE (op1) != INTEGER_CST)
6311 return 0;
6313 /* If these are the same operation types, we can associate them
6314 assuming no overflow. */
6315 if (tcode == code
6316 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6317 fold_convert (ctype, c), 1))
6318 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6319 TREE_INT_CST_HIGH (t1),
6320 (TYPE_UNSIGNED (ctype)
6321 && tcode != MULT_EXPR) ? -1 : 1,
6322 TREE_OVERFLOW (t1)))
6323 && !TREE_OVERFLOW (t1))
6324 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6326 /* If these operations "cancel" each other, we have the main
6327 optimizations of this pass, which occur when either constant is a
6328 multiple of the other, in which case we replace this with either an
6329 operation or CODE or TCODE.
6331 If we have an unsigned type that is not a sizetype, we cannot do
6332 this since it will change the result if the original computation
6333 overflowed. */
6334 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6335 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6336 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6337 || (tcode == MULT_EXPR
6338 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6339 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6340 && code != MULT_EXPR)))
6342 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6344 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6345 *strict_overflow_p = true;
6346 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6347 fold_convert (ctype,
6348 const_binop (TRUNC_DIV_EXPR,
6349 op1, c, 0)));
6351 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6353 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6354 *strict_overflow_p = true;
6355 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6356 fold_convert (ctype,
6357 const_binop (TRUNC_DIV_EXPR,
6358 c, op1, 0)));
6361 break;
6363 default:
6364 break;
6367 return 0;
6370 /* Return a node which has the indicated constant VALUE (either 0 or
6371 1), and is of the indicated TYPE. */
6373 tree
6374 constant_boolean_node (int value, tree type)
6376 if (type == integer_type_node)
6377 return value ? integer_one_node : integer_zero_node;
6378 else if (type == boolean_type_node)
6379 return value ? boolean_true_node : boolean_false_node;
6380 else
6381 return build_int_cst (type, value);
6385 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6386 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6387 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6388 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6389 COND is the first argument to CODE; otherwise (as in the example
6390 given here), it is the second argument. TYPE is the type of the
6391 original expression. Return NULL_TREE if no simplification is
6392 possible. */
6394 static tree
6395 fold_binary_op_with_conditional_arg (enum tree_code code,
6396 tree type, tree op0, tree op1,
6397 tree cond, tree arg, int cond_first_p)
6399 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6400 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6401 tree test, true_value, false_value;
6402 tree lhs = NULL_TREE;
6403 tree rhs = NULL_TREE;
6405 /* This transformation is only worthwhile if we don't have to wrap
6406 arg in a SAVE_EXPR, and the operation can be simplified on at least
6407 one of the branches once its pushed inside the COND_EXPR. */
6408 if (!TREE_CONSTANT (arg))
6409 return NULL_TREE;
6411 if (TREE_CODE (cond) == COND_EXPR)
6413 test = TREE_OPERAND (cond, 0);
6414 true_value = TREE_OPERAND (cond, 1);
6415 false_value = TREE_OPERAND (cond, 2);
6416 /* If this operand throws an expression, then it does not make
6417 sense to try to perform a logical or arithmetic operation
6418 involving it. */
6419 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6420 lhs = true_value;
6421 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6422 rhs = false_value;
6424 else
6426 tree testtype = TREE_TYPE (cond);
6427 test = cond;
6428 true_value = constant_boolean_node (true, testtype);
6429 false_value = constant_boolean_node (false, testtype);
6432 arg = fold_convert (arg_type, arg);
6433 if (lhs == 0)
6435 true_value = fold_convert (cond_type, true_value);
6436 if (cond_first_p)
6437 lhs = fold_build2 (code, type, true_value, arg);
6438 else
6439 lhs = fold_build2 (code, type, arg, true_value);
6441 if (rhs == 0)
6443 false_value = fold_convert (cond_type, false_value);
6444 if (cond_first_p)
6445 rhs = fold_build2 (code, type, false_value, arg);
6446 else
6447 rhs = fold_build2 (code, type, arg, false_value);
6450 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6451 return fold_convert (type, test);
6455 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6457 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6458 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6459 ADDEND is the same as X.
6461 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6462 and finite. The problematic cases are when X is zero, and its mode
6463 has signed zeros. In the case of rounding towards -infinity,
6464 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6465 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6467 bool
6468 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6470 if (!real_zerop (addend))
6471 return false;
6473 /* Don't allow the fold with -fsignaling-nans. */
6474 if (HONOR_SNANS (TYPE_MODE (type)))
6475 return false;
6477 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6478 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6479 return true;
6481 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6482 if (TREE_CODE (addend) == REAL_CST
6483 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6484 negate = !negate;
6486 /* The mode has signed zeros, and we have to honor their sign.
6487 In this situation, there is only one case we can return true for.
6488 X - 0 is the same as X unless rounding towards -infinity is
6489 supported. */
6490 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6493 /* Subroutine of fold() that checks comparisons of built-in math
6494 functions against real constants.
6496 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6497 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6498 is the type of the result and ARG0 and ARG1 are the operands of the
6499 comparison. ARG1 must be a TREE_REAL_CST.
6501 The function returns the constant folded tree if a simplification
6502 can be made, and NULL_TREE otherwise. */
6504 static tree
6505 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6506 tree type, tree arg0, tree arg1)
6508 REAL_VALUE_TYPE c;
6510 if (BUILTIN_SQRT_P (fcode))
6512 tree arg = CALL_EXPR_ARG (arg0, 0);
6513 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6515 c = TREE_REAL_CST (arg1);
6516 if (REAL_VALUE_NEGATIVE (c))
6518 /* sqrt(x) < y is always false, if y is negative. */
6519 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6520 return omit_one_operand (type, integer_zero_node, arg);
6522 /* sqrt(x) > y is always true, if y is negative and we
6523 don't care about NaNs, i.e. negative values of x. */
6524 if (code == NE_EXPR || !HONOR_NANS (mode))
6525 return omit_one_operand (type, integer_one_node, arg);
6527 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6528 return fold_build2 (GE_EXPR, type, arg,
6529 build_real (TREE_TYPE (arg), dconst0));
6531 else if (code == GT_EXPR || code == GE_EXPR)
6533 REAL_VALUE_TYPE c2;
6535 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6536 real_convert (&c2, mode, &c2);
6538 if (REAL_VALUE_ISINF (c2))
6540 /* sqrt(x) > y is x == +Inf, when y is very large. */
6541 if (HONOR_INFINITIES (mode))
6542 return fold_build2 (EQ_EXPR, type, arg,
6543 build_real (TREE_TYPE (arg), c2));
6545 /* sqrt(x) > y is always false, when y is very large
6546 and we don't care about infinities. */
6547 return omit_one_operand (type, integer_zero_node, arg);
6550 /* sqrt(x) > c is the same as x > c*c. */
6551 return fold_build2 (code, type, arg,
6552 build_real (TREE_TYPE (arg), c2));
6554 else if (code == LT_EXPR || code == LE_EXPR)
6556 REAL_VALUE_TYPE c2;
6558 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6559 real_convert (&c2, mode, &c2);
6561 if (REAL_VALUE_ISINF (c2))
6563 /* sqrt(x) < y is always true, when y is a very large
6564 value and we don't care about NaNs or Infinities. */
6565 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6566 return omit_one_operand (type, integer_one_node, arg);
6568 /* sqrt(x) < y is x != +Inf when y is very large and we
6569 don't care about NaNs. */
6570 if (! HONOR_NANS (mode))
6571 return fold_build2 (NE_EXPR, type, arg,
6572 build_real (TREE_TYPE (arg), c2));
6574 /* sqrt(x) < y is x >= 0 when y is very large and we
6575 don't care about Infinities. */
6576 if (! HONOR_INFINITIES (mode))
6577 return fold_build2 (GE_EXPR, type, arg,
6578 build_real (TREE_TYPE (arg), dconst0));
6580 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6581 if (lang_hooks.decls.global_bindings_p () != 0
6582 || CONTAINS_PLACEHOLDER_P (arg))
6583 return NULL_TREE;
6585 arg = save_expr (arg);
6586 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6587 fold_build2 (GE_EXPR, type, arg,
6588 build_real (TREE_TYPE (arg),
6589 dconst0)),
6590 fold_build2 (NE_EXPR, type, arg,
6591 build_real (TREE_TYPE (arg),
6592 c2)));
6595 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6596 if (! HONOR_NANS (mode))
6597 return fold_build2 (code, type, arg,
6598 build_real (TREE_TYPE (arg), c2));
6600 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6601 if (lang_hooks.decls.global_bindings_p () == 0
6602 && ! CONTAINS_PLACEHOLDER_P (arg))
6604 arg = save_expr (arg);
6605 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6606 fold_build2 (GE_EXPR, type, arg,
6607 build_real (TREE_TYPE (arg),
6608 dconst0)),
6609 fold_build2 (code, type, arg,
6610 build_real (TREE_TYPE (arg),
6611 c2)));
6616 return NULL_TREE;
6619 /* Subroutine of fold() that optimizes comparisons against Infinities,
6620 either +Inf or -Inf.
6622 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6623 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6624 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6626 The function returns the constant folded tree if a simplification
6627 can be made, and NULL_TREE otherwise. */
6629 static tree
6630 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6632 enum machine_mode mode;
6633 REAL_VALUE_TYPE max;
6634 tree temp;
6635 bool neg;
6637 mode = TYPE_MODE (TREE_TYPE (arg0));
6639 /* For negative infinity swap the sense of the comparison. */
6640 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6641 if (neg)
6642 code = swap_tree_comparison (code);
6644 switch (code)
6646 case GT_EXPR:
6647 /* x > +Inf is always false, if with ignore sNANs. */
6648 if (HONOR_SNANS (mode))
6649 return NULL_TREE;
6650 return omit_one_operand (type, integer_zero_node, arg0);
6652 case LE_EXPR:
6653 /* x <= +Inf is always true, if we don't case about NaNs. */
6654 if (! HONOR_NANS (mode))
6655 return omit_one_operand (type, integer_one_node, arg0);
6657 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6658 if (lang_hooks.decls.global_bindings_p () == 0
6659 && ! CONTAINS_PLACEHOLDER_P (arg0))
6661 arg0 = save_expr (arg0);
6662 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6664 break;
6666 case EQ_EXPR:
6667 case GE_EXPR:
6668 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6669 real_maxval (&max, neg, mode);
6670 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6671 arg0, build_real (TREE_TYPE (arg0), max));
6673 case LT_EXPR:
6674 /* x < +Inf is always equal to x <= DBL_MAX. */
6675 real_maxval (&max, neg, mode);
6676 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6677 arg0, build_real (TREE_TYPE (arg0), max));
6679 case NE_EXPR:
6680 /* x != +Inf is always equal to !(x > DBL_MAX). */
6681 real_maxval (&max, neg, mode);
6682 if (! HONOR_NANS (mode))
6683 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6684 arg0, build_real (TREE_TYPE (arg0), max));
6686 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6687 arg0, build_real (TREE_TYPE (arg0), max));
6688 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6690 default:
6691 break;
6694 return NULL_TREE;
6697 /* Subroutine of fold() that optimizes comparisons of a division by
6698 a nonzero integer constant against an integer constant, i.e.
6699 X/C1 op C2.
6701 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6702 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6703 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6705 The function returns the constant folded tree if a simplification
6706 can be made, and NULL_TREE otherwise. */
6708 static tree
6709 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6711 tree prod, tmp, hi, lo;
6712 tree arg00 = TREE_OPERAND (arg0, 0);
6713 tree arg01 = TREE_OPERAND (arg0, 1);
6714 unsigned HOST_WIDE_INT lpart;
6715 HOST_WIDE_INT hpart;
6716 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6717 bool neg_overflow;
6718 int overflow;
6720 /* We have to do this the hard way to detect unsigned overflow.
6721 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6722 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6723 TREE_INT_CST_HIGH (arg01),
6724 TREE_INT_CST_LOW (arg1),
6725 TREE_INT_CST_HIGH (arg1),
6726 &lpart, &hpart, unsigned_p);
6727 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6728 -1, overflow);
6729 neg_overflow = false;
6731 if (unsigned_p)
6733 tmp = int_const_binop (MINUS_EXPR, arg01,
6734 build_int_cst (TREE_TYPE (arg01), 1), 0);
6735 lo = prod;
6737 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6738 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6739 TREE_INT_CST_HIGH (prod),
6740 TREE_INT_CST_LOW (tmp),
6741 TREE_INT_CST_HIGH (tmp),
6742 &lpart, &hpart, unsigned_p);
6743 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6744 -1, overflow | TREE_OVERFLOW (prod));
6746 else if (tree_int_cst_sgn (arg01) >= 0)
6748 tmp = int_const_binop (MINUS_EXPR, arg01,
6749 build_int_cst (TREE_TYPE (arg01), 1), 0);
6750 switch (tree_int_cst_sgn (arg1))
6752 case -1:
6753 neg_overflow = true;
6754 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6755 hi = prod;
6756 break;
6758 case 0:
6759 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6760 hi = tmp;
6761 break;
6763 case 1:
6764 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6765 lo = prod;
6766 break;
6768 default:
6769 gcc_unreachable ();
6772 else
6774 /* A negative divisor reverses the relational operators. */
6775 code = swap_tree_comparison (code);
6777 tmp = int_const_binop (PLUS_EXPR, arg01,
6778 build_int_cst (TREE_TYPE (arg01), 1), 0);
6779 switch (tree_int_cst_sgn (arg1))
6781 case -1:
6782 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6783 lo = prod;
6784 break;
6786 case 0:
6787 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6788 lo = tmp;
6789 break;
6791 case 1:
6792 neg_overflow = true;
6793 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6794 hi = prod;
6795 break;
6797 default:
6798 gcc_unreachable ();
6802 switch (code)
6804 case EQ_EXPR:
6805 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6806 return omit_one_operand (type, integer_zero_node, arg00);
6807 if (TREE_OVERFLOW (hi))
6808 return fold_build2 (GE_EXPR, type, arg00, lo);
6809 if (TREE_OVERFLOW (lo))
6810 return fold_build2 (LE_EXPR, type, arg00, hi);
6811 return build_range_check (type, arg00, 1, lo, hi);
6813 case NE_EXPR:
6814 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6815 return omit_one_operand (type, integer_one_node, arg00);
6816 if (TREE_OVERFLOW (hi))
6817 return fold_build2 (LT_EXPR, type, arg00, lo);
6818 if (TREE_OVERFLOW (lo))
6819 return fold_build2 (GT_EXPR, type, arg00, hi);
6820 return build_range_check (type, arg00, 0, lo, hi);
6822 case LT_EXPR:
6823 if (TREE_OVERFLOW (lo))
6825 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6826 return omit_one_operand (type, tmp, arg00);
6828 return fold_build2 (LT_EXPR, type, arg00, lo);
6830 case LE_EXPR:
6831 if (TREE_OVERFLOW (hi))
6833 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6834 return omit_one_operand (type, tmp, arg00);
6836 return fold_build2 (LE_EXPR, type, arg00, hi);
6838 case GT_EXPR:
6839 if (TREE_OVERFLOW (hi))
6841 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6842 return omit_one_operand (type, tmp, arg00);
6844 return fold_build2 (GT_EXPR, type, arg00, hi);
6846 case GE_EXPR:
6847 if (TREE_OVERFLOW (lo))
6849 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6850 return omit_one_operand (type, tmp, arg00);
6852 return fold_build2 (GE_EXPR, type, arg00, lo);
6854 default:
6855 break;
6858 return NULL_TREE;
6862 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6863 equality/inequality test, then return a simplified form of the test
6864 using a sign testing. Otherwise return NULL. TYPE is the desired
6865 result type. */
6867 static tree
6868 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6869 tree result_type)
6871 /* If this is testing a single bit, we can optimize the test. */
6872 if ((code == NE_EXPR || code == EQ_EXPR)
6873 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6874 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6876 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6877 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6878 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6880 if (arg00 != NULL_TREE
6881 /* This is only a win if casting to a signed type is cheap,
6882 i.e. when arg00's type is not a partial mode. */
6883 && TYPE_PRECISION (TREE_TYPE (arg00))
6884 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6886 tree stype = signed_type_for (TREE_TYPE (arg00));
6887 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6888 result_type, fold_convert (stype, arg00),
6889 build_int_cst (stype, 0));
6893 return NULL_TREE;
6896 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6897 equality/inequality test, then return a simplified form of
6898 the test using shifts and logical operations. Otherwise return
6899 NULL. TYPE is the desired result type. */
6901 tree
6902 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6903 tree result_type)
6905 /* If this is testing a single bit, we can optimize the test. */
6906 if ((code == NE_EXPR || code == EQ_EXPR)
6907 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6908 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6910 tree inner = TREE_OPERAND (arg0, 0);
6911 tree type = TREE_TYPE (arg0);
6912 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6913 enum machine_mode operand_mode = TYPE_MODE (type);
6914 int ops_unsigned;
6915 tree signed_type, unsigned_type, intermediate_type;
6916 tree tem, one;
6918 /* First, see if we can fold the single bit test into a sign-bit
6919 test. */
6920 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6921 result_type);
6922 if (tem)
6923 return tem;
6925 /* Otherwise we have (A & C) != 0 where C is a single bit,
6926 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6927 Similarly for (A & C) == 0. */
6929 /* If INNER is a right shift of a constant and it plus BITNUM does
6930 not overflow, adjust BITNUM and INNER. */
6931 if (TREE_CODE (inner) == RSHIFT_EXPR
6932 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6933 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6934 && bitnum < TYPE_PRECISION (type)
6935 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6936 bitnum - TYPE_PRECISION (type)))
6938 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6939 inner = TREE_OPERAND (inner, 0);
6942 /* If we are going to be able to omit the AND below, we must do our
6943 operations as unsigned. If we must use the AND, we have a choice.
6944 Normally unsigned is faster, but for some machines signed is. */
6945 #ifdef LOAD_EXTEND_OP
6946 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6947 && !flag_syntax_only) ? 0 : 1;
6948 #else
6949 ops_unsigned = 1;
6950 #endif
6952 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6953 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6954 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6955 inner = fold_convert (intermediate_type, inner);
6957 if (bitnum != 0)
6958 inner = build2 (RSHIFT_EXPR, intermediate_type,
6959 inner, size_int (bitnum));
6961 one = build_int_cst (intermediate_type, 1);
6963 if (code == EQ_EXPR)
6964 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6966 /* Put the AND last so it can combine with more things. */
6967 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6969 /* Make sure to return the proper type. */
6970 inner = fold_convert (result_type, inner);
6972 return inner;
6974 return NULL_TREE;
6977 /* Check whether we are allowed to reorder operands arg0 and arg1,
6978 such that the evaluation of arg1 occurs before arg0. */
6980 static bool
6981 reorder_operands_p (const_tree arg0, const_tree arg1)
6983 if (! flag_evaluation_order)
6984 return true;
6985 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6986 return true;
6987 return ! TREE_SIDE_EFFECTS (arg0)
6988 && ! TREE_SIDE_EFFECTS (arg1);
6991 /* Test whether it is preferable two swap two operands, ARG0 and
6992 ARG1, for example because ARG0 is an integer constant and ARG1
6993 isn't. If REORDER is true, only recommend swapping if we can
6994 evaluate the operands in reverse order. */
6996 bool
6997 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6999 STRIP_SIGN_NOPS (arg0);
7000 STRIP_SIGN_NOPS (arg1);
7002 if (TREE_CODE (arg1) == INTEGER_CST)
7003 return 0;
7004 if (TREE_CODE (arg0) == INTEGER_CST)
7005 return 1;
7007 if (TREE_CODE (arg1) == REAL_CST)
7008 return 0;
7009 if (TREE_CODE (arg0) == REAL_CST)
7010 return 1;
7012 if (TREE_CODE (arg1) == FIXED_CST)
7013 return 0;
7014 if (TREE_CODE (arg0) == FIXED_CST)
7015 return 1;
7017 if (TREE_CODE (arg1) == COMPLEX_CST)
7018 return 0;
7019 if (TREE_CODE (arg0) == COMPLEX_CST)
7020 return 1;
7022 if (TREE_CONSTANT (arg1))
7023 return 0;
7024 if (TREE_CONSTANT (arg0))
7025 return 1;
7027 if (optimize_function_for_size_p (cfun))
7028 return 0;
7030 if (reorder && flag_evaluation_order
7031 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7032 return 0;
7034 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7035 for commutative and comparison operators. Ensuring a canonical
7036 form allows the optimizers to find additional redundancies without
7037 having to explicitly check for both orderings. */
7038 if (TREE_CODE (arg0) == SSA_NAME
7039 && TREE_CODE (arg1) == SSA_NAME
7040 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7041 return 1;
7043 /* Put SSA_NAMEs last. */
7044 if (TREE_CODE (arg1) == SSA_NAME)
7045 return 0;
7046 if (TREE_CODE (arg0) == SSA_NAME)
7047 return 1;
7049 /* Put variables last. */
7050 if (DECL_P (arg1))
7051 return 0;
7052 if (DECL_P (arg0))
7053 return 1;
7055 return 0;
7058 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7059 ARG0 is extended to a wider type. */
7061 static tree
7062 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7064 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7065 tree arg1_unw;
7066 tree shorter_type, outer_type;
7067 tree min, max;
7068 bool above, below;
7070 if (arg0_unw == arg0)
7071 return NULL_TREE;
7072 shorter_type = TREE_TYPE (arg0_unw);
7074 #ifdef HAVE_canonicalize_funcptr_for_compare
7075 /* Disable this optimization if we're casting a function pointer
7076 type on targets that require function pointer canonicalization. */
7077 if (HAVE_canonicalize_funcptr_for_compare
7078 && TREE_CODE (shorter_type) == POINTER_TYPE
7079 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7080 return NULL_TREE;
7081 #endif
7083 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7084 return NULL_TREE;
7086 arg1_unw = get_unwidened (arg1, NULL_TREE);
7088 /* If possible, express the comparison in the shorter mode. */
7089 if ((code == EQ_EXPR || code == NE_EXPR
7090 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7091 && (TREE_TYPE (arg1_unw) == shorter_type
7092 || ((TYPE_PRECISION (shorter_type)
7093 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7094 && (TYPE_UNSIGNED (shorter_type)
7095 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7096 || (TREE_CODE (arg1_unw) == INTEGER_CST
7097 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7098 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7099 && int_fits_type_p (arg1_unw, shorter_type))))
7100 return fold_build2 (code, type, arg0_unw,
7101 fold_convert (shorter_type, arg1_unw));
7103 if (TREE_CODE (arg1_unw) != INTEGER_CST
7104 || TREE_CODE (shorter_type) != INTEGER_TYPE
7105 || !int_fits_type_p (arg1_unw, shorter_type))
7106 return NULL_TREE;
7108 /* If we are comparing with the integer that does not fit into the range
7109 of the shorter type, the result is known. */
7110 outer_type = TREE_TYPE (arg1_unw);
7111 min = lower_bound_in_type (outer_type, shorter_type);
7112 max = upper_bound_in_type (outer_type, shorter_type);
7114 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7115 max, arg1_unw));
7116 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7117 arg1_unw, min));
7119 switch (code)
7121 case EQ_EXPR:
7122 if (above || below)
7123 return omit_one_operand (type, integer_zero_node, arg0);
7124 break;
7126 case NE_EXPR:
7127 if (above || below)
7128 return omit_one_operand (type, integer_one_node, arg0);
7129 break;
7131 case LT_EXPR:
7132 case LE_EXPR:
7133 if (above)
7134 return omit_one_operand (type, integer_one_node, arg0);
7135 else if (below)
7136 return omit_one_operand (type, integer_zero_node, arg0);
7138 case GT_EXPR:
7139 case GE_EXPR:
7140 if (above)
7141 return omit_one_operand (type, integer_zero_node, arg0);
7142 else if (below)
7143 return omit_one_operand (type, integer_one_node, arg0);
7145 default:
7146 break;
7149 return NULL_TREE;
7152 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7153 ARG0 just the signedness is changed. */
7155 static tree
7156 fold_sign_changed_comparison (enum tree_code code, tree type,
7157 tree arg0, tree arg1)
7159 tree arg0_inner;
7160 tree inner_type, outer_type;
7162 if (!CONVERT_EXPR_P (arg0))
7163 return NULL_TREE;
7165 outer_type = TREE_TYPE (arg0);
7166 arg0_inner = TREE_OPERAND (arg0, 0);
7167 inner_type = TREE_TYPE (arg0_inner);
7169 #ifdef HAVE_canonicalize_funcptr_for_compare
7170 /* Disable this optimization if we're casting a function pointer
7171 type on targets that require function pointer canonicalization. */
7172 if (HAVE_canonicalize_funcptr_for_compare
7173 && TREE_CODE (inner_type) == POINTER_TYPE
7174 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7175 return NULL_TREE;
7176 #endif
7178 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7179 return NULL_TREE;
7181 /* If the conversion is from an integral subtype to its basetype
7182 leave it alone. */
7183 if (TREE_TYPE (inner_type) == outer_type)
7184 return NULL_TREE;
7186 if (TREE_CODE (arg1) != INTEGER_CST
7187 && !(CONVERT_EXPR_P (arg1)
7188 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7189 return NULL_TREE;
7191 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7192 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7193 && code != NE_EXPR
7194 && code != EQ_EXPR)
7195 return NULL_TREE;
7197 if (TREE_CODE (arg1) == INTEGER_CST)
7198 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7199 TREE_INT_CST_HIGH (arg1), 0,
7200 TREE_OVERFLOW (arg1));
7201 else
7202 arg1 = fold_convert (inner_type, arg1);
7204 return fold_build2 (code, type, arg0_inner, arg1);
7207 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7208 step of the array. Reconstructs s and delta in the case of s * delta
7209 being an integer constant (and thus already folded).
7210 ADDR is the address. MULT is the multiplicative expression.
7211 If the function succeeds, the new address expression is returned. Otherwise
7212 NULL_TREE is returned. */
7214 static tree
7215 try_move_mult_to_index (tree addr, tree op1)
7217 tree s, delta, step;
7218 tree ref = TREE_OPERAND (addr, 0), pref;
7219 tree ret, pos;
7220 tree itype;
7221 bool mdim = false;
7223 /* Strip the nops that might be added when converting op1 to sizetype. */
7224 STRIP_NOPS (op1);
7226 /* Canonicalize op1 into a possibly non-constant delta
7227 and an INTEGER_CST s. */
7228 if (TREE_CODE (op1) == MULT_EXPR)
7230 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7232 STRIP_NOPS (arg0);
7233 STRIP_NOPS (arg1);
7235 if (TREE_CODE (arg0) == INTEGER_CST)
7237 s = arg0;
7238 delta = arg1;
7240 else if (TREE_CODE (arg1) == INTEGER_CST)
7242 s = arg1;
7243 delta = arg0;
7245 else
7246 return NULL_TREE;
7248 else if (TREE_CODE (op1) == INTEGER_CST)
7250 delta = op1;
7251 s = NULL_TREE;
7253 else
7255 /* Simulate we are delta * 1. */
7256 delta = op1;
7257 s = integer_one_node;
7260 for (;; ref = TREE_OPERAND (ref, 0))
7262 if (TREE_CODE (ref) == ARRAY_REF)
7264 /* Remember if this was a multi-dimensional array. */
7265 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7266 mdim = true;
7268 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7269 if (! itype)
7270 continue;
7272 step = array_ref_element_size (ref);
7273 if (TREE_CODE (step) != INTEGER_CST)
7274 continue;
7276 if (s)
7278 if (! tree_int_cst_equal (step, s))
7279 continue;
7281 else
7283 /* Try if delta is a multiple of step. */
7284 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7285 if (! tmp)
7286 continue;
7287 delta = tmp;
7290 /* Only fold here if we can verify we do not overflow one
7291 dimension of a multi-dimensional array. */
7292 if (mdim)
7294 tree tmp;
7296 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7297 || !INTEGRAL_TYPE_P (itype)
7298 || !TYPE_MAX_VALUE (itype)
7299 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7300 continue;
7302 tmp = fold_binary (PLUS_EXPR, itype,
7303 fold_convert (itype,
7304 TREE_OPERAND (ref, 1)),
7305 fold_convert (itype, delta));
7306 if (!tmp
7307 || TREE_CODE (tmp) != INTEGER_CST
7308 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7309 continue;
7312 break;
7314 else
7315 mdim = false;
7317 if (!handled_component_p (ref))
7318 return NULL_TREE;
7321 /* We found the suitable array reference. So copy everything up to it,
7322 and replace the index. */
7324 pref = TREE_OPERAND (addr, 0);
7325 ret = copy_node (pref);
7326 pos = ret;
7328 while (pref != ref)
7330 pref = TREE_OPERAND (pref, 0);
7331 TREE_OPERAND (pos, 0) = copy_node (pref);
7332 pos = TREE_OPERAND (pos, 0);
7335 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7336 fold_convert (itype,
7337 TREE_OPERAND (pos, 1)),
7338 fold_convert (itype, delta));
7340 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7344 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7345 means A >= Y && A != MAX, but in this case we know that
7346 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7348 static tree
7349 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7351 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7353 if (TREE_CODE (bound) == LT_EXPR)
7354 a = TREE_OPERAND (bound, 0);
7355 else if (TREE_CODE (bound) == GT_EXPR)
7356 a = TREE_OPERAND (bound, 1);
7357 else
7358 return NULL_TREE;
7360 typea = TREE_TYPE (a);
7361 if (!INTEGRAL_TYPE_P (typea)
7362 && !POINTER_TYPE_P (typea))
7363 return NULL_TREE;
7365 if (TREE_CODE (ineq) == LT_EXPR)
7367 a1 = TREE_OPERAND (ineq, 1);
7368 y = TREE_OPERAND (ineq, 0);
7370 else if (TREE_CODE (ineq) == GT_EXPR)
7372 a1 = TREE_OPERAND (ineq, 0);
7373 y = TREE_OPERAND (ineq, 1);
7375 else
7376 return NULL_TREE;
7378 if (TREE_TYPE (a1) != typea)
7379 return NULL_TREE;
7381 if (POINTER_TYPE_P (typea))
7383 /* Convert the pointer types into integer before taking the difference. */
7384 tree ta = fold_convert (ssizetype, a);
7385 tree ta1 = fold_convert (ssizetype, a1);
7386 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7388 else
7389 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7391 if (!diff || !integer_onep (diff))
7392 return NULL_TREE;
7394 return fold_build2 (GE_EXPR, type, a, y);
7397 /* Fold a sum or difference of at least one multiplication.
7398 Returns the folded tree or NULL if no simplification could be made. */
7400 static tree
7401 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7403 tree arg00, arg01, arg10, arg11;
7404 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7406 /* (A * C) +- (B * C) -> (A+-B) * C.
7407 (A * C) +- A -> A * (C+-1).
7408 We are most concerned about the case where C is a constant,
7409 but other combinations show up during loop reduction. Since
7410 it is not difficult, try all four possibilities. */
7412 if (TREE_CODE (arg0) == MULT_EXPR)
7414 arg00 = TREE_OPERAND (arg0, 0);
7415 arg01 = TREE_OPERAND (arg0, 1);
7417 else if (TREE_CODE (arg0) == INTEGER_CST)
7419 arg00 = build_one_cst (type);
7420 arg01 = arg0;
7422 else
7424 /* We cannot generate constant 1 for fract. */
7425 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7426 return NULL_TREE;
7427 arg00 = arg0;
7428 arg01 = build_one_cst (type);
7430 if (TREE_CODE (arg1) == MULT_EXPR)
7432 arg10 = TREE_OPERAND (arg1, 0);
7433 arg11 = TREE_OPERAND (arg1, 1);
7435 else if (TREE_CODE (arg1) == INTEGER_CST)
7437 arg10 = build_one_cst (type);
7438 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7439 the purpose of this canonicalization. */
7440 if (TREE_INT_CST_HIGH (arg1) == -1
7441 && negate_expr_p (arg1)
7442 && code == PLUS_EXPR)
7444 arg11 = negate_expr (arg1);
7445 code = MINUS_EXPR;
7447 else
7448 arg11 = arg1;
7450 else
7452 /* We cannot generate constant 1 for fract. */
7453 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7454 return NULL_TREE;
7455 arg10 = arg1;
7456 arg11 = build_one_cst (type);
7458 same = NULL_TREE;
7460 if (operand_equal_p (arg01, arg11, 0))
7461 same = arg01, alt0 = arg00, alt1 = arg10;
7462 else if (operand_equal_p (arg00, arg10, 0))
7463 same = arg00, alt0 = arg01, alt1 = arg11;
7464 else if (operand_equal_p (arg00, arg11, 0))
7465 same = arg00, alt0 = arg01, alt1 = arg10;
7466 else if (operand_equal_p (arg01, arg10, 0))
7467 same = arg01, alt0 = arg00, alt1 = arg11;
7469 /* No identical multiplicands; see if we can find a common
7470 power-of-two factor in non-power-of-two multiplies. This
7471 can help in multi-dimensional array access. */
7472 else if (host_integerp (arg01, 0)
7473 && host_integerp (arg11, 0))
7475 HOST_WIDE_INT int01, int11, tmp;
7476 bool swap = false;
7477 tree maybe_same;
7478 int01 = TREE_INT_CST_LOW (arg01);
7479 int11 = TREE_INT_CST_LOW (arg11);
7481 /* Move min of absolute values to int11. */
7482 if ((int01 >= 0 ? int01 : -int01)
7483 < (int11 >= 0 ? int11 : -int11))
7485 tmp = int01, int01 = int11, int11 = tmp;
7486 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7487 maybe_same = arg01;
7488 swap = true;
7490 else
7491 maybe_same = arg11;
7493 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7495 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7496 build_int_cst (TREE_TYPE (arg00),
7497 int01 / int11));
7498 alt1 = arg10;
7499 same = maybe_same;
7500 if (swap)
7501 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7505 if (same)
7506 return fold_build2 (MULT_EXPR, type,
7507 fold_build2 (code, type,
7508 fold_convert (type, alt0),
7509 fold_convert (type, alt1)),
7510 fold_convert (type, same));
7512 return NULL_TREE;
7515 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7516 specified by EXPR into the buffer PTR of length LEN bytes.
7517 Return the number of bytes placed in the buffer, or zero
7518 upon failure. */
7520 static int
7521 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7523 tree type = TREE_TYPE (expr);
7524 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7525 int byte, offset, word, words;
7526 unsigned char value;
7528 if (total_bytes > len)
7529 return 0;
7530 words = total_bytes / UNITS_PER_WORD;
7532 for (byte = 0; byte < total_bytes; byte++)
7534 int bitpos = byte * BITS_PER_UNIT;
7535 if (bitpos < HOST_BITS_PER_WIDE_INT)
7536 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7537 else
7538 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7539 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7541 if (total_bytes > UNITS_PER_WORD)
7543 word = byte / UNITS_PER_WORD;
7544 if (WORDS_BIG_ENDIAN)
7545 word = (words - 1) - word;
7546 offset = word * UNITS_PER_WORD;
7547 if (BYTES_BIG_ENDIAN)
7548 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7549 else
7550 offset += byte % UNITS_PER_WORD;
7552 else
7553 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7554 ptr[offset] = value;
7556 return total_bytes;
7560 /* Subroutine of native_encode_expr. Encode the REAL_CST
7561 specified by EXPR into the buffer PTR of length LEN bytes.
7562 Return the number of bytes placed in the buffer, or zero
7563 upon failure. */
7565 static int
7566 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7568 tree type = TREE_TYPE (expr);
7569 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7570 int byte, offset, word, words, bitpos;
7571 unsigned char value;
7573 /* There are always 32 bits in each long, no matter the size of
7574 the hosts long. We handle floating point representations with
7575 up to 192 bits. */
7576 long tmp[6];
7578 if (total_bytes > len)
7579 return 0;
7580 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7582 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7584 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7585 bitpos += BITS_PER_UNIT)
7587 byte = (bitpos / BITS_PER_UNIT) & 3;
7588 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7590 if (UNITS_PER_WORD < 4)
7592 word = byte / UNITS_PER_WORD;
7593 if (WORDS_BIG_ENDIAN)
7594 word = (words - 1) - word;
7595 offset = word * UNITS_PER_WORD;
7596 if (BYTES_BIG_ENDIAN)
7597 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7598 else
7599 offset += byte % UNITS_PER_WORD;
7601 else
7602 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7603 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7605 return total_bytes;
7608 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7609 specified by EXPR into the buffer PTR of length LEN bytes.
7610 Return the number of bytes placed in the buffer, or zero
7611 upon failure. */
7613 static int
7614 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7616 int rsize, isize;
7617 tree part;
7619 part = TREE_REALPART (expr);
7620 rsize = native_encode_expr (part, ptr, len);
7621 if (rsize == 0)
7622 return 0;
7623 part = TREE_IMAGPART (expr);
7624 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7625 if (isize != rsize)
7626 return 0;
7627 return rsize + isize;
7631 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7632 specified by EXPR into the buffer PTR of length LEN bytes.
7633 Return the number of bytes placed in the buffer, or zero
7634 upon failure. */
7636 static int
7637 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7639 int i, size, offset, count;
7640 tree itype, elem, elements;
7642 offset = 0;
7643 elements = TREE_VECTOR_CST_ELTS (expr);
7644 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7645 itype = TREE_TYPE (TREE_TYPE (expr));
7646 size = GET_MODE_SIZE (TYPE_MODE (itype));
7647 for (i = 0; i < count; i++)
7649 if (elements)
7651 elem = TREE_VALUE (elements);
7652 elements = TREE_CHAIN (elements);
7654 else
7655 elem = NULL_TREE;
7657 if (elem)
7659 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7660 return 0;
7662 else
7664 if (offset + size > len)
7665 return 0;
7666 memset (ptr+offset, 0, size);
7668 offset += size;
7670 return offset;
7674 /* Subroutine of native_encode_expr. Encode the STRING_CST
7675 specified by EXPR into the buffer PTR of length LEN bytes.
7676 Return the number of bytes placed in the buffer, or zero
7677 upon failure. */
7679 static int
7680 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7682 tree type = TREE_TYPE (expr);
7683 HOST_WIDE_INT total_bytes;
7685 if (TREE_CODE (type) != ARRAY_TYPE
7686 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7687 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7688 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7689 return 0;
7690 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7691 if (total_bytes > len)
7692 return 0;
7693 if (TREE_STRING_LENGTH (expr) < total_bytes)
7695 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7696 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7697 total_bytes - TREE_STRING_LENGTH (expr));
7699 else
7700 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7701 return total_bytes;
7705 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7706 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7707 buffer PTR of length LEN bytes. Return the number of bytes
7708 placed in the buffer, or zero upon failure. */
7711 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7713 switch (TREE_CODE (expr))
7715 case INTEGER_CST:
7716 return native_encode_int (expr, ptr, len);
7718 case REAL_CST:
7719 return native_encode_real (expr, ptr, len);
7721 case COMPLEX_CST:
7722 return native_encode_complex (expr, ptr, len);
7724 case VECTOR_CST:
7725 return native_encode_vector (expr, ptr, len);
7727 case STRING_CST:
7728 return native_encode_string (expr, ptr, len);
7730 default:
7731 return 0;
7736 /* Subroutine of native_interpret_expr. Interpret the contents of
7737 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7738 If the buffer cannot be interpreted, return NULL_TREE. */
7740 static tree
7741 native_interpret_int (tree type, const unsigned char *ptr, int len)
7743 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7744 int byte, offset, word, words;
7745 unsigned char value;
7746 unsigned int HOST_WIDE_INT lo = 0;
7747 HOST_WIDE_INT hi = 0;
7749 if (total_bytes > len)
7750 return NULL_TREE;
7751 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7752 return NULL_TREE;
7753 words = total_bytes / UNITS_PER_WORD;
7755 for (byte = 0; byte < total_bytes; byte++)
7757 int bitpos = byte * BITS_PER_UNIT;
7758 if (total_bytes > UNITS_PER_WORD)
7760 word = byte / UNITS_PER_WORD;
7761 if (WORDS_BIG_ENDIAN)
7762 word = (words - 1) - word;
7763 offset = word * UNITS_PER_WORD;
7764 if (BYTES_BIG_ENDIAN)
7765 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7766 else
7767 offset += byte % UNITS_PER_WORD;
7769 else
7770 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7771 value = ptr[offset];
7773 if (bitpos < HOST_BITS_PER_WIDE_INT)
7774 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7775 else
7776 hi |= (unsigned HOST_WIDE_INT) value
7777 << (bitpos - HOST_BITS_PER_WIDE_INT);
7780 return build_int_cst_wide_type (type, lo, hi);
7784 /* Subroutine of native_interpret_expr. Interpret the contents of
7785 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7786 If the buffer cannot be interpreted, return NULL_TREE. */
7788 static tree
7789 native_interpret_real (tree type, const unsigned char *ptr, int len)
7791 enum machine_mode mode = TYPE_MODE (type);
7792 int total_bytes = GET_MODE_SIZE (mode);
7793 int byte, offset, word, words, bitpos;
7794 unsigned char value;
7795 /* There are always 32 bits in each long, no matter the size of
7796 the hosts long. We handle floating point representations with
7797 up to 192 bits. */
7798 REAL_VALUE_TYPE r;
7799 long tmp[6];
7801 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7802 if (total_bytes > len || total_bytes > 24)
7803 return NULL_TREE;
7804 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7806 memset (tmp, 0, sizeof (tmp));
7807 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7808 bitpos += BITS_PER_UNIT)
7810 byte = (bitpos / BITS_PER_UNIT) & 3;
7811 if (UNITS_PER_WORD < 4)
7813 word = byte / UNITS_PER_WORD;
7814 if (WORDS_BIG_ENDIAN)
7815 word = (words - 1) - word;
7816 offset = word * UNITS_PER_WORD;
7817 if (BYTES_BIG_ENDIAN)
7818 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7819 else
7820 offset += byte % UNITS_PER_WORD;
7822 else
7823 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7824 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7826 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7829 real_from_target (&r, tmp, mode);
7830 return build_real (type, r);
7834 /* Subroutine of native_interpret_expr. Interpret the contents of
7835 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7836 If the buffer cannot be interpreted, return NULL_TREE. */
7838 static tree
7839 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7841 tree etype, rpart, ipart;
7842 int size;
7844 etype = TREE_TYPE (type);
7845 size = GET_MODE_SIZE (TYPE_MODE (etype));
7846 if (size * 2 > len)
7847 return NULL_TREE;
7848 rpart = native_interpret_expr (etype, ptr, size);
7849 if (!rpart)
7850 return NULL_TREE;
7851 ipart = native_interpret_expr (etype, ptr+size, size);
7852 if (!ipart)
7853 return NULL_TREE;
7854 return build_complex (type, rpart, ipart);
7858 /* Subroutine of native_interpret_expr. Interpret the contents of
7859 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7860 If the buffer cannot be interpreted, return NULL_TREE. */
7862 static tree
7863 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7865 tree etype, elem, elements;
7866 int i, size, count;
7868 etype = TREE_TYPE (type);
7869 size = GET_MODE_SIZE (TYPE_MODE (etype));
7870 count = TYPE_VECTOR_SUBPARTS (type);
7871 if (size * count > len)
7872 return NULL_TREE;
7874 elements = NULL_TREE;
7875 for (i = count - 1; i >= 0; i--)
7877 elem = native_interpret_expr (etype, ptr+(i*size), size);
7878 if (!elem)
7879 return NULL_TREE;
7880 elements = tree_cons (NULL_TREE, elem, elements);
7882 return build_vector (type, elements);
7886 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7887 the buffer PTR of length LEN as a constant of type TYPE. For
7888 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7889 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7890 return NULL_TREE. */
7892 tree
7893 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7895 switch (TREE_CODE (type))
7897 case INTEGER_TYPE:
7898 case ENUMERAL_TYPE:
7899 case BOOLEAN_TYPE:
7900 return native_interpret_int (type, ptr, len);
7902 case REAL_TYPE:
7903 return native_interpret_real (type, ptr, len);
7905 case COMPLEX_TYPE:
7906 return native_interpret_complex (type, ptr, len);
7908 case VECTOR_TYPE:
7909 return native_interpret_vector (type, ptr, len);
7911 default:
7912 return NULL_TREE;
7917 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7918 TYPE at compile-time. If we're unable to perform the conversion
7919 return NULL_TREE. */
7921 static tree
7922 fold_view_convert_expr (tree type, tree expr)
7924 /* We support up to 512-bit values (for V8DFmode). */
7925 unsigned char buffer[64];
7926 int len;
7928 /* Check that the host and target are sane. */
7929 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7930 return NULL_TREE;
7932 len = native_encode_expr (expr, buffer, sizeof (buffer));
7933 if (len == 0)
7934 return NULL_TREE;
7936 return native_interpret_expr (type, buffer, len);
7939 /* Build an expression for the address of T. Folds away INDIRECT_REF
7940 to avoid confusing the gimplify process. When IN_FOLD is true
7941 avoid modifications of T. */
7943 static tree
7944 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7946 /* The size of the object is not relevant when talking about its address. */
7947 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7948 t = TREE_OPERAND (t, 0);
7950 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7951 if (TREE_CODE (t) == INDIRECT_REF
7952 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7954 t = TREE_OPERAND (t, 0);
7956 if (TREE_TYPE (t) != ptrtype)
7957 t = build1 (NOP_EXPR, ptrtype, t);
7959 else if (!in_fold)
7961 tree base = t;
7963 while (handled_component_p (base))
7964 base = TREE_OPERAND (base, 0);
7966 if (DECL_P (base))
7967 TREE_ADDRESSABLE (base) = 1;
7969 t = build1 (ADDR_EXPR, ptrtype, t);
7971 else
7972 t = build1 (ADDR_EXPR, ptrtype, t);
7974 return t;
7977 /* Build an expression for the address of T with type PTRTYPE. This
7978 function modifies the input parameter 'T' by sometimes setting the
7979 TREE_ADDRESSABLE flag. */
7981 tree
7982 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7984 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7987 /* Build an expression for the address of T. This function modifies
7988 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7989 flag. When called from fold functions, use fold_addr_expr instead. */
7991 tree
7992 build_fold_addr_expr (tree t)
7994 return build_fold_addr_expr_with_type_1 (t,
7995 build_pointer_type (TREE_TYPE (t)),
7996 false);
7999 /* Same as build_fold_addr_expr, builds an expression for the address
8000 of T, but avoids touching the input node 't'. Fold functions
8001 should use this version. */
8003 static tree
8004 fold_addr_expr (tree t)
8006 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8008 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
8011 /* Fold a unary expression of code CODE and type TYPE with operand
8012 OP0. Return the folded expression if folding is successful.
8013 Otherwise, return NULL_TREE. */
8015 tree
8016 fold_unary (enum tree_code code, tree type, tree op0)
8018 tree tem;
8019 tree arg0;
8020 enum tree_code_class kind = TREE_CODE_CLASS (code);
8022 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8023 && TREE_CODE_LENGTH (code) == 1);
8025 arg0 = op0;
8026 if (arg0)
8028 if (CONVERT_EXPR_CODE_P (code)
8029 || code == FLOAT_EXPR || code == ABS_EXPR)
8031 /* Don't use STRIP_NOPS, because signedness of argument type
8032 matters. */
8033 STRIP_SIGN_NOPS (arg0);
8035 else
8037 /* Strip any conversions that don't change the mode. This
8038 is safe for every expression, except for a comparison
8039 expression because its signedness is derived from its
8040 operands.
8042 Note that this is done as an internal manipulation within
8043 the constant folder, in order to find the simplest
8044 representation of the arguments so that their form can be
8045 studied. In any cases, the appropriate type conversions
8046 should be put back in the tree that will get out of the
8047 constant folder. */
8048 STRIP_NOPS (arg0);
8052 if (TREE_CODE_CLASS (code) == tcc_unary)
8054 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8055 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8056 fold_build1 (code, type,
8057 fold_convert (TREE_TYPE (op0),
8058 TREE_OPERAND (arg0, 1))));
8059 else if (TREE_CODE (arg0) == COND_EXPR)
8061 tree arg01 = TREE_OPERAND (arg0, 1);
8062 tree arg02 = TREE_OPERAND (arg0, 2);
8063 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8064 arg01 = fold_build1 (code, type,
8065 fold_convert (TREE_TYPE (op0), arg01));
8066 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8067 arg02 = fold_build1 (code, type,
8068 fold_convert (TREE_TYPE (op0), arg02));
8069 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8070 arg01, arg02);
8072 /* If this was a conversion, and all we did was to move into
8073 inside the COND_EXPR, bring it back out. But leave it if
8074 it is a conversion from integer to integer and the
8075 result precision is no wider than a word since such a
8076 conversion is cheap and may be optimized away by combine,
8077 while it couldn't if it were outside the COND_EXPR. Then return
8078 so we don't get into an infinite recursion loop taking the
8079 conversion out and then back in. */
8081 if ((CONVERT_EXPR_CODE_P (code)
8082 || code == NON_LVALUE_EXPR)
8083 && TREE_CODE (tem) == COND_EXPR
8084 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8085 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8086 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8087 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8088 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8089 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8090 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8091 && (INTEGRAL_TYPE_P
8092 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8093 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8094 || flag_syntax_only))
8095 tem = build1 (code, type,
8096 build3 (COND_EXPR,
8097 TREE_TYPE (TREE_OPERAND
8098 (TREE_OPERAND (tem, 1), 0)),
8099 TREE_OPERAND (tem, 0),
8100 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8101 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8102 return tem;
8104 else if (COMPARISON_CLASS_P (arg0))
8106 if (TREE_CODE (type) == BOOLEAN_TYPE)
8108 arg0 = copy_node (arg0);
8109 TREE_TYPE (arg0) = type;
8110 return arg0;
8112 else if (TREE_CODE (type) != INTEGER_TYPE)
8113 return fold_build3 (COND_EXPR, type, arg0,
8114 fold_build1 (code, type,
8115 integer_one_node),
8116 fold_build1 (code, type,
8117 integer_zero_node));
8121 switch (code)
8123 case PAREN_EXPR:
8124 /* Re-association barriers around constants and other re-association
8125 barriers can be removed. */
8126 if (CONSTANT_CLASS_P (op0)
8127 || TREE_CODE (op0) == PAREN_EXPR)
8128 return fold_convert (type, op0);
8129 return NULL_TREE;
8131 CASE_CONVERT:
8132 case FLOAT_EXPR:
8133 case FIX_TRUNC_EXPR:
8134 if (TREE_TYPE (op0) == type)
8135 return op0;
8137 /* If we have (type) (a CMP b) and type is an integral type, return
8138 new expression involving the new type. */
8139 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8140 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8141 TREE_OPERAND (op0, 1));
8143 /* Handle cases of two conversions in a row. */
8144 if (CONVERT_EXPR_P (op0))
8146 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8147 tree inter_type = TREE_TYPE (op0);
8148 int inside_int = INTEGRAL_TYPE_P (inside_type);
8149 int inside_ptr = POINTER_TYPE_P (inside_type);
8150 int inside_float = FLOAT_TYPE_P (inside_type);
8151 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8152 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8153 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8154 int inter_int = INTEGRAL_TYPE_P (inter_type);
8155 int inter_ptr = POINTER_TYPE_P (inter_type);
8156 int inter_float = FLOAT_TYPE_P (inter_type);
8157 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8158 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8159 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8160 int final_int = INTEGRAL_TYPE_P (type);
8161 int final_ptr = POINTER_TYPE_P (type);
8162 int final_float = FLOAT_TYPE_P (type);
8163 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8164 unsigned int final_prec = TYPE_PRECISION (type);
8165 int final_unsignedp = TYPE_UNSIGNED (type);
8167 /* In addition to the cases of two conversions in a row
8168 handled below, if we are converting something to its own
8169 type via an object of identical or wider precision, neither
8170 conversion is needed. */
8171 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8172 && (((inter_int || inter_ptr) && final_int)
8173 || (inter_float && final_float))
8174 && inter_prec >= final_prec)
8175 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8177 /* Likewise, if the intermediate and final types are either both
8178 float or both integer, we don't need the middle conversion if
8179 it is wider than the final type and doesn't change the signedness
8180 (for integers). Avoid this if the final type is a pointer
8181 since then we sometimes need the inner conversion. Likewise if
8182 the outer has a precision not equal to the size of its mode. */
8183 if (((inter_int && inside_int)
8184 || (inter_float && inside_float)
8185 || (inter_vec && inside_vec))
8186 && inter_prec >= inside_prec
8187 && (inter_float || inter_vec
8188 || inter_unsignedp == inside_unsignedp)
8189 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8190 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8191 && ! final_ptr
8192 && (! final_vec || inter_prec == inside_prec))
8193 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8195 /* If we have a sign-extension of a zero-extended value, we can
8196 replace that by a single zero-extension. */
8197 if (inside_int && inter_int && final_int
8198 && inside_prec < inter_prec && inter_prec < final_prec
8199 && inside_unsignedp && !inter_unsignedp)
8200 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8202 /* Two conversions in a row are not needed unless:
8203 - some conversion is floating-point (overstrict for now), or
8204 - some conversion is a vector (overstrict for now), or
8205 - the intermediate type is narrower than both initial and
8206 final, or
8207 - the intermediate type and innermost type differ in signedness,
8208 and the outermost type is wider than the intermediate, or
8209 - the initial type is a pointer type and the precisions of the
8210 intermediate and final types differ, or
8211 - the final type is a pointer type and the precisions of the
8212 initial and intermediate types differ. */
8213 if (! inside_float && ! inter_float && ! final_float
8214 && ! inside_vec && ! inter_vec && ! final_vec
8215 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8216 && ! (inside_int && inter_int
8217 && inter_unsignedp != inside_unsignedp
8218 && inter_prec < final_prec)
8219 && ((inter_unsignedp && inter_prec > inside_prec)
8220 == (final_unsignedp && final_prec > inter_prec))
8221 && ! (inside_ptr && inter_prec != final_prec)
8222 && ! (final_ptr && inside_prec != inter_prec)
8223 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8224 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8225 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8228 /* Handle (T *)&A.B.C for A being of type T and B and C
8229 living at offset zero. This occurs frequently in
8230 C++ upcasting and then accessing the base. */
8231 if (TREE_CODE (op0) == ADDR_EXPR
8232 && POINTER_TYPE_P (type)
8233 && handled_component_p (TREE_OPERAND (op0, 0)))
8235 HOST_WIDE_INT bitsize, bitpos;
8236 tree offset;
8237 enum machine_mode mode;
8238 int unsignedp, volatilep;
8239 tree base = TREE_OPERAND (op0, 0);
8240 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8241 &mode, &unsignedp, &volatilep, false);
8242 /* If the reference was to a (constant) zero offset, we can use
8243 the address of the base if it has the same base type
8244 as the result type. */
8245 if (! offset && bitpos == 0
8246 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8247 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8248 return fold_convert (type, fold_addr_expr (base));
8251 if (TREE_CODE (op0) == MODIFY_EXPR
8252 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8253 /* Detect assigning a bitfield. */
8254 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8255 && DECL_BIT_FIELD
8256 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8258 /* Don't leave an assignment inside a conversion
8259 unless assigning a bitfield. */
8260 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8261 /* First do the assignment, then return converted constant. */
8262 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8263 TREE_NO_WARNING (tem) = 1;
8264 TREE_USED (tem) = 1;
8265 return tem;
8268 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8269 constants (if x has signed type, the sign bit cannot be set
8270 in c). This folds extension into the BIT_AND_EXPR.
8271 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8272 very likely don't have maximal range for their precision and this
8273 transformation effectively doesn't preserve non-maximal ranges. */
8274 if (TREE_CODE (type) == INTEGER_TYPE
8275 && TREE_CODE (op0) == BIT_AND_EXPR
8276 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8277 /* Not if the conversion is to the sub-type. */
8278 && TREE_TYPE (type) != TREE_TYPE (op0))
8280 tree and = op0;
8281 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8282 int change = 0;
8284 if (TYPE_UNSIGNED (TREE_TYPE (and))
8285 || (TYPE_PRECISION (type)
8286 <= TYPE_PRECISION (TREE_TYPE (and))))
8287 change = 1;
8288 else if (TYPE_PRECISION (TREE_TYPE (and1))
8289 <= HOST_BITS_PER_WIDE_INT
8290 && host_integerp (and1, 1))
8292 unsigned HOST_WIDE_INT cst;
8294 cst = tree_low_cst (and1, 1);
8295 cst &= (HOST_WIDE_INT) -1
8296 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8297 change = (cst == 0);
8298 #ifdef LOAD_EXTEND_OP
8299 if (change
8300 && !flag_syntax_only
8301 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8302 == ZERO_EXTEND))
8304 tree uns = unsigned_type_for (TREE_TYPE (and0));
8305 and0 = fold_convert (uns, and0);
8306 and1 = fold_convert (uns, and1);
8308 #endif
8310 if (change)
8312 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8313 TREE_INT_CST_HIGH (and1), 0,
8314 TREE_OVERFLOW (and1));
8315 return fold_build2 (BIT_AND_EXPR, type,
8316 fold_convert (type, and0), tem);
8320 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8321 when one of the new casts will fold away. Conservatively we assume
8322 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8323 if (POINTER_TYPE_P (type)
8324 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8325 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8326 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8327 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8329 tree arg00 = TREE_OPERAND (arg0, 0);
8330 tree arg01 = TREE_OPERAND (arg0, 1);
8332 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8333 fold_convert (sizetype, arg01));
8336 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8337 of the same precision, and X is an integer type not narrower than
8338 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8339 if (INTEGRAL_TYPE_P (type)
8340 && TREE_CODE (op0) == BIT_NOT_EXPR
8341 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8342 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8343 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8345 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8346 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8347 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8348 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8351 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8352 type of X and Y (integer types only). */
8353 if (INTEGRAL_TYPE_P (type)
8354 && TREE_CODE (op0) == MULT_EXPR
8355 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8356 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8358 /* Be careful not to introduce new overflows. */
8359 tree mult_type;
8360 if (TYPE_OVERFLOW_WRAPS (type))
8361 mult_type = type;
8362 else
8363 mult_type = unsigned_type_for (type);
8365 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8367 tem = fold_build2 (MULT_EXPR, mult_type,
8368 fold_convert (mult_type,
8369 TREE_OPERAND (op0, 0)),
8370 fold_convert (mult_type,
8371 TREE_OPERAND (op0, 1)));
8372 return fold_convert (type, tem);
8376 tem = fold_convert_const (code, type, op0);
8377 return tem ? tem : NULL_TREE;
8379 case FIXED_CONVERT_EXPR:
8380 tem = fold_convert_const (code, type, arg0);
8381 return tem ? tem : NULL_TREE;
8383 case VIEW_CONVERT_EXPR:
8384 if (TREE_TYPE (op0) == type)
8385 return op0;
8386 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8387 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8389 /* For integral conversions with the same precision or pointer
8390 conversions use a NOP_EXPR instead. */
8391 if ((INTEGRAL_TYPE_P (type)
8392 || POINTER_TYPE_P (type))
8393 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8394 || POINTER_TYPE_P (TREE_TYPE (op0)))
8395 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8396 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8397 a sub-type to its base type as generated by the Ada FE. */
8398 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8399 && TREE_TYPE (TREE_TYPE (op0))))
8400 return fold_convert (type, op0);
8402 /* Strip inner integral conversions that do not change the precision. */
8403 if (CONVERT_EXPR_P (op0)
8404 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8405 || POINTER_TYPE_P (TREE_TYPE (op0)))
8406 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8407 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8408 && (TYPE_PRECISION (TREE_TYPE (op0))
8409 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8410 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8412 return fold_view_convert_expr (type, op0);
8414 case NEGATE_EXPR:
8415 tem = fold_negate_expr (arg0);
8416 if (tem)
8417 return fold_convert (type, tem);
8418 return NULL_TREE;
8420 case ABS_EXPR:
8421 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8422 return fold_abs_const (arg0, type);
8423 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8424 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8425 /* Convert fabs((double)float) into (double)fabsf(float). */
8426 else if (TREE_CODE (arg0) == NOP_EXPR
8427 && TREE_CODE (type) == REAL_TYPE)
8429 tree targ0 = strip_float_extensions (arg0);
8430 if (targ0 != arg0)
8431 return fold_convert (type, fold_build1 (ABS_EXPR,
8432 TREE_TYPE (targ0),
8433 targ0));
8435 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8436 else if (TREE_CODE (arg0) == ABS_EXPR)
8437 return arg0;
8438 else if (tree_expr_nonnegative_p (arg0))
8439 return arg0;
8441 /* Strip sign ops from argument. */
8442 if (TREE_CODE (type) == REAL_TYPE)
8444 tem = fold_strip_sign_ops (arg0);
8445 if (tem)
8446 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8448 return NULL_TREE;
8450 case CONJ_EXPR:
8451 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8452 return fold_convert (type, arg0);
8453 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8455 tree itype = TREE_TYPE (type);
8456 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8457 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8458 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8460 if (TREE_CODE (arg0) == COMPLEX_CST)
8462 tree itype = TREE_TYPE (type);
8463 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8464 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8465 return build_complex (type, rpart, negate_expr (ipart));
8467 if (TREE_CODE (arg0) == CONJ_EXPR)
8468 return fold_convert (type, TREE_OPERAND (arg0, 0));
8469 return NULL_TREE;
8471 case BIT_NOT_EXPR:
8472 if (TREE_CODE (arg0) == INTEGER_CST)
8473 return fold_not_const (arg0, type);
8474 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8475 return fold_convert (type, TREE_OPERAND (arg0, 0));
8476 /* Convert ~ (-A) to A - 1. */
8477 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8478 return fold_build2 (MINUS_EXPR, type,
8479 fold_convert (type, TREE_OPERAND (arg0, 0)),
8480 build_int_cst (type, 1));
8481 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8482 else if (INTEGRAL_TYPE_P (type)
8483 && ((TREE_CODE (arg0) == MINUS_EXPR
8484 && integer_onep (TREE_OPERAND (arg0, 1)))
8485 || (TREE_CODE (arg0) == PLUS_EXPR
8486 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8487 return fold_build1 (NEGATE_EXPR, type,
8488 fold_convert (type, TREE_OPERAND (arg0, 0)));
8489 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8490 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8491 && (tem = fold_unary (BIT_NOT_EXPR, type,
8492 fold_convert (type,
8493 TREE_OPERAND (arg0, 0)))))
8494 return fold_build2 (BIT_XOR_EXPR, type, tem,
8495 fold_convert (type, TREE_OPERAND (arg0, 1)));
8496 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8497 && (tem = fold_unary (BIT_NOT_EXPR, type,
8498 fold_convert (type,
8499 TREE_OPERAND (arg0, 1)))))
8500 return fold_build2 (BIT_XOR_EXPR, type,
8501 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8502 /* Perform BIT_NOT_EXPR on each element individually. */
8503 else if (TREE_CODE (arg0) == VECTOR_CST)
8505 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8506 int count = TYPE_VECTOR_SUBPARTS (type), i;
8508 for (i = 0; i < count; i++)
8510 if (elements)
8512 elem = TREE_VALUE (elements);
8513 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8514 if (elem == NULL_TREE)
8515 break;
8516 elements = TREE_CHAIN (elements);
8518 else
8519 elem = build_int_cst (TREE_TYPE (type), -1);
8520 list = tree_cons (NULL_TREE, elem, list);
8522 if (i == count)
8523 return build_vector (type, nreverse (list));
8526 return NULL_TREE;
8528 case TRUTH_NOT_EXPR:
8529 /* The argument to invert_truthvalue must have Boolean type. */
8530 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8531 arg0 = fold_convert (boolean_type_node, arg0);
8533 /* Note that the operand of this must be an int
8534 and its values must be 0 or 1.
8535 ("true" is a fixed value perhaps depending on the language,
8536 but we don't handle values other than 1 correctly yet.) */
8537 tem = fold_truth_not_expr (arg0);
8538 if (!tem)
8539 return NULL_TREE;
8540 return fold_convert (type, tem);
8542 case REALPART_EXPR:
8543 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8544 return fold_convert (type, arg0);
8545 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8546 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8547 TREE_OPERAND (arg0, 1));
8548 if (TREE_CODE (arg0) == COMPLEX_CST)
8549 return fold_convert (type, TREE_REALPART (arg0));
8550 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8552 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8553 tem = fold_build2 (TREE_CODE (arg0), itype,
8554 fold_build1 (REALPART_EXPR, itype,
8555 TREE_OPERAND (arg0, 0)),
8556 fold_build1 (REALPART_EXPR, itype,
8557 TREE_OPERAND (arg0, 1)));
8558 return fold_convert (type, tem);
8560 if (TREE_CODE (arg0) == CONJ_EXPR)
8562 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8563 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8564 return fold_convert (type, tem);
8566 if (TREE_CODE (arg0) == CALL_EXPR)
8568 tree fn = get_callee_fndecl (arg0);
8569 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8570 switch (DECL_FUNCTION_CODE (fn))
8572 CASE_FLT_FN (BUILT_IN_CEXPI):
8573 fn = mathfn_built_in (type, BUILT_IN_COS);
8574 if (fn)
8575 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8576 break;
8578 default:
8579 break;
8582 return NULL_TREE;
8584 case IMAGPART_EXPR:
8585 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8586 return fold_convert (type, integer_zero_node);
8587 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8588 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8589 TREE_OPERAND (arg0, 0));
8590 if (TREE_CODE (arg0) == COMPLEX_CST)
8591 return fold_convert (type, TREE_IMAGPART (arg0));
8592 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8594 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8595 tem = fold_build2 (TREE_CODE (arg0), itype,
8596 fold_build1 (IMAGPART_EXPR, itype,
8597 TREE_OPERAND (arg0, 0)),
8598 fold_build1 (IMAGPART_EXPR, itype,
8599 TREE_OPERAND (arg0, 1)));
8600 return fold_convert (type, tem);
8602 if (TREE_CODE (arg0) == CONJ_EXPR)
8604 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8605 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8606 return fold_convert (type, negate_expr (tem));
8608 if (TREE_CODE (arg0) == CALL_EXPR)
8610 tree fn = get_callee_fndecl (arg0);
8611 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8612 switch (DECL_FUNCTION_CODE (fn))
8614 CASE_FLT_FN (BUILT_IN_CEXPI):
8615 fn = mathfn_built_in (type, BUILT_IN_SIN);
8616 if (fn)
8617 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8618 break;
8620 default:
8621 break;
8624 return NULL_TREE;
8626 default:
8627 return NULL_TREE;
8628 } /* switch (code) */
8632 /* If the operation was a conversion do _not_ mark a resulting constant
8633 with TREE_OVERFLOW if the original constant was not. These conversions
8634 have implementation defined behavior and retaining the TREE_OVERFLOW
8635 flag here would confuse later passes such as VRP. */
8636 tree
8637 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8639 tree res = fold_unary (code, type, op0);
8640 if (res
8641 && TREE_CODE (res) == INTEGER_CST
8642 && TREE_CODE (op0) == INTEGER_CST
8643 && CONVERT_EXPR_CODE_P (code))
8644 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8646 return res;
8649 /* Fold a binary expression of code CODE and type TYPE with operands
8650 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8651 Return the folded expression if folding is successful. Otherwise,
8652 return NULL_TREE. */
8654 static tree
8655 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8657 enum tree_code compl_code;
8659 if (code == MIN_EXPR)
8660 compl_code = MAX_EXPR;
8661 else if (code == MAX_EXPR)
8662 compl_code = MIN_EXPR;
8663 else
8664 gcc_unreachable ();
8666 /* MIN (MAX (a, b), b) == b. */
8667 if (TREE_CODE (op0) == compl_code
8668 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8669 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8671 /* MIN (MAX (b, a), b) == b. */
8672 if (TREE_CODE (op0) == compl_code
8673 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8674 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8675 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8677 /* MIN (a, MAX (a, b)) == a. */
8678 if (TREE_CODE (op1) == compl_code
8679 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8680 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8681 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8683 /* MIN (a, MAX (b, a)) == a. */
8684 if (TREE_CODE (op1) == compl_code
8685 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8686 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8687 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8689 return NULL_TREE;
8692 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8693 by changing CODE to reduce the magnitude of constants involved in
8694 ARG0 of the comparison.
8695 Returns a canonicalized comparison tree if a simplification was
8696 possible, otherwise returns NULL_TREE.
8697 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8698 valid if signed overflow is undefined. */
8700 static tree
8701 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8702 tree arg0, tree arg1,
8703 bool *strict_overflow_p)
8705 enum tree_code code0 = TREE_CODE (arg0);
8706 tree t, cst0 = NULL_TREE;
8707 int sgn0;
8708 bool swap = false;
8710 /* Match A +- CST code arg1 and CST code arg1. We can change the
8711 first form only if overflow is undefined. */
8712 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8713 /* In principle pointers also have undefined overflow behavior,
8714 but that causes problems elsewhere. */
8715 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8716 && (code0 == MINUS_EXPR
8717 || code0 == PLUS_EXPR)
8718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8719 || code0 == INTEGER_CST))
8720 return NULL_TREE;
8722 /* Identify the constant in arg0 and its sign. */
8723 if (code0 == INTEGER_CST)
8724 cst0 = arg0;
8725 else
8726 cst0 = TREE_OPERAND (arg0, 1);
8727 sgn0 = tree_int_cst_sgn (cst0);
8729 /* Overflowed constants and zero will cause problems. */
8730 if (integer_zerop (cst0)
8731 || TREE_OVERFLOW (cst0))
8732 return NULL_TREE;
8734 /* See if we can reduce the magnitude of the constant in
8735 arg0 by changing the comparison code. */
8736 if (code0 == INTEGER_CST)
8738 /* CST <= arg1 -> CST-1 < arg1. */
8739 if (code == LE_EXPR && sgn0 == 1)
8740 code = LT_EXPR;
8741 /* -CST < arg1 -> -CST-1 <= arg1. */
8742 else if (code == LT_EXPR && sgn0 == -1)
8743 code = LE_EXPR;
8744 /* CST > arg1 -> CST-1 >= arg1. */
8745 else if (code == GT_EXPR && sgn0 == 1)
8746 code = GE_EXPR;
8747 /* -CST >= arg1 -> -CST-1 > arg1. */
8748 else if (code == GE_EXPR && sgn0 == -1)
8749 code = GT_EXPR;
8750 else
8751 return NULL_TREE;
8752 /* arg1 code' CST' might be more canonical. */
8753 swap = true;
8755 else
8757 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8758 if (code == LT_EXPR
8759 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8760 code = LE_EXPR;
8761 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8762 else if (code == GT_EXPR
8763 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8764 code = GE_EXPR;
8765 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8766 else if (code == LE_EXPR
8767 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8768 code = LT_EXPR;
8769 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8770 else if (code == GE_EXPR
8771 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8772 code = GT_EXPR;
8773 else
8774 return NULL_TREE;
8775 *strict_overflow_p = true;
8778 /* Now build the constant reduced in magnitude. But not if that
8779 would produce one outside of its types range. */
8780 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8781 && ((sgn0 == 1
8782 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8783 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8784 || (sgn0 == -1
8785 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8786 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8787 /* We cannot swap the comparison here as that would cause us to
8788 endlessly recurse. */
8789 return NULL_TREE;
8791 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8792 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8793 if (code0 != INTEGER_CST)
8794 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8796 /* If swapping might yield to a more canonical form, do so. */
8797 if (swap)
8798 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8799 else
8800 return fold_build2 (code, type, t, arg1);
8803 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8804 overflow further. Try to decrease the magnitude of constants involved
8805 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8806 and put sole constants at the second argument position.
8807 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8809 static tree
8810 maybe_canonicalize_comparison (enum tree_code code, tree type,
8811 tree arg0, tree arg1)
8813 tree t;
8814 bool strict_overflow_p;
8815 const char * const warnmsg = G_("assuming signed overflow does not occur "
8816 "when reducing constant in comparison");
8818 /* Try canonicalization by simplifying arg0. */
8819 strict_overflow_p = false;
8820 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8821 &strict_overflow_p);
8822 if (t)
8824 if (strict_overflow_p)
8825 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8826 return t;
8829 /* Try canonicalization by simplifying arg1 using the swapped
8830 comparison. */
8831 code = swap_tree_comparison (code);
8832 strict_overflow_p = false;
8833 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8834 &strict_overflow_p);
8835 if (t && strict_overflow_p)
8836 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8837 return t;
8840 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8841 space. This is used to avoid issuing overflow warnings for
8842 expressions like &p->x which can not wrap. */
8844 static bool
8845 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8847 unsigned HOST_WIDE_INT offset_low, total_low;
8848 HOST_WIDE_INT size, offset_high, total_high;
8850 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8851 return true;
8853 if (bitpos < 0)
8854 return true;
8856 if (offset == NULL_TREE)
8858 offset_low = 0;
8859 offset_high = 0;
8861 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8862 return true;
8863 else
8865 offset_low = TREE_INT_CST_LOW (offset);
8866 offset_high = TREE_INT_CST_HIGH (offset);
8869 if (add_double_with_sign (offset_low, offset_high,
8870 bitpos / BITS_PER_UNIT, 0,
8871 &total_low, &total_high,
8872 true))
8873 return true;
8875 if (total_high != 0)
8876 return true;
8878 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8879 if (size <= 0)
8880 return true;
8882 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8883 array. */
8884 if (TREE_CODE (base) == ADDR_EXPR)
8886 HOST_WIDE_INT base_size;
8888 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8889 if (base_size > 0 && size < base_size)
8890 size = base_size;
8893 return total_low > (unsigned HOST_WIDE_INT) size;
8896 /* Subroutine of fold_binary. This routine performs all of the
8897 transformations that are common to the equality/inequality
8898 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8899 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8900 fold_binary should call fold_binary. Fold a comparison with
8901 tree code CODE and type TYPE with operands OP0 and OP1. Return
8902 the folded comparison or NULL_TREE. */
8904 static tree
8905 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8907 tree arg0, arg1, tem;
8909 arg0 = op0;
8910 arg1 = op1;
8912 STRIP_SIGN_NOPS (arg0);
8913 STRIP_SIGN_NOPS (arg1);
8915 tem = fold_relational_const (code, type, arg0, arg1);
8916 if (tem != NULL_TREE)
8917 return tem;
8919 /* If one arg is a real or integer constant, put it last. */
8920 if (tree_swap_operands_p (arg0, arg1, true))
8921 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8923 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8924 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8925 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8926 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8927 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8928 && (TREE_CODE (arg1) == INTEGER_CST
8929 && !TREE_OVERFLOW (arg1)))
8931 tree const1 = TREE_OPERAND (arg0, 1);
8932 tree const2 = arg1;
8933 tree variable = TREE_OPERAND (arg0, 0);
8934 tree lhs;
8935 int lhs_add;
8936 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8938 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8939 TREE_TYPE (arg1), const2, const1);
8941 /* If the constant operation overflowed this can be
8942 simplified as a comparison against INT_MAX/INT_MIN. */
8943 if (TREE_CODE (lhs) == INTEGER_CST
8944 && TREE_OVERFLOW (lhs))
8946 int const1_sgn = tree_int_cst_sgn (const1);
8947 enum tree_code code2 = code;
8949 /* Get the sign of the constant on the lhs if the
8950 operation were VARIABLE + CONST1. */
8951 if (TREE_CODE (arg0) == MINUS_EXPR)
8952 const1_sgn = -const1_sgn;
8954 /* The sign of the constant determines if we overflowed
8955 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8956 Canonicalize to the INT_MIN overflow by swapping the comparison
8957 if necessary. */
8958 if (const1_sgn == -1)
8959 code2 = swap_tree_comparison (code);
8961 /* We now can look at the canonicalized case
8962 VARIABLE + 1 CODE2 INT_MIN
8963 and decide on the result. */
8964 if (code2 == LT_EXPR
8965 || code2 == LE_EXPR
8966 || code2 == EQ_EXPR)
8967 return omit_one_operand (type, boolean_false_node, variable);
8968 else if (code2 == NE_EXPR
8969 || code2 == GE_EXPR
8970 || code2 == GT_EXPR)
8971 return omit_one_operand (type, boolean_true_node, variable);
8974 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8975 && (TREE_CODE (lhs) != INTEGER_CST
8976 || !TREE_OVERFLOW (lhs)))
8978 fold_overflow_warning (("assuming signed overflow does not occur "
8979 "when changing X +- C1 cmp C2 to "
8980 "X cmp C1 +- C2"),
8981 WARN_STRICT_OVERFLOW_COMPARISON);
8982 return fold_build2 (code, type, variable, lhs);
8986 /* For comparisons of pointers we can decompose it to a compile time
8987 comparison of the base objects and the offsets into the object.
8988 This requires at least one operand being an ADDR_EXPR or a
8989 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8990 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8991 && (TREE_CODE (arg0) == ADDR_EXPR
8992 || TREE_CODE (arg1) == ADDR_EXPR
8993 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8994 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8996 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8997 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8998 enum machine_mode mode;
8999 int volatilep, unsignedp;
9000 bool indirect_base0 = false, indirect_base1 = false;
9002 /* Get base and offset for the access. Strip ADDR_EXPR for
9003 get_inner_reference, but put it back by stripping INDIRECT_REF
9004 off the base object if possible. indirect_baseN will be true
9005 if baseN is not an address but refers to the object itself. */
9006 base0 = arg0;
9007 if (TREE_CODE (arg0) == ADDR_EXPR)
9009 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9010 &bitsize, &bitpos0, &offset0, &mode,
9011 &unsignedp, &volatilep, false);
9012 if (TREE_CODE (base0) == INDIRECT_REF)
9013 base0 = TREE_OPERAND (base0, 0);
9014 else
9015 indirect_base0 = true;
9017 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9019 base0 = TREE_OPERAND (arg0, 0);
9020 offset0 = TREE_OPERAND (arg0, 1);
9023 base1 = arg1;
9024 if (TREE_CODE (arg1) == ADDR_EXPR)
9026 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9027 &bitsize, &bitpos1, &offset1, &mode,
9028 &unsignedp, &volatilep, false);
9029 if (TREE_CODE (base1) == INDIRECT_REF)
9030 base1 = TREE_OPERAND (base1, 0);
9031 else
9032 indirect_base1 = true;
9034 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9036 base1 = TREE_OPERAND (arg1, 0);
9037 offset1 = TREE_OPERAND (arg1, 1);
9040 /* If we have equivalent bases we might be able to simplify. */
9041 if (indirect_base0 == indirect_base1
9042 && operand_equal_p (base0, base1, 0))
9044 /* We can fold this expression to a constant if the non-constant
9045 offset parts are equal. */
9046 if ((offset0 == offset1
9047 || (offset0 && offset1
9048 && operand_equal_p (offset0, offset1, 0)))
9049 && (code == EQ_EXPR
9050 || code == NE_EXPR
9051 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9054 if (code != EQ_EXPR
9055 && code != NE_EXPR
9056 && bitpos0 != bitpos1
9057 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9058 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9059 fold_overflow_warning (("assuming pointer wraparound does not "
9060 "occur when comparing P +- C1 with "
9061 "P +- C2"),
9062 WARN_STRICT_OVERFLOW_CONDITIONAL);
9064 switch (code)
9066 case EQ_EXPR:
9067 return constant_boolean_node (bitpos0 == bitpos1, type);
9068 case NE_EXPR:
9069 return constant_boolean_node (bitpos0 != bitpos1, type);
9070 case LT_EXPR:
9071 return constant_boolean_node (bitpos0 < bitpos1, type);
9072 case LE_EXPR:
9073 return constant_boolean_node (bitpos0 <= bitpos1, type);
9074 case GE_EXPR:
9075 return constant_boolean_node (bitpos0 >= bitpos1, type);
9076 case GT_EXPR:
9077 return constant_boolean_node (bitpos0 > bitpos1, type);
9078 default:;
9081 /* We can simplify the comparison to a comparison of the variable
9082 offset parts if the constant offset parts are equal.
9083 Be careful to use signed size type here because otherwise we
9084 mess with array offsets in the wrong way. This is possible
9085 because pointer arithmetic is restricted to retain within an
9086 object and overflow on pointer differences is undefined as of
9087 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9088 else if (bitpos0 == bitpos1
9089 && ((code == EQ_EXPR || code == NE_EXPR)
9090 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9092 tree signed_size_type_node;
9093 signed_size_type_node = signed_type_for (size_type_node);
9095 /* By converting to signed size type we cover middle-end pointer
9096 arithmetic which operates on unsigned pointer types of size
9097 type size and ARRAY_REF offsets which are properly sign or
9098 zero extended from their type in case it is narrower than
9099 size type. */
9100 if (offset0 == NULL_TREE)
9101 offset0 = build_int_cst (signed_size_type_node, 0);
9102 else
9103 offset0 = fold_convert (signed_size_type_node, offset0);
9104 if (offset1 == NULL_TREE)
9105 offset1 = build_int_cst (signed_size_type_node, 0);
9106 else
9107 offset1 = fold_convert (signed_size_type_node, offset1);
9109 if (code != EQ_EXPR
9110 && code != NE_EXPR
9111 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9112 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9113 fold_overflow_warning (("assuming pointer wraparound does not "
9114 "occur when comparing P +- C1 with "
9115 "P +- C2"),
9116 WARN_STRICT_OVERFLOW_COMPARISON);
9118 return fold_build2 (code, type, offset0, offset1);
9121 /* For non-equal bases we can simplify if they are addresses
9122 of local binding decls or constants. */
9123 else if (indirect_base0 && indirect_base1
9124 /* We know that !operand_equal_p (base0, base1, 0)
9125 because the if condition was false. But make
9126 sure two decls are not the same. */
9127 && base0 != base1
9128 && TREE_CODE (arg0) == ADDR_EXPR
9129 && TREE_CODE (arg1) == ADDR_EXPR
9130 && (((TREE_CODE (base0) == VAR_DECL
9131 || TREE_CODE (base0) == PARM_DECL)
9132 && (targetm.binds_local_p (base0)
9133 || CONSTANT_CLASS_P (base1)))
9134 || CONSTANT_CLASS_P (base0))
9135 && (((TREE_CODE (base1) == VAR_DECL
9136 || TREE_CODE (base1) == PARM_DECL)
9137 && (targetm.binds_local_p (base1)
9138 || CONSTANT_CLASS_P (base0)))
9139 || CONSTANT_CLASS_P (base1)))
9141 if (code == EQ_EXPR)
9142 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9143 else if (code == NE_EXPR)
9144 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9146 /* For equal offsets we can simplify to a comparison of the
9147 base addresses. */
9148 else if (bitpos0 == bitpos1
9149 && (indirect_base0
9150 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9151 && (indirect_base1
9152 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9153 && ((offset0 == offset1)
9154 || (offset0 && offset1
9155 && operand_equal_p (offset0, offset1, 0))))
9157 if (indirect_base0)
9158 base0 = fold_addr_expr (base0);
9159 if (indirect_base1)
9160 base1 = fold_addr_expr (base1);
9161 return fold_build2 (code, type, base0, base1);
9165 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9166 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9167 the resulting offset is smaller in absolute value than the
9168 original one. */
9169 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9170 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9171 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9172 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9173 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9174 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9175 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9177 tree const1 = TREE_OPERAND (arg0, 1);
9178 tree const2 = TREE_OPERAND (arg1, 1);
9179 tree variable1 = TREE_OPERAND (arg0, 0);
9180 tree variable2 = TREE_OPERAND (arg1, 0);
9181 tree cst;
9182 const char * const warnmsg = G_("assuming signed overflow does not "
9183 "occur when combining constants around "
9184 "a comparison");
9186 /* Put the constant on the side where it doesn't overflow and is
9187 of lower absolute value than before. */
9188 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9189 ? MINUS_EXPR : PLUS_EXPR,
9190 const2, const1, 0);
9191 if (!TREE_OVERFLOW (cst)
9192 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9194 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9195 return fold_build2 (code, type,
9196 variable1,
9197 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9198 variable2, cst));
9201 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9202 ? MINUS_EXPR : PLUS_EXPR,
9203 const1, const2, 0);
9204 if (!TREE_OVERFLOW (cst)
9205 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9208 return fold_build2 (code, type,
9209 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9210 variable1, cst),
9211 variable2);
9215 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9216 signed arithmetic case. That form is created by the compiler
9217 often enough for folding it to be of value. One example is in
9218 computing loop trip counts after Operator Strength Reduction. */
9219 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9220 && TREE_CODE (arg0) == MULT_EXPR
9221 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9222 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9223 && integer_zerop (arg1))
9225 tree const1 = TREE_OPERAND (arg0, 1);
9226 tree const2 = arg1; /* zero */
9227 tree variable1 = TREE_OPERAND (arg0, 0);
9228 enum tree_code cmp_code = code;
9230 gcc_assert (!integer_zerop (const1));
9232 fold_overflow_warning (("assuming signed overflow does not occur when "
9233 "eliminating multiplication in comparison "
9234 "with zero"),
9235 WARN_STRICT_OVERFLOW_COMPARISON);
9237 /* If const1 is negative we swap the sense of the comparison. */
9238 if (tree_int_cst_sgn (const1) < 0)
9239 cmp_code = swap_tree_comparison (cmp_code);
9241 return fold_build2 (cmp_code, type, variable1, const2);
9244 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9245 if (tem)
9246 return tem;
9248 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9250 tree targ0 = strip_float_extensions (arg0);
9251 tree targ1 = strip_float_extensions (arg1);
9252 tree newtype = TREE_TYPE (targ0);
9254 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9255 newtype = TREE_TYPE (targ1);
9257 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9258 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9259 return fold_build2 (code, type, fold_convert (newtype, targ0),
9260 fold_convert (newtype, targ1));
9262 /* (-a) CMP (-b) -> b CMP a */
9263 if (TREE_CODE (arg0) == NEGATE_EXPR
9264 && TREE_CODE (arg1) == NEGATE_EXPR)
9265 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9266 TREE_OPERAND (arg0, 0));
9268 if (TREE_CODE (arg1) == REAL_CST)
9270 REAL_VALUE_TYPE cst;
9271 cst = TREE_REAL_CST (arg1);
9273 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9274 if (TREE_CODE (arg0) == NEGATE_EXPR)
9275 return fold_build2 (swap_tree_comparison (code), type,
9276 TREE_OPERAND (arg0, 0),
9277 build_real (TREE_TYPE (arg1),
9278 REAL_VALUE_NEGATE (cst)));
9280 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9281 /* a CMP (-0) -> a CMP 0 */
9282 if (REAL_VALUE_MINUS_ZERO (cst))
9283 return fold_build2 (code, type, arg0,
9284 build_real (TREE_TYPE (arg1), dconst0));
9286 /* x != NaN is always true, other ops are always false. */
9287 if (REAL_VALUE_ISNAN (cst)
9288 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9290 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9291 return omit_one_operand (type, tem, arg0);
9294 /* Fold comparisons against infinity. */
9295 if (REAL_VALUE_ISINF (cst))
9297 tem = fold_inf_compare (code, type, arg0, arg1);
9298 if (tem != NULL_TREE)
9299 return tem;
9303 /* If this is a comparison of a real constant with a PLUS_EXPR
9304 or a MINUS_EXPR of a real constant, we can convert it into a
9305 comparison with a revised real constant as long as no overflow
9306 occurs when unsafe_math_optimizations are enabled. */
9307 if (flag_unsafe_math_optimizations
9308 && TREE_CODE (arg1) == REAL_CST
9309 && (TREE_CODE (arg0) == PLUS_EXPR
9310 || TREE_CODE (arg0) == MINUS_EXPR)
9311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9312 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9313 ? MINUS_EXPR : PLUS_EXPR,
9314 arg1, TREE_OPERAND (arg0, 1), 0))
9315 && !TREE_OVERFLOW (tem))
9316 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9318 /* Likewise, we can simplify a comparison of a real constant with
9319 a MINUS_EXPR whose first operand is also a real constant, i.e.
9320 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9321 floating-point types only if -fassociative-math is set. */
9322 if (flag_associative_math
9323 && TREE_CODE (arg1) == REAL_CST
9324 && TREE_CODE (arg0) == MINUS_EXPR
9325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9326 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9327 arg1, 0))
9328 && !TREE_OVERFLOW (tem))
9329 return fold_build2 (swap_tree_comparison (code), type,
9330 TREE_OPERAND (arg0, 1), tem);
9332 /* Fold comparisons against built-in math functions. */
9333 if (TREE_CODE (arg1) == REAL_CST
9334 && flag_unsafe_math_optimizations
9335 && ! flag_errno_math)
9337 enum built_in_function fcode = builtin_mathfn_code (arg0);
9339 if (fcode != END_BUILTINS)
9341 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9342 if (tem != NULL_TREE)
9343 return tem;
9348 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9349 && CONVERT_EXPR_P (arg0))
9351 /* If we are widening one operand of an integer comparison,
9352 see if the other operand is similarly being widened. Perhaps we
9353 can do the comparison in the narrower type. */
9354 tem = fold_widened_comparison (code, type, arg0, arg1);
9355 if (tem)
9356 return tem;
9358 /* Or if we are changing signedness. */
9359 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9360 if (tem)
9361 return tem;
9364 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9365 constant, we can simplify it. */
9366 if (TREE_CODE (arg1) == INTEGER_CST
9367 && (TREE_CODE (arg0) == MIN_EXPR
9368 || TREE_CODE (arg0) == MAX_EXPR)
9369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9371 tem = optimize_minmax_comparison (code, type, op0, op1);
9372 if (tem)
9373 return tem;
9376 /* Simplify comparison of something with itself. (For IEEE
9377 floating-point, we can only do some of these simplifications.) */
9378 if (operand_equal_p (arg0, arg1, 0))
9380 switch (code)
9382 case EQ_EXPR:
9383 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9384 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9385 return constant_boolean_node (1, type);
9386 break;
9388 case GE_EXPR:
9389 case LE_EXPR:
9390 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9391 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9392 return constant_boolean_node (1, type);
9393 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9395 case NE_EXPR:
9396 /* For NE, we can only do this simplification if integer
9397 or we don't honor IEEE floating point NaNs. */
9398 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9399 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9400 break;
9401 /* ... fall through ... */
9402 case GT_EXPR:
9403 case LT_EXPR:
9404 return constant_boolean_node (0, type);
9405 default:
9406 gcc_unreachable ();
9410 /* If we are comparing an expression that just has comparisons
9411 of two integer values, arithmetic expressions of those comparisons,
9412 and constants, we can simplify it. There are only three cases
9413 to check: the two values can either be equal, the first can be
9414 greater, or the second can be greater. Fold the expression for
9415 those three values. Since each value must be 0 or 1, we have
9416 eight possibilities, each of which corresponds to the constant 0
9417 or 1 or one of the six possible comparisons.
9419 This handles common cases like (a > b) == 0 but also handles
9420 expressions like ((x > y) - (y > x)) > 0, which supposedly
9421 occur in macroized code. */
9423 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9425 tree cval1 = 0, cval2 = 0;
9426 int save_p = 0;
9428 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9429 /* Don't handle degenerate cases here; they should already
9430 have been handled anyway. */
9431 && cval1 != 0 && cval2 != 0
9432 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9433 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9434 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9435 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9436 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9437 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9438 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9440 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9441 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9443 /* We can't just pass T to eval_subst in case cval1 or cval2
9444 was the same as ARG1. */
9446 tree high_result
9447 = fold_build2 (code, type,
9448 eval_subst (arg0, cval1, maxval,
9449 cval2, minval),
9450 arg1);
9451 tree equal_result
9452 = fold_build2 (code, type,
9453 eval_subst (arg0, cval1, maxval,
9454 cval2, maxval),
9455 arg1);
9456 tree low_result
9457 = fold_build2 (code, type,
9458 eval_subst (arg0, cval1, minval,
9459 cval2, maxval),
9460 arg1);
9462 /* All three of these results should be 0 or 1. Confirm they are.
9463 Then use those values to select the proper code to use. */
9465 if (TREE_CODE (high_result) == INTEGER_CST
9466 && TREE_CODE (equal_result) == INTEGER_CST
9467 && TREE_CODE (low_result) == INTEGER_CST)
9469 /* Make a 3-bit mask with the high-order bit being the
9470 value for `>', the next for '=', and the low for '<'. */
9471 switch ((integer_onep (high_result) * 4)
9472 + (integer_onep (equal_result) * 2)
9473 + integer_onep (low_result))
9475 case 0:
9476 /* Always false. */
9477 return omit_one_operand (type, integer_zero_node, arg0);
9478 case 1:
9479 code = LT_EXPR;
9480 break;
9481 case 2:
9482 code = EQ_EXPR;
9483 break;
9484 case 3:
9485 code = LE_EXPR;
9486 break;
9487 case 4:
9488 code = GT_EXPR;
9489 break;
9490 case 5:
9491 code = NE_EXPR;
9492 break;
9493 case 6:
9494 code = GE_EXPR;
9495 break;
9496 case 7:
9497 /* Always true. */
9498 return omit_one_operand (type, integer_one_node, arg0);
9501 if (save_p)
9502 return save_expr (build2 (code, type, cval1, cval2));
9503 return fold_build2 (code, type, cval1, cval2);
9508 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9509 into a single range test. */
9510 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9511 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9512 && TREE_CODE (arg1) == INTEGER_CST
9513 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9514 && !integer_zerop (TREE_OPERAND (arg0, 1))
9515 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9516 && !TREE_OVERFLOW (arg1))
9518 tem = fold_div_compare (code, type, arg0, arg1);
9519 if (tem != NULL_TREE)
9520 return tem;
9523 /* Fold ~X op ~Y as Y op X. */
9524 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9525 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9527 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9528 return fold_build2 (code, type,
9529 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9530 TREE_OPERAND (arg0, 0));
9533 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9534 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9535 && TREE_CODE (arg1) == INTEGER_CST)
9537 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9538 return fold_build2 (swap_tree_comparison (code), type,
9539 TREE_OPERAND (arg0, 0),
9540 fold_build1 (BIT_NOT_EXPR, cmp_type,
9541 fold_convert (cmp_type, arg1)));
9544 return NULL_TREE;
9548 /* Subroutine of fold_binary. Optimize complex multiplications of the
9549 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9550 argument EXPR represents the expression "z" of type TYPE. */
9552 static tree
9553 fold_mult_zconjz (tree type, tree expr)
9555 tree itype = TREE_TYPE (type);
9556 tree rpart, ipart, tem;
9558 if (TREE_CODE (expr) == COMPLEX_EXPR)
9560 rpart = TREE_OPERAND (expr, 0);
9561 ipart = TREE_OPERAND (expr, 1);
9563 else if (TREE_CODE (expr) == COMPLEX_CST)
9565 rpart = TREE_REALPART (expr);
9566 ipart = TREE_IMAGPART (expr);
9568 else
9570 expr = save_expr (expr);
9571 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9572 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9575 rpart = save_expr (rpart);
9576 ipart = save_expr (ipart);
9577 tem = fold_build2 (PLUS_EXPR, itype,
9578 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9579 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9580 return fold_build2 (COMPLEX_EXPR, type, tem,
9581 fold_convert (itype, integer_zero_node));
9585 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9586 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9587 guarantees that P and N have the same least significant log2(M) bits.
9588 N is not otherwise constrained. In particular, N is not normalized to
9589 0 <= N < M as is common. In general, the precise value of P is unknown.
9590 M is chosen as large as possible such that constant N can be determined.
9592 Returns M and sets *RESIDUE to N. */
9594 static unsigned HOST_WIDE_INT
9595 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9597 enum tree_code code;
9599 *residue = 0;
9601 code = TREE_CODE (expr);
9602 if (code == ADDR_EXPR)
9604 expr = TREE_OPERAND (expr, 0);
9605 if (handled_component_p (expr))
9607 HOST_WIDE_INT bitsize, bitpos;
9608 tree offset;
9609 enum machine_mode mode;
9610 int unsignedp, volatilep;
9612 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9613 &mode, &unsignedp, &volatilep, false);
9614 *residue = bitpos / BITS_PER_UNIT;
9615 if (offset)
9617 if (TREE_CODE (offset) == INTEGER_CST)
9618 *residue += TREE_INT_CST_LOW (offset);
9619 else
9620 /* We don't handle more complicated offset expressions. */
9621 return 1;
9625 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9626 return DECL_ALIGN_UNIT (expr);
9628 else if (code == POINTER_PLUS_EXPR)
9630 tree op0, op1;
9631 unsigned HOST_WIDE_INT modulus;
9632 enum tree_code inner_code;
9634 op0 = TREE_OPERAND (expr, 0);
9635 STRIP_NOPS (op0);
9636 modulus = get_pointer_modulus_and_residue (op0, residue);
9638 op1 = TREE_OPERAND (expr, 1);
9639 STRIP_NOPS (op1);
9640 inner_code = TREE_CODE (op1);
9641 if (inner_code == INTEGER_CST)
9643 *residue += TREE_INT_CST_LOW (op1);
9644 return modulus;
9646 else if (inner_code == MULT_EXPR)
9648 op1 = TREE_OPERAND (op1, 1);
9649 if (TREE_CODE (op1) == INTEGER_CST)
9651 unsigned HOST_WIDE_INT align;
9653 /* Compute the greatest power-of-2 divisor of op1. */
9654 align = TREE_INT_CST_LOW (op1);
9655 align &= -align;
9657 /* If align is non-zero and less than *modulus, replace
9658 *modulus with align., If align is 0, then either op1 is 0
9659 or the greatest power-of-2 divisor of op1 doesn't fit in an
9660 unsigned HOST_WIDE_INT. In either case, no additional
9661 constraint is imposed. */
9662 if (align)
9663 modulus = MIN (modulus, align);
9665 return modulus;
9670 /* If we get here, we were unable to determine anything useful about the
9671 expression. */
9672 return 1;
9676 /* Fold a binary expression of code CODE and type TYPE with operands
9677 OP0 and OP1. Return the folded expression if folding is
9678 successful. Otherwise, return NULL_TREE. */
9680 tree
9681 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9683 enum tree_code_class kind = TREE_CODE_CLASS (code);
9684 tree arg0, arg1, tem;
9685 tree t1 = NULL_TREE;
9686 bool strict_overflow_p;
9688 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9689 && TREE_CODE_LENGTH (code) == 2
9690 && op0 != NULL_TREE
9691 && op1 != NULL_TREE);
9693 arg0 = op0;
9694 arg1 = op1;
9696 /* Strip any conversions that don't change the mode. This is
9697 safe for every expression, except for a comparison expression
9698 because its signedness is derived from its operands. So, in
9699 the latter case, only strip conversions that don't change the
9700 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9701 preserved.
9703 Note that this is done as an internal manipulation within the
9704 constant folder, in order to find the simplest representation
9705 of the arguments so that their form can be studied. In any
9706 cases, the appropriate type conversions should be put back in
9707 the tree that will get out of the constant folder. */
9709 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9711 STRIP_SIGN_NOPS (arg0);
9712 STRIP_SIGN_NOPS (arg1);
9714 else
9716 STRIP_NOPS (arg0);
9717 STRIP_NOPS (arg1);
9720 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9721 constant but we can't do arithmetic on them. */
9722 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9723 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9724 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9725 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9726 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9727 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9729 if (kind == tcc_binary)
9731 /* Make sure type and arg0 have the same saturating flag. */
9732 gcc_assert (TYPE_SATURATING (type)
9733 == TYPE_SATURATING (TREE_TYPE (arg0)));
9734 tem = const_binop (code, arg0, arg1, 0);
9736 else if (kind == tcc_comparison)
9737 tem = fold_relational_const (code, type, arg0, arg1);
9738 else
9739 tem = NULL_TREE;
9741 if (tem != NULL_TREE)
9743 if (TREE_TYPE (tem) != type)
9744 tem = fold_convert (type, tem);
9745 return tem;
9749 /* If this is a commutative operation, and ARG0 is a constant, move it
9750 to ARG1 to reduce the number of tests below. */
9751 if (commutative_tree_code (code)
9752 && tree_swap_operands_p (arg0, arg1, true))
9753 return fold_build2 (code, type, op1, op0);
9755 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9757 First check for cases where an arithmetic operation is applied to a
9758 compound, conditional, or comparison operation. Push the arithmetic
9759 operation inside the compound or conditional to see if any folding
9760 can then be done. Convert comparison to conditional for this purpose.
9761 The also optimizes non-constant cases that used to be done in
9762 expand_expr.
9764 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9765 one of the operands is a comparison and the other is a comparison, a
9766 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9767 code below would make the expression more complex. Change it to a
9768 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9769 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9771 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9772 || code == EQ_EXPR || code == NE_EXPR)
9773 && ((truth_value_p (TREE_CODE (arg0))
9774 && (truth_value_p (TREE_CODE (arg1))
9775 || (TREE_CODE (arg1) == BIT_AND_EXPR
9776 && integer_onep (TREE_OPERAND (arg1, 1)))))
9777 || (truth_value_p (TREE_CODE (arg1))
9778 && (truth_value_p (TREE_CODE (arg0))
9779 || (TREE_CODE (arg0) == BIT_AND_EXPR
9780 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9782 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9783 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9784 : TRUTH_XOR_EXPR,
9785 boolean_type_node,
9786 fold_convert (boolean_type_node, arg0),
9787 fold_convert (boolean_type_node, arg1));
9789 if (code == EQ_EXPR)
9790 tem = invert_truthvalue (tem);
9792 return fold_convert (type, tem);
9795 if (TREE_CODE_CLASS (code) == tcc_binary
9796 || TREE_CODE_CLASS (code) == tcc_comparison)
9798 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9799 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9800 fold_build2 (code, type,
9801 fold_convert (TREE_TYPE (op0),
9802 TREE_OPERAND (arg0, 1)),
9803 op1));
9804 if (TREE_CODE (arg1) == COMPOUND_EXPR
9805 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9806 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9807 fold_build2 (code, type, op0,
9808 fold_convert (TREE_TYPE (op1),
9809 TREE_OPERAND (arg1, 1))));
9811 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9813 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9814 arg0, arg1,
9815 /*cond_first_p=*/1);
9816 if (tem != NULL_TREE)
9817 return tem;
9820 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9822 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9823 arg1, arg0,
9824 /*cond_first_p=*/0);
9825 if (tem != NULL_TREE)
9826 return tem;
9830 switch (code)
9832 case POINTER_PLUS_EXPR:
9833 /* 0 +p index -> (type)index */
9834 if (integer_zerop (arg0))
9835 return non_lvalue (fold_convert (type, arg1));
9837 /* PTR +p 0 -> PTR */
9838 if (integer_zerop (arg1))
9839 return non_lvalue (fold_convert (type, arg0));
9841 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9842 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9843 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9844 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9845 fold_convert (sizetype, arg1),
9846 fold_convert (sizetype, arg0)));
9848 /* index +p PTR -> PTR +p index */
9849 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9850 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9851 return fold_build2 (POINTER_PLUS_EXPR, type,
9852 fold_convert (type, arg1),
9853 fold_convert (sizetype, arg0));
9855 /* (PTR +p B) +p A -> PTR +p (B + A) */
9856 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9858 tree inner;
9859 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9860 tree arg00 = TREE_OPERAND (arg0, 0);
9861 inner = fold_build2 (PLUS_EXPR, sizetype,
9862 arg01, fold_convert (sizetype, arg1));
9863 return fold_convert (type,
9864 fold_build2 (POINTER_PLUS_EXPR,
9865 TREE_TYPE (arg00), arg00, inner));
9868 /* PTR_CST +p CST -> CST1 */
9869 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9870 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9872 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9873 of the array. Loop optimizer sometimes produce this type of
9874 expressions. */
9875 if (TREE_CODE (arg0) == ADDR_EXPR)
9877 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9878 if (tem)
9879 return fold_convert (type, tem);
9882 return NULL_TREE;
9884 case PLUS_EXPR:
9885 /* A + (-B) -> A - B */
9886 if (TREE_CODE (arg1) == NEGATE_EXPR)
9887 return fold_build2 (MINUS_EXPR, type,
9888 fold_convert (type, arg0),
9889 fold_convert (type, TREE_OPERAND (arg1, 0)));
9890 /* (-A) + B -> B - A */
9891 if (TREE_CODE (arg0) == NEGATE_EXPR
9892 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9893 return fold_build2 (MINUS_EXPR, type,
9894 fold_convert (type, arg1),
9895 fold_convert (type, TREE_OPERAND (arg0, 0)));
9897 if (INTEGRAL_TYPE_P (type))
9899 /* Convert ~A + 1 to -A. */
9900 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9901 && integer_onep (arg1))
9902 return fold_build1 (NEGATE_EXPR, type,
9903 fold_convert (type, TREE_OPERAND (arg0, 0)));
9905 /* ~X + X is -1. */
9906 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9907 && !TYPE_OVERFLOW_TRAPS (type))
9909 tree tem = TREE_OPERAND (arg0, 0);
9911 STRIP_NOPS (tem);
9912 if (operand_equal_p (tem, arg1, 0))
9914 t1 = build_int_cst_type (type, -1);
9915 return omit_one_operand (type, t1, arg1);
9919 /* X + ~X is -1. */
9920 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9921 && !TYPE_OVERFLOW_TRAPS (type))
9923 tree tem = TREE_OPERAND (arg1, 0);
9925 STRIP_NOPS (tem);
9926 if (operand_equal_p (arg0, tem, 0))
9928 t1 = build_int_cst_type (type, -1);
9929 return omit_one_operand (type, t1, arg0);
9933 /* X + (X / CST) * -CST is X % CST. */
9934 if (TREE_CODE (arg1) == MULT_EXPR
9935 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9936 && operand_equal_p (arg0,
9937 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9939 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9940 tree cst1 = TREE_OPERAND (arg1, 1);
9941 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9942 if (sum && integer_zerop (sum))
9943 return fold_convert (type,
9944 fold_build2 (TRUNC_MOD_EXPR,
9945 TREE_TYPE (arg0), arg0, cst0));
9949 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9950 same or one. Make sure type is not saturating.
9951 fold_plusminus_mult_expr will re-associate. */
9952 if ((TREE_CODE (arg0) == MULT_EXPR
9953 || TREE_CODE (arg1) == MULT_EXPR)
9954 && !TYPE_SATURATING (type)
9955 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9957 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9958 if (tem)
9959 return tem;
9962 if (! FLOAT_TYPE_P (type))
9964 if (integer_zerop (arg1))
9965 return non_lvalue (fold_convert (type, arg0));
9967 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9968 with a constant, and the two constants have no bits in common,
9969 we should treat this as a BIT_IOR_EXPR since this may produce more
9970 simplifications. */
9971 if (TREE_CODE (arg0) == BIT_AND_EXPR
9972 && TREE_CODE (arg1) == BIT_AND_EXPR
9973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9974 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9975 && integer_zerop (const_binop (BIT_AND_EXPR,
9976 TREE_OPERAND (arg0, 1),
9977 TREE_OPERAND (arg1, 1), 0)))
9979 code = BIT_IOR_EXPR;
9980 goto bit_ior;
9983 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9984 (plus (plus (mult) (mult)) (foo)) so that we can
9985 take advantage of the factoring cases below. */
9986 if (((TREE_CODE (arg0) == PLUS_EXPR
9987 || TREE_CODE (arg0) == MINUS_EXPR)
9988 && TREE_CODE (arg1) == MULT_EXPR)
9989 || ((TREE_CODE (arg1) == PLUS_EXPR
9990 || TREE_CODE (arg1) == MINUS_EXPR)
9991 && TREE_CODE (arg0) == MULT_EXPR))
9993 tree parg0, parg1, parg, marg;
9994 enum tree_code pcode;
9996 if (TREE_CODE (arg1) == MULT_EXPR)
9997 parg = arg0, marg = arg1;
9998 else
9999 parg = arg1, marg = arg0;
10000 pcode = TREE_CODE (parg);
10001 parg0 = TREE_OPERAND (parg, 0);
10002 parg1 = TREE_OPERAND (parg, 1);
10003 STRIP_NOPS (parg0);
10004 STRIP_NOPS (parg1);
10006 if (TREE_CODE (parg0) == MULT_EXPR
10007 && TREE_CODE (parg1) != MULT_EXPR)
10008 return fold_build2 (pcode, type,
10009 fold_build2 (PLUS_EXPR, type,
10010 fold_convert (type, parg0),
10011 fold_convert (type, marg)),
10012 fold_convert (type, parg1));
10013 if (TREE_CODE (parg0) != MULT_EXPR
10014 && TREE_CODE (parg1) == MULT_EXPR)
10015 return fold_build2 (PLUS_EXPR, type,
10016 fold_convert (type, parg0),
10017 fold_build2 (pcode, type,
10018 fold_convert (type, marg),
10019 fold_convert (type,
10020 parg1)));
10023 else
10025 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10026 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10027 return non_lvalue (fold_convert (type, arg0));
10029 /* Likewise if the operands are reversed. */
10030 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10031 return non_lvalue (fold_convert (type, arg1));
10033 /* Convert X + -C into X - C. */
10034 if (TREE_CODE (arg1) == REAL_CST
10035 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10037 tem = fold_negate_const (arg1, type);
10038 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10039 return fold_build2 (MINUS_EXPR, type,
10040 fold_convert (type, arg0),
10041 fold_convert (type, tem));
10044 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10045 to __complex__ ( x, y ). This is not the same for SNaNs or
10046 if signed zeros are involved. */
10047 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10048 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10049 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10051 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10052 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10053 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10054 bool arg0rz = false, arg0iz = false;
10055 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10056 || (arg0i && (arg0iz = real_zerop (arg0i))))
10058 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10059 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10060 if (arg0rz && arg1i && real_zerop (arg1i))
10062 tree rp = arg1r ? arg1r
10063 : build1 (REALPART_EXPR, rtype, arg1);
10064 tree ip = arg0i ? arg0i
10065 : build1 (IMAGPART_EXPR, rtype, arg0);
10066 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10068 else if (arg0iz && arg1r && real_zerop (arg1r))
10070 tree rp = arg0r ? arg0r
10071 : build1 (REALPART_EXPR, rtype, arg0);
10072 tree ip = arg1i ? arg1i
10073 : build1 (IMAGPART_EXPR, rtype, arg1);
10074 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10079 if (flag_unsafe_math_optimizations
10080 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10081 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10082 && (tem = distribute_real_division (code, type, arg0, arg1)))
10083 return tem;
10085 /* Convert x+x into x*2.0. */
10086 if (operand_equal_p (arg0, arg1, 0)
10087 && SCALAR_FLOAT_TYPE_P (type))
10088 return fold_build2 (MULT_EXPR, type, arg0,
10089 build_real (type, dconst2));
10091 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10092 We associate floats only if the user has specified
10093 -fassociative-math. */
10094 if (flag_associative_math
10095 && TREE_CODE (arg1) == PLUS_EXPR
10096 && TREE_CODE (arg0) != MULT_EXPR)
10098 tree tree10 = TREE_OPERAND (arg1, 0);
10099 tree tree11 = TREE_OPERAND (arg1, 1);
10100 if (TREE_CODE (tree11) == MULT_EXPR
10101 && TREE_CODE (tree10) == MULT_EXPR)
10103 tree tree0;
10104 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10105 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10108 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10109 We associate floats only if the user has specified
10110 -fassociative-math. */
10111 if (flag_associative_math
10112 && TREE_CODE (arg0) == PLUS_EXPR
10113 && TREE_CODE (arg1) != MULT_EXPR)
10115 tree tree00 = TREE_OPERAND (arg0, 0);
10116 tree tree01 = TREE_OPERAND (arg0, 1);
10117 if (TREE_CODE (tree01) == MULT_EXPR
10118 && TREE_CODE (tree00) == MULT_EXPR)
10120 tree tree0;
10121 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10122 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10127 bit_rotate:
10128 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10129 is a rotate of A by C1 bits. */
10130 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10131 is a rotate of A by B bits. */
10133 enum tree_code code0, code1;
10134 tree rtype;
10135 code0 = TREE_CODE (arg0);
10136 code1 = TREE_CODE (arg1);
10137 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10138 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10139 && operand_equal_p (TREE_OPERAND (arg0, 0),
10140 TREE_OPERAND (arg1, 0), 0)
10141 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10142 TYPE_UNSIGNED (rtype))
10143 /* Only create rotates in complete modes. Other cases are not
10144 expanded properly. */
10145 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10147 tree tree01, tree11;
10148 enum tree_code code01, code11;
10150 tree01 = TREE_OPERAND (arg0, 1);
10151 tree11 = TREE_OPERAND (arg1, 1);
10152 STRIP_NOPS (tree01);
10153 STRIP_NOPS (tree11);
10154 code01 = TREE_CODE (tree01);
10155 code11 = TREE_CODE (tree11);
10156 if (code01 == INTEGER_CST
10157 && code11 == INTEGER_CST
10158 && TREE_INT_CST_HIGH (tree01) == 0
10159 && TREE_INT_CST_HIGH (tree11) == 0
10160 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10161 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10162 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10163 code0 == LSHIFT_EXPR ? tree01 : tree11);
10164 else if (code11 == MINUS_EXPR)
10166 tree tree110, tree111;
10167 tree110 = TREE_OPERAND (tree11, 0);
10168 tree111 = TREE_OPERAND (tree11, 1);
10169 STRIP_NOPS (tree110);
10170 STRIP_NOPS (tree111);
10171 if (TREE_CODE (tree110) == INTEGER_CST
10172 && 0 == compare_tree_int (tree110,
10173 TYPE_PRECISION
10174 (TREE_TYPE (TREE_OPERAND
10175 (arg0, 0))))
10176 && operand_equal_p (tree01, tree111, 0))
10177 return build2 ((code0 == LSHIFT_EXPR
10178 ? LROTATE_EXPR
10179 : RROTATE_EXPR),
10180 type, TREE_OPERAND (arg0, 0), tree01);
10182 else if (code01 == MINUS_EXPR)
10184 tree tree010, tree011;
10185 tree010 = TREE_OPERAND (tree01, 0);
10186 tree011 = TREE_OPERAND (tree01, 1);
10187 STRIP_NOPS (tree010);
10188 STRIP_NOPS (tree011);
10189 if (TREE_CODE (tree010) == INTEGER_CST
10190 && 0 == compare_tree_int (tree010,
10191 TYPE_PRECISION
10192 (TREE_TYPE (TREE_OPERAND
10193 (arg0, 0))))
10194 && operand_equal_p (tree11, tree011, 0))
10195 return build2 ((code0 != LSHIFT_EXPR
10196 ? LROTATE_EXPR
10197 : RROTATE_EXPR),
10198 type, TREE_OPERAND (arg0, 0), tree11);
10203 associate:
10204 /* In most languages, can't associate operations on floats through
10205 parentheses. Rather than remember where the parentheses were, we
10206 don't associate floats at all, unless the user has specified
10207 -fassociative-math.
10208 And, we need to make sure type is not saturating. */
10210 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10211 && !TYPE_SATURATING (type))
10213 tree var0, con0, lit0, minus_lit0;
10214 tree var1, con1, lit1, minus_lit1;
10215 bool ok = true;
10217 /* Split both trees into variables, constants, and literals. Then
10218 associate each group together, the constants with literals,
10219 then the result with variables. This increases the chances of
10220 literals being recombined later and of generating relocatable
10221 expressions for the sum of a constant and literal. */
10222 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10223 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10224 code == MINUS_EXPR);
10226 /* With undefined overflow we can only associate constants
10227 with one variable. */
10228 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10229 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10230 && var0 && var1)
10232 tree tmp0 = var0;
10233 tree tmp1 = var1;
10235 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10236 tmp0 = TREE_OPERAND (tmp0, 0);
10237 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10238 tmp1 = TREE_OPERAND (tmp1, 0);
10239 /* The only case we can still associate with two variables
10240 is if they are the same, modulo negation. */
10241 if (!operand_equal_p (tmp0, tmp1, 0))
10242 ok = false;
10245 /* Only do something if we found more than two objects. Otherwise,
10246 nothing has changed and we risk infinite recursion. */
10247 if (ok
10248 && (2 < ((var0 != 0) + (var1 != 0)
10249 + (con0 != 0) + (con1 != 0)
10250 + (lit0 != 0) + (lit1 != 0)
10251 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10253 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10254 if (code == MINUS_EXPR)
10255 code = PLUS_EXPR;
10257 var0 = associate_trees (var0, var1, code, type);
10258 con0 = associate_trees (con0, con1, code, type);
10259 lit0 = associate_trees (lit0, lit1, code, type);
10260 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10262 /* Preserve the MINUS_EXPR if the negative part of the literal is
10263 greater than the positive part. Otherwise, the multiplicative
10264 folding code (i.e extract_muldiv) may be fooled in case
10265 unsigned constants are subtracted, like in the following
10266 example: ((X*2 + 4) - 8U)/2. */
10267 if (minus_lit0 && lit0)
10269 if (TREE_CODE (lit0) == INTEGER_CST
10270 && TREE_CODE (minus_lit0) == INTEGER_CST
10271 && tree_int_cst_lt (lit0, minus_lit0))
10273 minus_lit0 = associate_trees (minus_lit0, lit0,
10274 MINUS_EXPR, type);
10275 lit0 = 0;
10277 else
10279 lit0 = associate_trees (lit0, minus_lit0,
10280 MINUS_EXPR, type);
10281 minus_lit0 = 0;
10284 if (minus_lit0)
10286 if (con0 == 0)
10287 return fold_convert (type,
10288 associate_trees (var0, minus_lit0,
10289 MINUS_EXPR, type));
10290 else
10292 con0 = associate_trees (con0, minus_lit0,
10293 MINUS_EXPR, type);
10294 return fold_convert (type,
10295 associate_trees (var0, con0,
10296 PLUS_EXPR, type));
10300 con0 = associate_trees (con0, lit0, code, type);
10301 return fold_convert (type, associate_trees (var0, con0,
10302 code, type));
10306 return NULL_TREE;
10308 case MINUS_EXPR:
10309 /* Pointer simplifications for subtraction, simple reassociations. */
10310 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10312 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10313 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10314 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10316 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10317 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10318 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10319 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10320 return fold_build2 (PLUS_EXPR, type,
10321 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10322 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10324 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10325 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10327 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10328 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10329 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10330 if (tmp)
10331 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10334 /* A - (-B) -> A + B */
10335 if (TREE_CODE (arg1) == NEGATE_EXPR)
10336 return fold_build2 (PLUS_EXPR, type, op0,
10337 fold_convert (type, TREE_OPERAND (arg1, 0)));
10338 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10339 if (TREE_CODE (arg0) == NEGATE_EXPR
10340 && (FLOAT_TYPE_P (type)
10341 || INTEGRAL_TYPE_P (type))
10342 && negate_expr_p (arg1)
10343 && reorder_operands_p (arg0, arg1))
10344 return fold_build2 (MINUS_EXPR, type,
10345 fold_convert (type, negate_expr (arg1)),
10346 fold_convert (type, TREE_OPERAND (arg0, 0)));
10347 /* Convert -A - 1 to ~A. */
10348 if (INTEGRAL_TYPE_P (type)
10349 && TREE_CODE (arg0) == NEGATE_EXPR
10350 && integer_onep (arg1)
10351 && !TYPE_OVERFLOW_TRAPS (type))
10352 return fold_build1 (BIT_NOT_EXPR, type,
10353 fold_convert (type, TREE_OPERAND (arg0, 0)));
10355 /* Convert -1 - A to ~A. */
10356 if (INTEGRAL_TYPE_P (type)
10357 && integer_all_onesp (arg0))
10358 return fold_build1 (BIT_NOT_EXPR, type, op1);
10361 /* X - (X / CST) * CST is X % CST. */
10362 if (INTEGRAL_TYPE_P (type)
10363 && TREE_CODE (arg1) == MULT_EXPR
10364 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10365 && operand_equal_p (arg0,
10366 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10367 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10368 TREE_OPERAND (arg1, 1), 0))
10369 return fold_convert (type,
10370 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10371 arg0, TREE_OPERAND (arg1, 1)));
10373 if (! FLOAT_TYPE_P (type))
10375 if (integer_zerop (arg0))
10376 return negate_expr (fold_convert (type, arg1));
10377 if (integer_zerop (arg1))
10378 return non_lvalue (fold_convert (type, arg0));
10380 /* Fold A - (A & B) into ~B & A. */
10381 if (!TREE_SIDE_EFFECTS (arg0)
10382 && TREE_CODE (arg1) == BIT_AND_EXPR)
10384 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10386 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10387 return fold_build2 (BIT_AND_EXPR, type,
10388 fold_build1 (BIT_NOT_EXPR, type, arg10),
10389 fold_convert (type, arg0));
10391 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10393 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10394 return fold_build2 (BIT_AND_EXPR, type,
10395 fold_build1 (BIT_NOT_EXPR, type, arg11),
10396 fold_convert (type, arg0));
10400 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10401 any power of 2 minus 1. */
10402 if (TREE_CODE (arg0) == BIT_AND_EXPR
10403 && TREE_CODE (arg1) == BIT_AND_EXPR
10404 && operand_equal_p (TREE_OPERAND (arg0, 0),
10405 TREE_OPERAND (arg1, 0), 0))
10407 tree mask0 = TREE_OPERAND (arg0, 1);
10408 tree mask1 = TREE_OPERAND (arg1, 1);
10409 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10411 if (operand_equal_p (tem, mask1, 0))
10413 tem = fold_build2 (BIT_XOR_EXPR, type,
10414 TREE_OPERAND (arg0, 0), mask1);
10415 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10420 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10421 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10422 return non_lvalue (fold_convert (type, arg0));
10424 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10425 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10426 (-ARG1 + ARG0) reduces to -ARG1. */
10427 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10428 return negate_expr (fold_convert (type, arg1));
10430 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10431 __complex__ ( x, -y ). This is not the same for SNaNs or if
10432 signed zeros are involved. */
10433 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10434 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10435 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10437 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10438 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10439 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10440 bool arg0rz = false, arg0iz = false;
10441 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10442 || (arg0i && (arg0iz = real_zerop (arg0i))))
10444 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10445 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10446 if (arg0rz && arg1i && real_zerop (arg1i))
10448 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10449 arg1r ? arg1r
10450 : build1 (REALPART_EXPR, rtype, arg1));
10451 tree ip = arg0i ? arg0i
10452 : build1 (IMAGPART_EXPR, rtype, arg0);
10453 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10455 else if (arg0iz && arg1r && real_zerop (arg1r))
10457 tree rp = arg0r ? arg0r
10458 : build1 (REALPART_EXPR, rtype, arg0);
10459 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10460 arg1i ? arg1i
10461 : build1 (IMAGPART_EXPR, rtype, arg1));
10462 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10467 /* Fold &x - &x. This can happen from &x.foo - &x.
10468 This is unsafe for certain floats even in non-IEEE formats.
10469 In IEEE, it is unsafe because it does wrong for NaNs.
10470 Also note that operand_equal_p is always false if an operand
10471 is volatile. */
10473 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10474 && operand_equal_p (arg0, arg1, 0))
10475 return fold_convert (type, integer_zero_node);
10477 /* A - B -> A + (-B) if B is easily negatable. */
10478 if (negate_expr_p (arg1)
10479 && ((FLOAT_TYPE_P (type)
10480 /* Avoid this transformation if B is a positive REAL_CST. */
10481 && (TREE_CODE (arg1) != REAL_CST
10482 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10483 || INTEGRAL_TYPE_P (type)))
10484 return fold_build2 (PLUS_EXPR, type,
10485 fold_convert (type, arg0),
10486 fold_convert (type, negate_expr (arg1)));
10488 /* Try folding difference of addresses. */
10490 HOST_WIDE_INT diff;
10492 if ((TREE_CODE (arg0) == ADDR_EXPR
10493 || TREE_CODE (arg1) == ADDR_EXPR)
10494 && ptr_difference_const (arg0, arg1, &diff))
10495 return build_int_cst_type (type, diff);
10498 /* Fold &a[i] - &a[j] to i-j. */
10499 if (TREE_CODE (arg0) == ADDR_EXPR
10500 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10501 && TREE_CODE (arg1) == ADDR_EXPR
10502 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10504 tree aref0 = TREE_OPERAND (arg0, 0);
10505 tree aref1 = TREE_OPERAND (arg1, 0);
10506 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10507 TREE_OPERAND (aref1, 0), 0))
10509 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10510 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10511 tree esz = array_ref_element_size (aref0);
10512 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10513 return fold_build2 (MULT_EXPR, type, diff,
10514 fold_convert (type, esz));
10519 if (flag_unsafe_math_optimizations
10520 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10521 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10522 && (tem = distribute_real_division (code, type, arg0, arg1)))
10523 return tem;
10525 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10526 same or one. Make sure type is not saturating.
10527 fold_plusminus_mult_expr will re-associate. */
10528 if ((TREE_CODE (arg0) == MULT_EXPR
10529 || TREE_CODE (arg1) == MULT_EXPR)
10530 && !TYPE_SATURATING (type)
10531 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10533 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10534 if (tem)
10535 return tem;
10538 goto associate;
10540 case MULT_EXPR:
10541 /* (-A) * (-B) -> A * B */
10542 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10543 return fold_build2 (MULT_EXPR, type,
10544 fold_convert (type, TREE_OPERAND (arg0, 0)),
10545 fold_convert (type, negate_expr (arg1)));
10546 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10547 return fold_build2 (MULT_EXPR, type,
10548 fold_convert (type, negate_expr (arg0)),
10549 fold_convert (type, TREE_OPERAND (arg1, 0)));
10551 if (! FLOAT_TYPE_P (type))
10553 if (integer_zerop (arg1))
10554 return omit_one_operand (type, arg1, arg0);
10555 if (integer_onep (arg1))
10556 return non_lvalue (fold_convert (type, arg0));
10557 /* Transform x * -1 into -x. Make sure to do the negation
10558 on the original operand with conversions not stripped
10559 because we can only strip non-sign-changing conversions. */
10560 if (integer_all_onesp (arg1))
10561 return fold_convert (type, negate_expr (op0));
10562 /* Transform x * -C into -x * C if x is easily negatable. */
10563 if (TREE_CODE (arg1) == INTEGER_CST
10564 && tree_int_cst_sgn (arg1) == -1
10565 && negate_expr_p (arg0)
10566 && (tem = negate_expr (arg1)) != arg1
10567 && !TREE_OVERFLOW (tem))
10568 return fold_build2 (MULT_EXPR, type,
10569 fold_convert (type, negate_expr (arg0)), tem);
10571 /* (a * (1 << b)) is (a << b) */
10572 if (TREE_CODE (arg1) == LSHIFT_EXPR
10573 && integer_onep (TREE_OPERAND (arg1, 0)))
10574 return fold_build2 (LSHIFT_EXPR, type, op0,
10575 TREE_OPERAND (arg1, 1));
10576 if (TREE_CODE (arg0) == LSHIFT_EXPR
10577 && integer_onep (TREE_OPERAND (arg0, 0)))
10578 return fold_build2 (LSHIFT_EXPR, type, op1,
10579 TREE_OPERAND (arg0, 1));
10581 /* (A + A) * C -> A * 2 * C */
10582 if (TREE_CODE (arg0) == PLUS_EXPR
10583 && TREE_CODE (arg1) == INTEGER_CST
10584 && operand_equal_p (TREE_OPERAND (arg0, 0),
10585 TREE_OPERAND (arg0, 1), 0))
10586 return fold_build2 (MULT_EXPR, type,
10587 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10588 TREE_OPERAND (arg0, 1)),
10589 fold_build2 (MULT_EXPR, type,
10590 build_int_cst (type, 2) , arg1));
10592 strict_overflow_p = false;
10593 if (TREE_CODE (arg1) == INTEGER_CST
10594 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10595 &strict_overflow_p)))
10597 if (strict_overflow_p)
10598 fold_overflow_warning (("assuming signed overflow does not "
10599 "occur when simplifying "
10600 "multiplication"),
10601 WARN_STRICT_OVERFLOW_MISC);
10602 return fold_convert (type, tem);
10605 /* Optimize z * conj(z) for integer complex numbers. */
10606 if (TREE_CODE (arg0) == CONJ_EXPR
10607 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10608 return fold_mult_zconjz (type, arg1);
10609 if (TREE_CODE (arg1) == CONJ_EXPR
10610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10611 return fold_mult_zconjz (type, arg0);
10613 else
10615 /* Maybe fold x * 0 to 0. The expressions aren't the same
10616 when x is NaN, since x * 0 is also NaN. Nor are they the
10617 same in modes with signed zeros, since multiplying a
10618 negative value by 0 gives -0, not +0. */
10619 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10621 && real_zerop (arg1))
10622 return omit_one_operand (type, arg1, arg0);
10623 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10624 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10625 && real_onep (arg1))
10626 return non_lvalue (fold_convert (type, arg0));
10628 /* Transform x * -1.0 into -x. */
10629 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10630 && real_minus_onep (arg1))
10631 return fold_convert (type, negate_expr (arg0));
10633 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10634 the result for floating point types due to rounding so it is applied
10635 only if -fassociative-math was specify. */
10636 if (flag_associative_math
10637 && TREE_CODE (arg0) == RDIV_EXPR
10638 && TREE_CODE (arg1) == REAL_CST
10639 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10641 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10642 arg1, 0);
10643 if (tem)
10644 return fold_build2 (RDIV_EXPR, type, tem,
10645 TREE_OPERAND (arg0, 1));
10648 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10649 if (operand_equal_p (arg0, arg1, 0))
10651 tree tem = fold_strip_sign_ops (arg0);
10652 if (tem != NULL_TREE)
10654 tem = fold_convert (type, tem);
10655 return fold_build2 (MULT_EXPR, type, tem, tem);
10659 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10660 This is not the same for NaNs or if signed zeros are
10661 involved. */
10662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10663 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10664 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10665 && TREE_CODE (arg1) == COMPLEX_CST
10666 && real_zerop (TREE_REALPART (arg1)))
10668 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10669 if (real_onep (TREE_IMAGPART (arg1)))
10670 return fold_build2 (COMPLEX_EXPR, type,
10671 negate_expr (fold_build1 (IMAGPART_EXPR,
10672 rtype, arg0)),
10673 fold_build1 (REALPART_EXPR, rtype, arg0));
10674 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10675 return fold_build2 (COMPLEX_EXPR, type,
10676 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10677 negate_expr (fold_build1 (REALPART_EXPR,
10678 rtype, arg0)));
10681 /* Optimize z * conj(z) for floating point complex numbers.
10682 Guarded by flag_unsafe_math_optimizations as non-finite
10683 imaginary components don't produce scalar results. */
10684 if (flag_unsafe_math_optimizations
10685 && TREE_CODE (arg0) == CONJ_EXPR
10686 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10687 return fold_mult_zconjz (type, arg1);
10688 if (flag_unsafe_math_optimizations
10689 && TREE_CODE (arg1) == CONJ_EXPR
10690 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10691 return fold_mult_zconjz (type, arg0);
10693 if (flag_unsafe_math_optimizations)
10695 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10696 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10698 /* Optimizations of root(...)*root(...). */
10699 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10701 tree rootfn, arg;
10702 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10703 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10705 /* Optimize sqrt(x)*sqrt(x) as x. */
10706 if (BUILTIN_SQRT_P (fcode0)
10707 && operand_equal_p (arg00, arg10, 0)
10708 && ! HONOR_SNANS (TYPE_MODE (type)))
10709 return arg00;
10711 /* Optimize root(x)*root(y) as root(x*y). */
10712 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10713 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10714 return build_call_expr (rootfn, 1, arg);
10717 /* Optimize expN(x)*expN(y) as expN(x+y). */
10718 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10720 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10721 tree arg = fold_build2 (PLUS_EXPR, type,
10722 CALL_EXPR_ARG (arg0, 0),
10723 CALL_EXPR_ARG (arg1, 0));
10724 return build_call_expr (expfn, 1, arg);
10727 /* Optimizations of pow(...)*pow(...). */
10728 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10729 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10730 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10732 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10733 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10734 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10735 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10737 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10738 if (operand_equal_p (arg01, arg11, 0))
10740 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10741 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10742 return build_call_expr (powfn, 2, arg, arg01);
10745 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10746 if (operand_equal_p (arg00, arg10, 0))
10748 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10749 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10750 return build_call_expr (powfn, 2, arg00, arg);
10754 /* Optimize tan(x)*cos(x) as sin(x). */
10755 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10756 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10757 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10758 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10759 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10760 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10761 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10762 CALL_EXPR_ARG (arg1, 0), 0))
10764 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10766 if (sinfn != NULL_TREE)
10767 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10770 /* Optimize x*pow(x,c) as pow(x,c+1). */
10771 if (fcode1 == BUILT_IN_POW
10772 || fcode1 == BUILT_IN_POWF
10773 || fcode1 == BUILT_IN_POWL)
10775 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10776 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10777 if (TREE_CODE (arg11) == REAL_CST
10778 && !TREE_OVERFLOW (arg11)
10779 && operand_equal_p (arg0, arg10, 0))
10781 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10782 REAL_VALUE_TYPE c;
10783 tree arg;
10785 c = TREE_REAL_CST (arg11);
10786 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10787 arg = build_real (type, c);
10788 return build_call_expr (powfn, 2, arg0, arg);
10792 /* Optimize pow(x,c)*x as pow(x,c+1). */
10793 if (fcode0 == BUILT_IN_POW
10794 || fcode0 == BUILT_IN_POWF
10795 || fcode0 == BUILT_IN_POWL)
10797 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10798 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10799 if (TREE_CODE (arg01) == REAL_CST
10800 && !TREE_OVERFLOW (arg01)
10801 && operand_equal_p (arg1, arg00, 0))
10803 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10804 REAL_VALUE_TYPE c;
10805 tree arg;
10807 c = TREE_REAL_CST (arg01);
10808 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10809 arg = build_real (type, c);
10810 return build_call_expr (powfn, 2, arg1, arg);
10814 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10815 if (optimize_function_for_speed_p (cfun)
10816 && operand_equal_p (arg0, arg1, 0))
10818 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10820 if (powfn)
10822 tree arg = build_real (type, dconst2);
10823 return build_call_expr (powfn, 2, arg0, arg);
10828 goto associate;
10830 case BIT_IOR_EXPR:
10831 bit_ior:
10832 if (integer_all_onesp (arg1))
10833 return omit_one_operand (type, arg1, arg0);
10834 if (integer_zerop (arg1))
10835 return non_lvalue (fold_convert (type, arg0));
10836 if (operand_equal_p (arg0, arg1, 0))
10837 return non_lvalue (fold_convert (type, arg0));
10839 /* ~X | X is -1. */
10840 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10841 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10843 t1 = fold_convert (type, integer_zero_node);
10844 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10845 return omit_one_operand (type, t1, arg1);
10848 /* X | ~X is -1. */
10849 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10850 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10852 t1 = fold_convert (type, integer_zero_node);
10853 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10854 return omit_one_operand (type, t1, arg0);
10857 /* Canonicalize (X & C1) | C2. */
10858 if (TREE_CODE (arg0) == BIT_AND_EXPR
10859 && TREE_CODE (arg1) == INTEGER_CST
10860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10862 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10863 int width = TYPE_PRECISION (type), w;
10864 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10865 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10866 hi2 = TREE_INT_CST_HIGH (arg1);
10867 lo2 = TREE_INT_CST_LOW (arg1);
10869 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10870 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10871 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10873 if (width > HOST_BITS_PER_WIDE_INT)
10875 mhi = (unsigned HOST_WIDE_INT) -1
10876 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10877 mlo = -1;
10879 else
10881 mhi = 0;
10882 mlo = (unsigned HOST_WIDE_INT) -1
10883 >> (HOST_BITS_PER_WIDE_INT - width);
10886 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10887 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10888 return fold_build2 (BIT_IOR_EXPR, type,
10889 TREE_OPERAND (arg0, 0), arg1);
10891 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10892 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10893 mode which allows further optimizations. */
10894 hi1 &= mhi;
10895 lo1 &= mlo;
10896 hi2 &= mhi;
10897 lo2 &= mlo;
10898 hi3 = hi1 & ~hi2;
10899 lo3 = lo1 & ~lo2;
10900 for (w = BITS_PER_UNIT;
10901 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10902 w <<= 1)
10904 unsigned HOST_WIDE_INT mask
10905 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10906 if (((lo1 | lo2) & mask) == mask
10907 && (lo1 & ~mask) == 0 && hi1 == 0)
10909 hi3 = 0;
10910 lo3 = mask;
10911 break;
10914 if (hi3 != hi1 || lo3 != lo1)
10915 return fold_build2 (BIT_IOR_EXPR, type,
10916 fold_build2 (BIT_AND_EXPR, type,
10917 TREE_OPERAND (arg0, 0),
10918 build_int_cst_wide (type,
10919 lo3, hi3)),
10920 arg1);
10923 /* (X & Y) | Y is (X, Y). */
10924 if (TREE_CODE (arg0) == BIT_AND_EXPR
10925 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10926 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10927 /* (X & Y) | X is (Y, X). */
10928 if (TREE_CODE (arg0) == BIT_AND_EXPR
10929 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10930 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10931 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10932 /* X | (X & Y) is (Y, X). */
10933 if (TREE_CODE (arg1) == BIT_AND_EXPR
10934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10935 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10936 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10937 /* X | (Y & X) is (Y, X). */
10938 if (TREE_CODE (arg1) == BIT_AND_EXPR
10939 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10940 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10941 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10943 t1 = distribute_bit_expr (code, type, arg0, arg1);
10944 if (t1 != NULL_TREE)
10945 return t1;
10947 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10949 This results in more efficient code for machines without a NAND
10950 instruction. Combine will canonicalize to the first form
10951 which will allow use of NAND instructions provided by the
10952 backend if they exist. */
10953 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10954 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10956 return fold_build1 (BIT_NOT_EXPR, type,
10957 build2 (BIT_AND_EXPR, type,
10958 fold_convert (type,
10959 TREE_OPERAND (arg0, 0)),
10960 fold_convert (type,
10961 TREE_OPERAND (arg1, 0))));
10964 /* See if this can be simplified into a rotate first. If that
10965 is unsuccessful continue in the association code. */
10966 goto bit_rotate;
10968 case BIT_XOR_EXPR:
10969 if (integer_zerop (arg1))
10970 return non_lvalue (fold_convert (type, arg0));
10971 if (integer_all_onesp (arg1))
10972 return fold_build1 (BIT_NOT_EXPR, type, op0);
10973 if (operand_equal_p (arg0, arg1, 0))
10974 return omit_one_operand (type, integer_zero_node, arg0);
10976 /* ~X ^ X is -1. */
10977 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10978 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10980 t1 = fold_convert (type, integer_zero_node);
10981 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10982 return omit_one_operand (type, t1, arg1);
10985 /* X ^ ~X is -1. */
10986 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10989 t1 = fold_convert (type, integer_zero_node);
10990 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10991 return omit_one_operand (type, t1, arg0);
10994 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10995 with a constant, and the two constants have no bits in common,
10996 we should treat this as a BIT_IOR_EXPR since this may produce more
10997 simplifications. */
10998 if (TREE_CODE (arg0) == BIT_AND_EXPR
10999 && TREE_CODE (arg1) == BIT_AND_EXPR
11000 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11001 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11002 && integer_zerop (const_binop (BIT_AND_EXPR,
11003 TREE_OPERAND (arg0, 1),
11004 TREE_OPERAND (arg1, 1), 0)))
11006 code = BIT_IOR_EXPR;
11007 goto bit_ior;
11010 /* (X | Y) ^ X -> Y & ~ X*/
11011 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11012 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11014 tree t2 = TREE_OPERAND (arg0, 1);
11015 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11016 arg1);
11017 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11018 fold_convert (type, t1));
11019 return t1;
11022 /* (Y | X) ^ X -> Y & ~ X*/
11023 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11024 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11026 tree t2 = TREE_OPERAND (arg0, 0);
11027 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11028 arg1);
11029 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11030 fold_convert (type, t1));
11031 return t1;
11034 /* X ^ (X | Y) -> Y & ~ X*/
11035 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11036 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11038 tree t2 = TREE_OPERAND (arg1, 1);
11039 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11040 arg0);
11041 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11042 fold_convert (type, t1));
11043 return t1;
11046 /* X ^ (Y | X) -> Y & ~ X*/
11047 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11048 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11050 tree t2 = TREE_OPERAND (arg1, 0);
11051 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11052 arg0);
11053 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11054 fold_convert (type, t1));
11055 return t1;
11058 /* Convert ~X ^ ~Y to X ^ Y. */
11059 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11060 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11061 return fold_build2 (code, type,
11062 fold_convert (type, TREE_OPERAND (arg0, 0)),
11063 fold_convert (type, TREE_OPERAND (arg1, 0)));
11065 /* Convert ~X ^ C to X ^ ~C. */
11066 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11067 && TREE_CODE (arg1) == INTEGER_CST)
11068 return fold_build2 (code, type,
11069 fold_convert (type, TREE_OPERAND (arg0, 0)),
11070 fold_build1 (BIT_NOT_EXPR, type, arg1));
11072 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11073 if (TREE_CODE (arg0) == BIT_AND_EXPR
11074 && integer_onep (TREE_OPERAND (arg0, 1))
11075 && integer_onep (arg1))
11076 return fold_build2 (EQ_EXPR, type, arg0,
11077 build_int_cst (TREE_TYPE (arg0), 0));
11079 /* Fold (X & Y) ^ Y as ~X & Y. */
11080 if (TREE_CODE (arg0) == BIT_AND_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11083 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11084 return fold_build2 (BIT_AND_EXPR, type,
11085 fold_build1 (BIT_NOT_EXPR, type, tem),
11086 fold_convert (type, arg1));
11088 /* Fold (X & Y) ^ X as ~Y & X. */
11089 if (TREE_CODE (arg0) == BIT_AND_EXPR
11090 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11091 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11093 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11094 return fold_build2 (BIT_AND_EXPR, type,
11095 fold_build1 (BIT_NOT_EXPR, type, tem),
11096 fold_convert (type, arg1));
11098 /* Fold X ^ (X & Y) as X & ~Y. */
11099 if (TREE_CODE (arg1) == BIT_AND_EXPR
11100 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11102 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11103 return fold_build2 (BIT_AND_EXPR, type,
11104 fold_convert (type, arg0),
11105 fold_build1 (BIT_NOT_EXPR, type, tem));
11107 /* Fold X ^ (Y & X) as ~Y & X. */
11108 if (TREE_CODE (arg1) == BIT_AND_EXPR
11109 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11110 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11112 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11113 return fold_build2 (BIT_AND_EXPR, type,
11114 fold_build1 (BIT_NOT_EXPR, type, tem),
11115 fold_convert (type, arg0));
11118 /* See if this can be simplified into a rotate first. If that
11119 is unsuccessful continue in the association code. */
11120 goto bit_rotate;
11122 case BIT_AND_EXPR:
11123 if (integer_all_onesp (arg1))
11124 return non_lvalue (fold_convert (type, arg0));
11125 if (integer_zerop (arg1))
11126 return omit_one_operand (type, arg1, arg0);
11127 if (operand_equal_p (arg0, arg1, 0))
11128 return non_lvalue (fold_convert (type, arg0));
11130 /* ~X & X is always zero. */
11131 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11132 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11133 return omit_one_operand (type, integer_zero_node, arg1);
11135 /* X & ~X is always zero. */
11136 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11138 return omit_one_operand (type, integer_zero_node, arg0);
11140 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11141 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11142 && TREE_CODE (arg1) == INTEGER_CST
11143 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11145 tree tmp1 = fold_convert (type, arg1);
11146 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11147 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11148 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11149 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11150 return fold_convert (type,
11151 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11154 /* (X | Y) & Y is (X, Y). */
11155 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11156 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11157 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11158 /* (X | Y) & X is (Y, X). */
11159 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11160 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11161 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11162 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11163 /* X & (X | Y) is (Y, X). */
11164 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11165 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11166 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11167 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11168 /* X & (Y | X) is (Y, X). */
11169 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11170 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11171 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11172 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11174 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11175 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11176 && integer_onep (TREE_OPERAND (arg0, 1))
11177 && integer_onep (arg1))
11179 tem = TREE_OPERAND (arg0, 0);
11180 return fold_build2 (EQ_EXPR, type,
11181 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11182 build_int_cst (TREE_TYPE (tem), 1)),
11183 build_int_cst (TREE_TYPE (tem), 0));
11185 /* Fold ~X & 1 as (X & 1) == 0. */
11186 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11187 && integer_onep (arg1))
11189 tem = TREE_OPERAND (arg0, 0);
11190 return fold_build2 (EQ_EXPR, type,
11191 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11192 build_int_cst (TREE_TYPE (tem), 1)),
11193 build_int_cst (TREE_TYPE (tem), 0));
11196 /* Fold (X ^ Y) & Y as ~X & Y. */
11197 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11198 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11200 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11201 return fold_build2 (BIT_AND_EXPR, type,
11202 fold_build1 (BIT_NOT_EXPR, type, tem),
11203 fold_convert (type, arg1));
11205 /* Fold (X ^ Y) & X as ~Y & X. */
11206 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11207 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11208 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11210 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11211 return fold_build2 (BIT_AND_EXPR, type,
11212 fold_build1 (BIT_NOT_EXPR, type, tem),
11213 fold_convert (type, arg1));
11215 /* Fold X & (X ^ Y) as X & ~Y. */
11216 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11217 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11219 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11220 return fold_build2 (BIT_AND_EXPR, type,
11221 fold_convert (type, arg0),
11222 fold_build1 (BIT_NOT_EXPR, type, tem));
11224 /* Fold X & (Y ^ X) as ~Y & X. */
11225 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11226 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11227 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11229 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11230 return fold_build2 (BIT_AND_EXPR, type,
11231 fold_build1 (BIT_NOT_EXPR, type, tem),
11232 fold_convert (type, arg0));
11235 t1 = distribute_bit_expr (code, type, arg0, arg1);
11236 if (t1 != NULL_TREE)
11237 return t1;
11238 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11239 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11240 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11242 unsigned int prec
11243 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11245 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11246 && (~TREE_INT_CST_LOW (arg1)
11247 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11248 return fold_convert (type, TREE_OPERAND (arg0, 0));
11251 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11253 This results in more efficient code for machines without a NOR
11254 instruction. Combine will canonicalize to the first form
11255 which will allow use of NOR instructions provided by the
11256 backend if they exist. */
11257 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11258 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11260 return fold_build1 (BIT_NOT_EXPR, type,
11261 build2 (BIT_IOR_EXPR, type,
11262 fold_convert (type,
11263 TREE_OPERAND (arg0, 0)),
11264 fold_convert (type,
11265 TREE_OPERAND (arg1, 0))));
11268 /* If arg0 is derived from the address of an object or function, we may
11269 be able to fold this expression using the object or function's
11270 alignment. */
11271 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11273 unsigned HOST_WIDE_INT modulus, residue;
11274 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11276 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11278 /* This works because modulus is a power of 2. If this weren't the
11279 case, we'd have to replace it by its greatest power-of-2
11280 divisor: modulus & -modulus. */
11281 if (low < modulus)
11282 return build_int_cst (type, residue & low);
11285 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11286 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11287 if the new mask might be further optimized. */
11288 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11289 || TREE_CODE (arg0) == RSHIFT_EXPR)
11290 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11291 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11292 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11293 < TYPE_PRECISION (TREE_TYPE (arg0))
11294 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11295 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11297 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11298 unsigned HOST_WIDE_INT mask
11299 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11300 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11301 tree shift_type = TREE_TYPE (arg0);
11303 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11304 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11305 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11306 && TYPE_PRECISION (TREE_TYPE (arg0))
11307 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11309 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11310 tree arg00 = TREE_OPERAND (arg0, 0);
11311 /* See if more bits can be proven as zero because of
11312 zero extension. */
11313 if (TREE_CODE (arg00) == NOP_EXPR
11314 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11316 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11317 if (TYPE_PRECISION (inner_type)
11318 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11319 && TYPE_PRECISION (inner_type) < prec)
11321 prec = TYPE_PRECISION (inner_type);
11322 /* See if we can shorten the right shift. */
11323 if (shiftc < prec)
11324 shift_type = inner_type;
11327 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11328 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11329 zerobits <<= prec - shiftc;
11330 /* For arithmetic shift if sign bit could be set, zerobits
11331 can contain actually sign bits, so no transformation is
11332 possible, unless MASK masks them all away. In that
11333 case the shift needs to be converted into logical shift. */
11334 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11335 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11337 if ((mask & zerobits) == 0)
11338 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11339 else
11340 zerobits = 0;
11344 /* ((X << 16) & 0xff00) is (X, 0). */
11345 if ((mask & zerobits) == mask)
11346 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11348 newmask = mask | zerobits;
11349 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11351 unsigned int prec;
11353 /* Only do the transformation if NEWMASK is some integer
11354 mode's mask. */
11355 for (prec = BITS_PER_UNIT;
11356 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11357 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11358 break;
11359 if (prec < HOST_BITS_PER_WIDE_INT
11360 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11362 if (shift_type != TREE_TYPE (arg0))
11364 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11365 fold_convert (shift_type,
11366 TREE_OPERAND (arg0, 0)),
11367 TREE_OPERAND (arg0, 1));
11368 tem = fold_convert (type, tem);
11370 else
11371 tem = op0;
11372 return fold_build2 (BIT_AND_EXPR, type, tem,
11373 build_int_cst_type (TREE_TYPE (op1),
11374 newmask));
11379 goto associate;
11381 case RDIV_EXPR:
11382 /* Don't touch a floating-point divide by zero unless the mode
11383 of the constant can represent infinity. */
11384 if (TREE_CODE (arg1) == REAL_CST
11385 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11386 && real_zerop (arg1))
11387 return NULL_TREE;
11389 /* Optimize A / A to 1.0 if we don't care about
11390 NaNs or Infinities. Skip the transformation
11391 for non-real operands. */
11392 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11393 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11394 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11395 && operand_equal_p (arg0, arg1, 0))
11397 tree r = build_real (TREE_TYPE (arg0), dconst1);
11399 return omit_two_operands (type, r, arg0, arg1);
11402 /* The complex version of the above A / A optimization. */
11403 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11404 && operand_equal_p (arg0, arg1, 0))
11406 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11407 if (! HONOR_NANS (TYPE_MODE (elem_type))
11408 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11410 tree r = build_real (elem_type, dconst1);
11411 /* omit_two_operands will call fold_convert for us. */
11412 return omit_two_operands (type, r, arg0, arg1);
11416 /* (-A) / (-B) -> A / B */
11417 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11418 return fold_build2 (RDIV_EXPR, type,
11419 TREE_OPERAND (arg0, 0),
11420 negate_expr (arg1));
11421 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11422 return fold_build2 (RDIV_EXPR, type,
11423 negate_expr (arg0),
11424 TREE_OPERAND (arg1, 0));
11426 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11427 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11428 && real_onep (arg1))
11429 return non_lvalue (fold_convert (type, arg0));
11431 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11432 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11433 && real_minus_onep (arg1))
11434 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11436 /* If ARG1 is a constant, we can convert this to a multiply by the
11437 reciprocal. This does not have the same rounding properties,
11438 so only do this if -freciprocal-math. We can actually
11439 always safely do it if ARG1 is a power of two, but it's hard to
11440 tell if it is or not in a portable manner. */
11441 if (TREE_CODE (arg1) == REAL_CST)
11443 if (flag_reciprocal_math
11444 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11445 arg1, 0)))
11446 return fold_build2 (MULT_EXPR, type, arg0, tem);
11447 /* Find the reciprocal if optimizing and the result is exact. */
11448 if (optimize)
11450 REAL_VALUE_TYPE r;
11451 r = TREE_REAL_CST (arg1);
11452 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11454 tem = build_real (type, r);
11455 return fold_build2 (MULT_EXPR, type,
11456 fold_convert (type, arg0), tem);
11460 /* Convert A/B/C to A/(B*C). */
11461 if (flag_reciprocal_math
11462 && TREE_CODE (arg0) == RDIV_EXPR)
11463 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11464 fold_build2 (MULT_EXPR, type,
11465 TREE_OPERAND (arg0, 1), arg1));
11467 /* Convert A/(B/C) to (A/B)*C. */
11468 if (flag_reciprocal_math
11469 && TREE_CODE (arg1) == RDIV_EXPR)
11470 return fold_build2 (MULT_EXPR, type,
11471 fold_build2 (RDIV_EXPR, type, arg0,
11472 TREE_OPERAND (arg1, 0)),
11473 TREE_OPERAND (arg1, 1));
11475 /* Convert C1/(X*C2) into (C1/C2)/X. */
11476 if (flag_reciprocal_math
11477 && TREE_CODE (arg1) == MULT_EXPR
11478 && TREE_CODE (arg0) == REAL_CST
11479 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11481 tree tem = const_binop (RDIV_EXPR, arg0,
11482 TREE_OPERAND (arg1, 1), 0);
11483 if (tem)
11484 return fold_build2 (RDIV_EXPR, type, tem,
11485 TREE_OPERAND (arg1, 0));
11488 if (flag_unsafe_math_optimizations)
11490 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11491 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11493 /* Optimize sin(x)/cos(x) as tan(x). */
11494 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11495 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11496 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11497 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11498 CALL_EXPR_ARG (arg1, 0), 0))
11500 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11502 if (tanfn != NULL_TREE)
11503 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11506 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11507 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11508 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11509 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11510 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11511 CALL_EXPR_ARG (arg1, 0), 0))
11513 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11515 if (tanfn != NULL_TREE)
11517 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11518 return fold_build2 (RDIV_EXPR, type,
11519 build_real (type, dconst1), tmp);
11523 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11524 NaNs or Infinities. */
11525 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11526 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11527 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11529 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11530 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11532 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11533 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11534 && operand_equal_p (arg00, arg01, 0))
11536 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11538 if (cosfn != NULL_TREE)
11539 return build_call_expr (cosfn, 1, arg00);
11543 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11544 NaNs or Infinities. */
11545 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11546 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11547 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11549 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11550 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11552 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11553 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11554 && operand_equal_p (arg00, arg01, 0))
11556 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11558 if (cosfn != NULL_TREE)
11560 tree tmp = build_call_expr (cosfn, 1, arg00);
11561 return fold_build2 (RDIV_EXPR, type,
11562 build_real (type, dconst1),
11563 tmp);
11568 /* Optimize pow(x,c)/x as pow(x,c-1). */
11569 if (fcode0 == BUILT_IN_POW
11570 || fcode0 == BUILT_IN_POWF
11571 || fcode0 == BUILT_IN_POWL)
11573 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11574 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11575 if (TREE_CODE (arg01) == REAL_CST
11576 && !TREE_OVERFLOW (arg01)
11577 && operand_equal_p (arg1, arg00, 0))
11579 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11580 REAL_VALUE_TYPE c;
11581 tree arg;
11583 c = TREE_REAL_CST (arg01);
11584 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11585 arg = build_real (type, c);
11586 return build_call_expr (powfn, 2, arg1, arg);
11590 /* Optimize a/root(b/c) into a*root(c/b). */
11591 if (BUILTIN_ROOT_P (fcode1))
11593 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11595 if (TREE_CODE (rootarg) == RDIV_EXPR)
11597 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11598 tree b = TREE_OPERAND (rootarg, 0);
11599 tree c = TREE_OPERAND (rootarg, 1);
11601 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11603 tmp = build_call_expr (rootfn, 1, tmp);
11604 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11608 /* Optimize x/expN(y) into x*expN(-y). */
11609 if (BUILTIN_EXPONENT_P (fcode1))
11611 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11612 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11613 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11614 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11617 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11618 if (fcode1 == BUILT_IN_POW
11619 || fcode1 == BUILT_IN_POWF
11620 || fcode1 == BUILT_IN_POWL)
11622 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11623 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11624 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11625 tree neg11 = fold_convert (type, negate_expr (arg11));
11626 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11627 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11630 return NULL_TREE;
11632 case TRUNC_DIV_EXPR:
11633 case FLOOR_DIV_EXPR:
11634 /* Simplify A / (B << N) where A and B are positive and B is
11635 a power of 2, to A >> (N + log2(B)). */
11636 strict_overflow_p = false;
11637 if (TREE_CODE (arg1) == LSHIFT_EXPR
11638 && (TYPE_UNSIGNED (type)
11639 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11641 tree sval = TREE_OPERAND (arg1, 0);
11642 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11644 tree sh_cnt = TREE_OPERAND (arg1, 1);
11645 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11647 if (strict_overflow_p)
11648 fold_overflow_warning (("assuming signed overflow does not "
11649 "occur when simplifying A / (B << N)"),
11650 WARN_STRICT_OVERFLOW_MISC);
11652 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11653 sh_cnt, build_int_cst (NULL_TREE, pow2));
11654 return fold_build2 (RSHIFT_EXPR, type,
11655 fold_convert (type, arg0), sh_cnt);
11659 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11660 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11661 if (INTEGRAL_TYPE_P (type)
11662 && TYPE_UNSIGNED (type)
11663 && code == FLOOR_DIV_EXPR)
11664 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11666 /* Fall thru */
11668 case ROUND_DIV_EXPR:
11669 case CEIL_DIV_EXPR:
11670 case EXACT_DIV_EXPR:
11671 if (integer_onep (arg1))
11672 return non_lvalue (fold_convert (type, arg0));
11673 if (integer_zerop (arg1))
11674 return NULL_TREE;
11675 /* X / -1 is -X. */
11676 if (!TYPE_UNSIGNED (type)
11677 && TREE_CODE (arg1) == INTEGER_CST
11678 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11679 && TREE_INT_CST_HIGH (arg1) == -1)
11680 return fold_convert (type, negate_expr (arg0));
11682 /* Convert -A / -B to A / B when the type is signed and overflow is
11683 undefined. */
11684 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11685 && TREE_CODE (arg0) == NEGATE_EXPR
11686 && negate_expr_p (arg1))
11688 if (INTEGRAL_TYPE_P (type))
11689 fold_overflow_warning (("assuming signed overflow does not occur "
11690 "when distributing negation across "
11691 "division"),
11692 WARN_STRICT_OVERFLOW_MISC);
11693 return fold_build2 (code, type,
11694 fold_convert (type, TREE_OPERAND (arg0, 0)),
11695 fold_convert (type, negate_expr (arg1)));
11697 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11698 && TREE_CODE (arg1) == NEGATE_EXPR
11699 && negate_expr_p (arg0))
11701 if (INTEGRAL_TYPE_P (type))
11702 fold_overflow_warning (("assuming signed overflow does not occur "
11703 "when distributing negation across "
11704 "division"),
11705 WARN_STRICT_OVERFLOW_MISC);
11706 return fold_build2 (code, type,
11707 fold_convert (type, negate_expr (arg0)),
11708 fold_convert (type, TREE_OPERAND (arg1, 0)));
11711 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11712 operation, EXACT_DIV_EXPR.
11714 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11715 At one time others generated faster code, it's not clear if they do
11716 after the last round to changes to the DIV code in expmed.c. */
11717 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11718 && multiple_of_p (type, arg0, arg1))
11719 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11721 strict_overflow_p = false;
11722 if (TREE_CODE (arg1) == INTEGER_CST
11723 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11724 &strict_overflow_p)))
11726 if (strict_overflow_p)
11727 fold_overflow_warning (("assuming signed overflow does not occur "
11728 "when simplifying division"),
11729 WARN_STRICT_OVERFLOW_MISC);
11730 return fold_convert (type, tem);
11733 return NULL_TREE;
11735 case CEIL_MOD_EXPR:
11736 case FLOOR_MOD_EXPR:
11737 case ROUND_MOD_EXPR:
11738 case TRUNC_MOD_EXPR:
11739 /* X % 1 is always zero, but be sure to preserve any side
11740 effects in X. */
11741 if (integer_onep (arg1))
11742 return omit_one_operand (type, integer_zero_node, arg0);
11744 /* X % 0, return X % 0 unchanged so that we can get the
11745 proper warnings and errors. */
11746 if (integer_zerop (arg1))
11747 return NULL_TREE;
11749 /* 0 % X is always zero, but be sure to preserve any side
11750 effects in X. Place this after checking for X == 0. */
11751 if (integer_zerop (arg0))
11752 return omit_one_operand (type, integer_zero_node, arg1);
11754 /* X % -1 is zero. */
11755 if (!TYPE_UNSIGNED (type)
11756 && TREE_CODE (arg1) == INTEGER_CST
11757 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11758 && TREE_INT_CST_HIGH (arg1) == -1)
11759 return omit_one_operand (type, integer_zero_node, arg0);
11761 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11762 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11763 strict_overflow_p = false;
11764 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11765 && (TYPE_UNSIGNED (type)
11766 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11768 tree c = arg1;
11769 /* Also optimize A % (C << N) where C is a power of 2,
11770 to A & ((C << N) - 1). */
11771 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11772 c = TREE_OPERAND (arg1, 0);
11774 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11776 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11777 build_int_cst (TREE_TYPE (arg1), 1));
11778 if (strict_overflow_p)
11779 fold_overflow_warning (("assuming signed overflow does not "
11780 "occur when simplifying "
11781 "X % (power of two)"),
11782 WARN_STRICT_OVERFLOW_MISC);
11783 return fold_build2 (BIT_AND_EXPR, type,
11784 fold_convert (type, arg0),
11785 fold_convert (type, mask));
11789 /* X % -C is the same as X % C. */
11790 if (code == TRUNC_MOD_EXPR
11791 && !TYPE_UNSIGNED (type)
11792 && TREE_CODE (arg1) == INTEGER_CST
11793 && !TREE_OVERFLOW (arg1)
11794 && TREE_INT_CST_HIGH (arg1) < 0
11795 && !TYPE_OVERFLOW_TRAPS (type)
11796 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11797 && !sign_bit_p (arg1, arg1))
11798 return fold_build2 (code, type, fold_convert (type, arg0),
11799 fold_convert (type, negate_expr (arg1)));
11801 /* X % -Y is the same as X % Y. */
11802 if (code == TRUNC_MOD_EXPR
11803 && !TYPE_UNSIGNED (type)
11804 && TREE_CODE (arg1) == NEGATE_EXPR
11805 && !TYPE_OVERFLOW_TRAPS (type))
11806 return fold_build2 (code, type, fold_convert (type, arg0),
11807 fold_convert (type, TREE_OPERAND (arg1, 0)));
11809 if (TREE_CODE (arg1) == INTEGER_CST
11810 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11811 &strict_overflow_p)))
11813 if (strict_overflow_p)
11814 fold_overflow_warning (("assuming signed overflow does not occur "
11815 "when simplifying modulus"),
11816 WARN_STRICT_OVERFLOW_MISC);
11817 return fold_convert (type, tem);
11820 return NULL_TREE;
11822 case LROTATE_EXPR:
11823 case RROTATE_EXPR:
11824 if (integer_all_onesp (arg0))
11825 return omit_one_operand (type, arg0, arg1);
11826 goto shift;
11828 case RSHIFT_EXPR:
11829 /* Optimize -1 >> x for arithmetic right shifts. */
11830 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11831 && tree_expr_nonnegative_p (arg1))
11832 return omit_one_operand (type, arg0, arg1);
11833 /* ... fall through ... */
11835 case LSHIFT_EXPR:
11836 shift:
11837 if (integer_zerop (arg1))
11838 return non_lvalue (fold_convert (type, arg0));
11839 if (integer_zerop (arg0))
11840 return omit_one_operand (type, arg0, arg1);
11842 /* Since negative shift count is not well-defined,
11843 don't try to compute it in the compiler. */
11844 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11845 return NULL_TREE;
11847 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11848 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11849 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11850 && host_integerp (TREE_OPERAND (arg0, 1), false)
11851 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11853 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11854 + TREE_INT_CST_LOW (arg1));
11856 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11857 being well defined. */
11858 if (low >= TYPE_PRECISION (type))
11860 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11861 low = low % TYPE_PRECISION (type);
11862 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11863 return build_int_cst (type, 0);
11864 else
11865 low = TYPE_PRECISION (type) - 1;
11868 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11869 build_int_cst (type, low));
11872 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11873 into x & ((unsigned)-1 >> c) for unsigned types. */
11874 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11875 || (TYPE_UNSIGNED (type)
11876 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11877 && host_integerp (arg1, false)
11878 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11879 && host_integerp (TREE_OPERAND (arg0, 1), false)
11880 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11882 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11883 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11884 tree lshift;
11885 tree arg00;
11887 if (low0 == low1)
11889 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11891 lshift = build_int_cst (type, -1);
11892 lshift = int_const_binop (code, lshift, arg1, 0);
11894 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11898 /* Rewrite an LROTATE_EXPR by a constant into an
11899 RROTATE_EXPR by a new constant. */
11900 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11902 tree tem = build_int_cst (TREE_TYPE (arg1),
11903 TYPE_PRECISION (type));
11904 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11905 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11908 /* If we have a rotate of a bit operation with the rotate count and
11909 the second operand of the bit operation both constant,
11910 permute the two operations. */
11911 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11912 && (TREE_CODE (arg0) == BIT_AND_EXPR
11913 || TREE_CODE (arg0) == BIT_IOR_EXPR
11914 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11915 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11916 return fold_build2 (TREE_CODE (arg0), type,
11917 fold_build2 (code, type,
11918 TREE_OPERAND (arg0, 0), arg1),
11919 fold_build2 (code, type,
11920 TREE_OPERAND (arg0, 1), arg1));
11922 /* Two consecutive rotates adding up to the precision of the
11923 type can be ignored. */
11924 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11925 && TREE_CODE (arg0) == RROTATE_EXPR
11926 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11927 && TREE_INT_CST_HIGH (arg1) == 0
11928 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11929 && ((TREE_INT_CST_LOW (arg1)
11930 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11931 == (unsigned int) TYPE_PRECISION (type)))
11932 return TREE_OPERAND (arg0, 0);
11934 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11935 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11936 if the latter can be further optimized. */
11937 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11938 && TREE_CODE (arg0) == BIT_AND_EXPR
11939 && TREE_CODE (arg1) == INTEGER_CST
11940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11942 tree mask = fold_build2 (code, type,
11943 fold_convert (type, TREE_OPERAND (arg0, 1)),
11944 arg1);
11945 tree shift = fold_build2 (code, type,
11946 fold_convert (type, TREE_OPERAND (arg0, 0)),
11947 arg1);
11948 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11949 if (tem)
11950 return tem;
11953 return NULL_TREE;
11955 case MIN_EXPR:
11956 if (operand_equal_p (arg0, arg1, 0))
11957 return omit_one_operand (type, arg0, arg1);
11958 if (INTEGRAL_TYPE_P (type)
11959 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11960 return omit_one_operand (type, arg1, arg0);
11961 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11962 if (tem)
11963 return tem;
11964 goto associate;
11966 case MAX_EXPR:
11967 if (operand_equal_p (arg0, arg1, 0))
11968 return omit_one_operand (type, arg0, arg1);
11969 if (INTEGRAL_TYPE_P (type)
11970 && TYPE_MAX_VALUE (type)
11971 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11972 return omit_one_operand (type, arg1, arg0);
11973 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11974 if (tem)
11975 return tem;
11976 goto associate;
11978 case TRUTH_ANDIF_EXPR:
11979 /* Note that the operands of this must be ints
11980 and their values must be 0 or 1.
11981 ("true" is a fixed value perhaps depending on the language.) */
11982 /* If first arg is constant zero, return it. */
11983 if (integer_zerop (arg0))
11984 return fold_convert (type, arg0);
11985 case TRUTH_AND_EXPR:
11986 /* If either arg is constant true, drop it. */
11987 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11988 return non_lvalue (fold_convert (type, arg1));
11989 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11990 /* Preserve sequence points. */
11991 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11992 return non_lvalue (fold_convert (type, arg0));
11993 /* If second arg is constant zero, result is zero, but first arg
11994 must be evaluated. */
11995 if (integer_zerop (arg1))
11996 return omit_one_operand (type, arg1, arg0);
11997 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11998 case will be handled here. */
11999 if (integer_zerop (arg0))
12000 return omit_one_operand (type, arg0, arg1);
12002 /* !X && X is always false. */
12003 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12004 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12005 return omit_one_operand (type, integer_zero_node, arg1);
12006 /* X && !X is always false. */
12007 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12008 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12009 return omit_one_operand (type, integer_zero_node, arg0);
12011 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12012 means A >= Y && A != MAX, but in this case we know that
12013 A < X <= MAX. */
12015 if (!TREE_SIDE_EFFECTS (arg0)
12016 && !TREE_SIDE_EFFECTS (arg1))
12018 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12019 if (tem && !operand_equal_p (tem, arg0, 0))
12020 return fold_build2 (code, type, tem, arg1);
12022 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12023 if (tem && !operand_equal_p (tem, arg1, 0))
12024 return fold_build2 (code, type, arg0, tem);
12027 truth_andor:
12028 /* We only do these simplifications if we are optimizing. */
12029 if (!optimize)
12030 return NULL_TREE;
12032 /* Check for things like (A || B) && (A || C). We can convert this
12033 to A || (B && C). Note that either operator can be any of the four
12034 truth and/or operations and the transformation will still be
12035 valid. Also note that we only care about order for the
12036 ANDIF and ORIF operators. If B contains side effects, this
12037 might change the truth-value of A. */
12038 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12039 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12040 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12041 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12042 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12043 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12045 tree a00 = TREE_OPERAND (arg0, 0);
12046 tree a01 = TREE_OPERAND (arg0, 1);
12047 tree a10 = TREE_OPERAND (arg1, 0);
12048 tree a11 = TREE_OPERAND (arg1, 1);
12049 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12050 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12051 && (code == TRUTH_AND_EXPR
12052 || code == TRUTH_OR_EXPR));
12054 if (operand_equal_p (a00, a10, 0))
12055 return fold_build2 (TREE_CODE (arg0), type, a00,
12056 fold_build2 (code, type, a01, a11));
12057 else if (commutative && operand_equal_p (a00, a11, 0))
12058 return fold_build2 (TREE_CODE (arg0), type, a00,
12059 fold_build2 (code, type, a01, a10));
12060 else if (commutative && operand_equal_p (a01, a10, 0))
12061 return fold_build2 (TREE_CODE (arg0), type, a01,
12062 fold_build2 (code, type, a00, a11));
12064 /* This case if tricky because we must either have commutative
12065 operators or else A10 must not have side-effects. */
12067 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12068 && operand_equal_p (a01, a11, 0))
12069 return fold_build2 (TREE_CODE (arg0), type,
12070 fold_build2 (code, type, a00, a10),
12071 a01);
12074 /* See if we can build a range comparison. */
12075 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12076 return tem;
12078 /* Check for the possibility of merging component references. If our
12079 lhs is another similar operation, try to merge its rhs with our
12080 rhs. Then try to merge our lhs and rhs. */
12081 if (TREE_CODE (arg0) == code
12082 && 0 != (tem = fold_truthop (code, type,
12083 TREE_OPERAND (arg0, 1), arg1)))
12084 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12086 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12087 return tem;
12089 return NULL_TREE;
12091 case TRUTH_ORIF_EXPR:
12092 /* Note that the operands of this must be ints
12093 and their values must be 0 or true.
12094 ("true" is a fixed value perhaps depending on the language.) */
12095 /* If first arg is constant true, return it. */
12096 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12097 return fold_convert (type, arg0);
12098 case TRUTH_OR_EXPR:
12099 /* If either arg is constant zero, drop it. */
12100 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12101 return non_lvalue (fold_convert (type, arg1));
12102 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12103 /* Preserve sequence points. */
12104 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12105 return non_lvalue (fold_convert (type, arg0));
12106 /* If second arg is constant true, result is true, but we must
12107 evaluate first arg. */
12108 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12109 return omit_one_operand (type, arg1, arg0);
12110 /* Likewise for first arg, but note this only occurs here for
12111 TRUTH_OR_EXPR. */
12112 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12113 return omit_one_operand (type, arg0, arg1);
12115 /* !X || X is always true. */
12116 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12117 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12118 return omit_one_operand (type, integer_one_node, arg1);
12119 /* X || !X is always true. */
12120 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12121 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12122 return omit_one_operand (type, integer_one_node, arg0);
12124 goto truth_andor;
12126 case TRUTH_XOR_EXPR:
12127 /* If the second arg is constant zero, drop it. */
12128 if (integer_zerop (arg1))
12129 return non_lvalue (fold_convert (type, arg0));
12130 /* If the second arg is constant true, this is a logical inversion. */
12131 if (integer_onep (arg1))
12133 /* Only call invert_truthvalue if operand is a truth value. */
12134 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12135 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12136 else
12137 tem = invert_truthvalue (arg0);
12138 return non_lvalue (fold_convert (type, tem));
12140 /* Identical arguments cancel to zero. */
12141 if (operand_equal_p (arg0, arg1, 0))
12142 return omit_one_operand (type, integer_zero_node, arg0);
12144 /* !X ^ X is always true. */
12145 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12146 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12147 return omit_one_operand (type, integer_one_node, arg1);
12149 /* X ^ !X is always true. */
12150 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12151 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12152 return omit_one_operand (type, integer_one_node, arg0);
12154 return NULL_TREE;
12156 case EQ_EXPR:
12157 case NE_EXPR:
12158 tem = fold_comparison (code, type, op0, op1);
12159 if (tem != NULL_TREE)
12160 return tem;
12162 /* bool_var != 0 becomes bool_var. */
12163 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12164 && code == NE_EXPR)
12165 return non_lvalue (fold_convert (type, arg0));
12167 /* bool_var == 1 becomes bool_var. */
12168 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12169 && code == EQ_EXPR)
12170 return non_lvalue (fold_convert (type, arg0));
12172 /* bool_var != 1 becomes !bool_var. */
12173 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12174 && code == NE_EXPR)
12175 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12177 /* bool_var == 0 becomes !bool_var. */
12178 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12179 && code == EQ_EXPR)
12180 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12182 /* If this is an equality comparison of the address of two non-weak,
12183 unaliased symbols neither of which are extern (since we do not
12184 have access to attributes for externs), then we know the result. */
12185 if (TREE_CODE (arg0) == ADDR_EXPR
12186 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12187 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12188 && ! lookup_attribute ("alias",
12189 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12190 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12191 && TREE_CODE (arg1) == ADDR_EXPR
12192 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12193 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12194 && ! lookup_attribute ("alias",
12195 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12196 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12198 /* We know that we're looking at the address of two
12199 non-weak, unaliased, static _DECL nodes.
12201 It is both wasteful and incorrect to call operand_equal_p
12202 to compare the two ADDR_EXPR nodes. It is wasteful in that
12203 all we need to do is test pointer equality for the arguments
12204 to the two ADDR_EXPR nodes. It is incorrect to use
12205 operand_equal_p as that function is NOT equivalent to a
12206 C equality test. It can in fact return false for two
12207 objects which would test as equal using the C equality
12208 operator. */
12209 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12210 return constant_boolean_node (equal
12211 ? code == EQ_EXPR : code != EQ_EXPR,
12212 type);
12215 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12216 a MINUS_EXPR of a constant, we can convert it into a comparison with
12217 a revised constant as long as no overflow occurs. */
12218 if (TREE_CODE (arg1) == INTEGER_CST
12219 && (TREE_CODE (arg0) == PLUS_EXPR
12220 || TREE_CODE (arg0) == MINUS_EXPR)
12221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12222 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12223 ? MINUS_EXPR : PLUS_EXPR,
12224 fold_convert (TREE_TYPE (arg0), arg1),
12225 TREE_OPERAND (arg0, 1), 0))
12226 && !TREE_OVERFLOW (tem))
12227 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12229 /* Similarly for a NEGATE_EXPR. */
12230 if (TREE_CODE (arg0) == NEGATE_EXPR
12231 && TREE_CODE (arg1) == INTEGER_CST
12232 && 0 != (tem = negate_expr (arg1))
12233 && TREE_CODE (tem) == INTEGER_CST
12234 && !TREE_OVERFLOW (tem))
12235 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12237 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12238 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12239 && TREE_CODE (arg1) == INTEGER_CST
12240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12241 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12242 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12243 fold_convert (TREE_TYPE (arg0), arg1),
12244 TREE_OPERAND (arg0, 1)));
12246 /* Transform comparisons of the form X +- C CMP X. */
12247 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12248 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12249 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12250 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12251 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12253 tree cst = TREE_OPERAND (arg0, 1);
12255 if (code == EQ_EXPR
12256 && !integer_zerop (cst))
12257 return omit_two_operands (type, boolean_false_node,
12258 TREE_OPERAND (arg0, 0), arg1);
12259 else
12260 return omit_two_operands (type, boolean_true_node,
12261 TREE_OPERAND (arg0, 0), arg1);
12264 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12265 for !=. Don't do this for ordered comparisons due to overflow. */
12266 if (TREE_CODE (arg0) == MINUS_EXPR
12267 && integer_zerop (arg1))
12268 return fold_build2 (code, type,
12269 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12271 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12272 if (TREE_CODE (arg0) == ABS_EXPR
12273 && (integer_zerop (arg1) || real_zerop (arg1)))
12274 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12276 /* If this is an EQ or NE comparison with zero and ARG0 is
12277 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12278 two operations, but the latter can be done in one less insn
12279 on machines that have only two-operand insns or on which a
12280 constant cannot be the first operand. */
12281 if (TREE_CODE (arg0) == BIT_AND_EXPR
12282 && integer_zerop (arg1))
12284 tree arg00 = TREE_OPERAND (arg0, 0);
12285 tree arg01 = TREE_OPERAND (arg0, 1);
12286 if (TREE_CODE (arg00) == LSHIFT_EXPR
12287 && integer_onep (TREE_OPERAND (arg00, 0)))
12289 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12290 arg01, TREE_OPERAND (arg00, 1));
12291 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12292 build_int_cst (TREE_TYPE (arg0), 1));
12293 return fold_build2 (code, type,
12294 fold_convert (TREE_TYPE (arg1), tem), arg1);
12296 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12297 && integer_onep (TREE_OPERAND (arg01, 0)))
12299 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12300 arg00, TREE_OPERAND (arg01, 1));
12301 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12302 build_int_cst (TREE_TYPE (arg0), 1));
12303 return fold_build2 (code, type,
12304 fold_convert (TREE_TYPE (arg1), tem), arg1);
12308 /* If this is an NE or EQ comparison of zero against the result of a
12309 signed MOD operation whose second operand is a power of 2, make
12310 the MOD operation unsigned since it is simpler and equivalent. */
12311 if (integer_zerop (arg1)
12312 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12313 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12314 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12315 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12316 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12317 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12319 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12320 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12321 fold_convert (newtype,
12322 TREE_OPERAND (arg0, 0)),
12323 fold_convert (newtype,
12324 TREE_OPERAND (arg0, 1)));
12326 return fold_build2 (code, type, newmod,
12327 fold_convert (newtype, arg1));
12330 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12331 C1 is a valid shift constant, and C2 is a power of two, i.e.
12332 a single bit. */
12333 if (TREE_CODE (arg0) == BIT_AND_EXPR
12334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12335 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12336 == INTEGER_CST
12337 && integer_pow2p (TREE_OPERAND (arg0, 1))
12338 && integer_zerop (arg1))
12340 tree itype = TREE_TYPE (arg0);
12341 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12342 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12344 /* Check for a valid shift count. */
12345 if (TREE_INT_CST_HIGH (arg001) == 0
12346 && TREE_INT_CST_LOW (arg001) < prec)
12348 tree arg01 = TREE_OPERAND (arg0, 1);
12349 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12350 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12351 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12352 can be rewritten as (X & (C2 << C1)) != 0. */
12353 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12355 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12356 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12357 return fold_build2 (code, type, tem, arg1);
12359 /* Otherwise, for signed (arithmetic) shifts,
12360 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12361 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12362 else if (!TYPE_UNSIGNED (itype))
12363 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12364 arg000, build_int_cst (itype, 0));
12365 /* Otherwise, of unsigned (logical) shifts,
12366 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12367 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12368 else
12369 return omit_one_operand (type,
12370 code == EQ_EXPR ? integer_one_node
12371 : integer_zero_node,
12372 arg000);
12376 /* If this is an NE comparison of zero with an AND of one, remove the
12377 comparison since the AND will give the correct value. */
12378 if (code == NE_EXPR
12379 && integer_zerop (arg1)
12380 && TREE_CODE (arg0) == BIT_AND_EXPR
12381 && integer_onep (TREE_OPERAND (arg0, 1)))
12382 return fold_convert (type, arg0);
12384 /* If we have (A & C) == C where C is a power of 2, convert this into
12385 (A & C) != 0. Similarly for NE_EXPR. */
12386 if (TREE_CODE (arg0) == BIT_AND_EXPR
12387 && integer_pow2p (TREE_OPERAND (arg0, 1))
12388 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12389 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12390 arg0, fold_convert (TREE_TYPE (arg0),
12391 integer_zero_node));
12393 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12394 bit, then fold the expression into A < 0 or A >= 0. */
12395 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12396 if (tem)
12397 return tem;
12399 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12400 Similarly for NE_EXPR. */
12401 if (TREE_CODE (arg0) == BIT_AND_EXPR
12402 && TREE_CODE (arg1) == INTEGER_CST
12403 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12405 tree notc = fold_build1 (BIT_NOT_EXPR,
12406 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12407 TREE_OPERAND (arg0, 1));
12408 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12409 arg1, notc);
12410 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12411 if (integer_nonzerop (dandnotc))
12412 return omit_one_operand (type, rslt, arg0);
12415 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12416 Similarly for NE_EXPR. */
12417 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12418 && TREE_CODE (arg1) == INTEGER_CST
12419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12421 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12422 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12423 TREE_OPERAND (arg0, 1), notd);
12424 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12425 if (integer_nonzerop (candnotd))
12426 return omit_one_operand (type, rslt, arg0);
12429 /* If this is a comparison of a field, we may be able to simplify it. */
12430 if ((TREE_CODE (arg0) == COMPONENT_REF
12431 || TREE_CODE (arg0) == BIT_FIELD_REF)
12432 /* Handle the constant case even without -O
12433 to make sure the warnings are given. */
12434 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12436 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12437 if (t1)
12438 return t1;
12441 /* Optimize comparisons of strlen vs zero to a compare of the
12442 first character of the string vs zero. To wit,
12443 strlen(ptr) == 0 => *ptr == 0
12444 strlen(ptr) != 0 => *ptr != 0
12445 Other cases should reduce to one of these two (or a constant)
12446 due to the return value of strlen being unsigned. */
12447 if (TREE_CODE (arg0) == CALL_EXPR
12448 && integer_zerop (arg1))
12450 tree fndecl = get_callee_fndecl (arg0);
12452 if (fndecl
12453 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12454 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12455 && call_expr_nargs (arg0) == 1
12456 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12458 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12459 return fold_build2 (code, type, iref,
12460 build_int_cst (TREE_TYPE (iref), 0));
12464 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12465 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12466 if (TREE_CODE (arg0) == RSHIFT_EXPR
12467 && integer_zerop (arg1)
12468 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12470 tree arg00 = TREE_OPERAND (arg0, 0);
12471 tree arg01 = TREE_OPERAND (arg0, 1);
12472 tree itype = TREE_TYPE (arg00);
12473 if (TREE_INT_CST_HIGH (arg01) == 0
12474 && TREE_INT_CST_LOW (arg01)
12475 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12477 if (TYPE_UNSIGNED (itype))
12479 itype = signed_type_for (itype);
12480 arg00 = fold_convert (itype, arg00);
12482 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12483 type, arg00, build_int_cst (itype, 0));
12487 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12488 if (integer_zerop (arg1)
12489 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12490 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12491 TREE_OPERAND (arg0, 1));
12493 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12494 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12495 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12496 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12497 build_int_cst (TREE_TYPE (arg1), 0));
12498 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12499 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12500 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12501 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12502 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12503 build_int_cst (TREE_TYPE (arg1), 0));
12505 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12506 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12507 && TREE_CODE (arg1) == INTEGER_CST
12508 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12509 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12510 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12511 TREE_OPERAND (arg0, 1), arg1));
12513 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12514 (X & C) == 0 when C is a single bit. */
12515 if (TREE_CODE (arg0) == BIT_AND_EXPR
12516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12517 && integer_zerop (arg1)
12518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12520 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12521 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12522 TREE_OPERAND (arg0, 1));
12523 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12524 type, tem, arg1);
12527 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12528 constant C is a power of two, i.e. a single bit. */
12529 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12530 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12531 && integer_zerop (arg1)
12532 && integer_pow2p (TREE_OPERAND (arg0, 1))
12533 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12534 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12536 tree arg00 = TREE_OPERAND (arg0, 0);
12537 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12538 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12541 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12542 when is C is a power of two, i.e. a single bit. */
12543 if (TREE_CODE (arg0) == BIT_AND_EXPR
12544 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12545 && integer_zerop (arg1)
12546 && integer_pow2p (TREE_OPERAND (arg0, 1))
12547 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12548 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12550 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12551 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12552 arg000, TREE_OPERAND (arg0, 1));
12553 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12554 tem, build_int_cst (TREE_TYPE (tem), 0));
12557 if (integer_zerop (arg1)
12558 && tree_expr_nonzero_p (arg0))
12560 tree res = constant_boolean_node (code==NE_EXPR, type);
12561 return omit_one_operand (type, res, arg0);
12564 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12565 if (TREE_CODE (arg0) == NEGATE_EXPR
12566 && TREE_CODE (arg1) == NEGATE_EXPR)
12567 return fold_build2 (code, type,
12568 TREE_OPERAND (arg0, 0),
12569 TREE_OPERAND (arg1, 0));
12571 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12572 if (TREE_CODE (arg0) == BIT_AND_EXPR
12573 && TREE_CODE (arg1) == BIT_AND_EXPR)
12575 tree arg00 = TREE_OPERAND (arg0, 0);
12576 tree arg01 = TREE_OPERAND (arg0, 1);
12577 tree arg10 = TREE_OPERAND (arg1, 0);
12578 tree arg11 = TREE_OPERAND (arg1, 1);
12579 tree itype = TREE_TYPE (arg0);
12581 if (operand_equal_p (arg01, arg11, 0))
12582 return fold_build2 (code, type,
12583 fold_build2 (BIT_AND_EXPR, itype,
12584 fold_build2 (BIT_XOR_EXPR, itype,
12585 arg00, arg10),
12586 arg01),
12587 build_int_cst (itype, 0));
12589 if (operand_equal_p (arg01, arg10, 0))
12590 return fold_build2 (code, type,
12591 fold_build2 (BIT_AND_EXPR, itype,
12592 fold_build2 (BIT_XOR_EXPR, itype,
12593 arg00, arg11),
12594 arg01),
12595 build_int_cst (itype, 0));
12597 if (operand_equal_p (arg00, arg11, 0))
12598 return fold_build2 (code, type,
12599 fold_build2 (BIT_AND_EXPR, itype,
12600 fold_build2 (BIT_XOR_EXPR, itype,
12601 arg01, arg10),
12602 arg00),
12603 build_int_cst (itype, 0));
12605 if (operand_equal_p (arg00, arg10, 0))
12606 return fold_build2 (code, type,
12607 fold_build2 (BIT_AND_EXPR, itype,
12608 fold_build2 (BIT_XOR_EXPR, itype,
12609 arg01, arg11),
12610 arg00),
12611 build_int_cst (itype, 0));
12614 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12615 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12617 tree arg00 = TREE_OPERAND (arg0, 0);
12618 tree arg01 = TREE_OPERAND (arg0, 1);
12619 tree arg10 = TREE_OPERAND (arg1, 0);
12620 tree arg11 = TREE_OPERAND (arg1, 1);
12621 tree itype = TREE_TYPE (arg0);
12623 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12624 operand_equal_p guarantees no side-effects so we don't need
12625 to use omit_one_operand on Z. */
12626 if (operand_equal_p (arg01, arg11, 0))
12627 return fold_build2 (code, type, arg00, arg10);
12628 if (operand_equal_p (arg01, arg10, 0))
12629 return fold_build2 (code, type, arg00, arg11);
12630 if (operand_equal_p (arg00, arg11, 0))
12631 return fold_build2 (code, type, arg01, arg10);
12632 if (operand_equal_p (arg00, arg10, 0))
12633 return fold_build2 (code, type, arg01, arg11);
12635 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12636 if (TREE_CODE (arg01) == INTEGER_CST
12637 && TREE_CODE (arg11) == INTEGER_CST)
12638 return fold_build2 (code, type,
12639 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12640 fold_build2 (BIT_XOR_EXPR, itype,
12641 arg01, arg11)),
12642 arg10);
12645 /* Attempt to simplify equality/inequality comparisons of complex
12646 values. Only lower the comparison if the result is known or
12647 can be simplified to a single scalar comparison. */
12648 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12649 || TREE_CODE (arg0) == COMPLEX_CST)
12650 && (TREE_CODE (arg1) == COMPLEX_EXPR
12651 || TREE_CODE (arg1) == COMPLEX_CST))
12653 tree real0, imag0, real1, imag1;
12654 tree rcond, icond;
12656 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12658 real0 = TREE_OPERAND (arg0, 0);
12659 imag0 = TREE_OPERAND (arg0, 1);
12661 else
12663 real0 = TREE_REALPART (arg0);
12664 imag0 = TREE_IMAGPART (arg0);
12667 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12669 real1 = TREE_OPERAND (arg1, 0);
12670 imag1 = TREE_OPERAND (arg1, 1);
12672 else
12674 real1 = TREE_REALPART (arg1);
12675 imag1 = TREE_IMAGPART (arg1);
12678 rcond = fold_binary (code, type, real0, real1);
12679 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12681 if (integer_zerop (rcond))
12683 if (code == EQ_EXPR)
12684 return omit_two_operands (type, boolean_false_node,
12685 imag0, imag1);
12686 return fold_build2 (NE_EXPR, type, imag0, imag1);
12688 else
12690 if (code == NE_EXPR)
12691 return omit_two_operands (type, boolean_true_node,
12692 imag0, imag1);
12693 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12697 icond = fold_binary (code, type, imag0, imag1);
12698 if (icond && TREE_CODE (icond) == INTEGER_CST)
12700 if (integer_zerop (icond))
12702 if (code == EQ_EXPR)
12703 return omit_two_operands (type, boolean_false_node,
12704 real0, real1);
12705 return fold_build2 (NE_EXPR, type, real0, real1);
12707 else
12709 if (code == NE_EXPR)
12710 return omit_two_operands (type, boolean_true_node,
12711 real0, real1);
12712 return fold_build2 (EQ_EXPR, type, real0, real1);
12717 return NULL_TREE;
12719 case LT_EXPR:
12720 case GT_EXPR:
12721 case LE_EXPR:
12722 case GE_EXPR:
12723 tem = fold_comparison (code, type, op0, op1);
12724 if (tem != NULL_TREE)
12725 return tem;
12727 /* Transform comparisons of the form X +- C CMP X. */
12728 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12730 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12731 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12732 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12733 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12735 tree arg01 = TREE_OPERAND (arg0, 1);
12736 enum tree_code code0 = TREE_CODE (arg0);
12737 int is_positive;
12739 if (TREE_CODE (arg01) == REAL_CST)
12740 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12741 else
12742 is_positive = tree_int_cst_sgn (arg01);
12744 /* (X - c) > X becomes false. */
12745 if (code == GT_EXPR
12746 && ((code0 == MINUS_EXPR && is_positive >= 0)
12747 || (code0 == PLUS_EXPR && is_positive <= 0)))
12749 if (TREE_CODE (arg01) == INTEGER_CST
12750 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12751 fold_overflow_warning (("assuming signed overflow does not "
12752 "occur when assuming that (X - c) > X "
12753 "is always false"),
12754 WARN_STRICT_OVERFLOW_ALL);
12755 return constant_boolean_node (0, type);
12758 /* Likewise (X + c) < X becomes false. */
12759 if (code == LT_EXPR
12760 && ((code0 == PLUS_EXPR && is_positive >= 0)
12761 || (code0 == MINUS_EXPR && is_positive <= 0)))
12763 if (TREE_CODE (arg01) == INTEGER_CST
12764 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12765 fold_overflow_warning (("assuming signed overflow does not "
12766 "occur when assuming that "
12767 "(X + c) < X is always false"),
12768 WARN_STRICT_OVERFLOW_ALL);
12769 return constant_boolean_node (0, type);
12772 /* Convert (X - c) <= X to true. */
12773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12774 && code == LE_EXPR
12775 && ((code0 == MINUS_EXPR && is_positive >= 0)
12776 || (code0 == PLUS_EXPR && is_positive <= 0)))
12778 if (TREE_CODE (arg01) == INTEGER_CST
12779 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12780 fold_overflow_warning (("assuming signed overflow does not "
12781 "occur when assuming that "
12782 "(X - c) <= X is always true"),
12783 WARN_STRICT_OVERFLOW_ALL);
12784 return constant_boolean_node (1, type);
12787 /* Convert (X + c) >= X to true. */
12788 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12789 && code == GE_EXPR
12790 && ((code0 == PLUS_EXPR && is_positive >= 0)
12791 || (code0 == MINUS_EXPR && is_positive <= 0)))
12793 if (TREE_CODE (arg01) == INTEGER_CST
12794 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12795 fold_overflow_warning (("assuming signed overflow does not "
12796 "occur when assuming that "
12797 "(X + c) >= X is always true"),
12798 WARN_STRICT_OVERFLOW_ALL);
12799 return constant_boolean_node (1, type);
12802 if (TREE_CODE (arg01) == INTEGER_CST)
12804 /* Convert X + c > X and X - c < X to true for integers. */
12805 if (code == GT_EXPR
12806 && ((code0 == PLUS_EXPR && is_positive > 0)
12807 || (code0 == MINUS_EXPR && is_positive < 0)))
12809 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12810 fold_overflow_warning (("assuming signed overflow does "
12811 "not occur when assuming that "
12812 "(X + c) > X is always true"),
12813 WARN_STRICT_OVERFLOW_ALL);
12814 return constant_boolean_node (1, type);
12817 if (code == LT_EXPR
12818 && ((code0 == MINUS_EXPR && is_positive > 0)
12819 || (code0 == PLUS_EXPR && is_positive < 0)))
12821 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12822 fold_overflow_warning (("assuming signed overflow does "
12823 "not occur when assuming that "
12824 "(X - c) < X is always true"),
12825 WARN_STRICT_OVERFLOW_ALL);
12826 return constant_boolean_node (1, type);
12829 /* Convert X + c <= X and X - c >= X to false for integers. */
12830 if (code == LE_EXPR
12831 && ((code0 == PLUS_EXPR && is_positive > 0)
12832 || (code0 == MINUS_EXPR && is_positive < 0)))
12834 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12835 fold_overflow_warning (("assuming signed overflow does "
12836 "not occur when assuming that "
12837 "(X + c) <= X is always false"),
12838 WARN_STRICT_OVERFLOW_ALL);
12839 return constant_boolean_node (0, type);
12842 if (code == GE_EXPR
12843 && ((code0 == MINUS_EXPR && is_positive > 0)
12844 || (code0 == PLUS_EXPR && is_positive < 0)))
12846 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12847 fold_overflow_warning (("assuming signed overflow does "
12848 "not occur when assuming that "
12849 "(X - c) >= X is always false"),
12850 WARN_STRICT_OVERFLOW_ALL);
12851 return constant_boolean_node (0, type);
12856 /* Comparisons with the highest or lowest possible integer of
12857 the specified precision will have known values. */
12859 tree arg1_type = TREE_TYPE (arg1);
12860 unsigned int width = TYPE_PRECISION (arg1_type);
12862 if (TREE_CODE (arg1) == INTEGER_CST
12863 && width <= 2 * HOST_BITS_PER_WIDE_INT
12864 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12866 HOST_WIDE_INT signed_max_hi;
12867 unsigned HOST_WIDE_INT signed_max_lo;
12868 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12870 if (width <= HOST_BITS_PER_WIDE_INT)
12872 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12873 - 1;
12874 signed_max_hi = 0;
12875 max_hi = 0;
12877 if (TYPE_UNSIGNED (arg1_type))
12879 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12880 min_lo = 0;
12881 min_hi = 0;
12883 else
12885 max_lo = signed_max_lo;
12886 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12887 min_hi = -1;
12890 else
12892 width -= HOST_BITS_PER_WIDE_INT;
12893 signed_max_lo = -1;
12894 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12895 - 1;
12896 max_lo = -1;
12897 min_lo = 0;
12899 if (TYPE_UNSIGNED (arg1_type))
12901 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12902 min_hi = 0;
12904 else
12906 max_hi = signed_max_hi;
12907 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12911 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12912 && TREE_INT_CST_LOW (arg1) == max_lo)
12913 switch (code)
12915 case GT_EXPR:
12916 return omit_one_operand (type, integer_zero_node, arg0);
12918 case GE_EXPR:
12919 return fold_build2 (EQ_EXPR, type, op0, op1);
12921 case LE_EXPR:
12922 return omit_one_operand (type, integer_one_node, arg0);
12924 case LT_EXPR:
12925 return fold_build2 (NE_EXPR, type, op0, op1);
12927 /* The GE_EXPR and LT_EXPR cases above are not normally
12928 reached because of previous transformations. */
12930 default:
12931 break;
12933 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12934 == max_hi
12935 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12936 switch (code)
12938 case GT_EXPR:
12939 arg1 = const_binop (PLUS_EXPR, arg1,
12940 build_int_cst (TREE_TYPE (arg1), 1), 0);
12941 return fold_build2 (EQ_EXPR, type,
12942 fold_convert (TREE_TYPE (arg1), arg0),
12943 arg1);
12944 case LE_EXPR:
12945 arg1 = const_binop (PLUS_EXPR, arg1,
12946 build_int_cst (TREE_TYPE (arg1), 1), 0);
12947 return fold_build2 (NE_EXPR, type,
12948 fold_convert (TREE_TYPE (arg1), arg0),
12949 arg1);
12950 default:
12951 break;
12953 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12954 == min_hi
12955 && TREE_INT_CST_LOW (arg1) == min_lo)
12956 switch (code)
12958 case LT_EXPR:
12959 return omit_one_operand (type, integer_zero_node, arg0);
12961 case LE_EXPR:
12962 return fold_build2 (EQ_EXPR, type, op0, op1);
12964 case GE_EXPR:
12965 return omit_one_operand (type, integer_one_node, arg0);
12967 case GT_EXPR:
12968 return fold_build2 (NE_EXPR, type, op0, op1);
12970 default:
12971 break;
12973 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12974 == min_hi
12975 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12976 switch (code)
12978 case GE_EXPR:
12979 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12980 return fold_build2 (NE_EXPR, type,
12981 fold_convert (TREE_TYPE (arg1), arg0),
12982 arg1);
12983 case LT_EXPR:
12984 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12985 return fold_build2 (EQ_EXPR, type,
12986 fold_convert (TREE_TYPE (arg1), arg0),
12987 arg1);
12988 default:
12989 break;
12992 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12993 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12994 && TYPE_UNSIGNED (arg1_type)
12995 /* We will flip the signedness of the comparison operator
12996 associated with the mode of arg1, so the sign bit is
12997 specified by this mode. Check that arg1 is the signed
12998 max associated with this sign bit. */
12999 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13000 /* signed_type does not work on pointer types. */
13001 && INTEGRAL_TYPE_P (arg1_type))
13003 /* The following case also applies to X < signed_max+1
13004 and X >= signed_max+1 because previous transformations. */
13005 if (code == LE_EXPR || code == GT_EXPR)
13007 tree st;
13008 st = signed_type_for (TREE_TYPE (arg1));
13009 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13010 type, fold_convert (st, arg0),
13011 build_int_cst (st, 0));
13017 /* If we are comparing an ABS_EXPR with a constant, we can
13018 convert all the cases into explicit comparisons, but they may
13019 well not be faster than doing the ABS and one comparison.
13020 But ABS (X) <= C is a range comparison, which becomes a subtraction
13021 and a comparison, and is probably faster. */
13022 if (code == LE_EXPR
13023 && TREE_CODE (arg1) == INTEGER_CST
13024 && TREE_CODE (arg0) == ABS_EXPR
13025 && ! TREE_SIDE_EFFECTS (arg0)
13026 && (0 != (tem = negate_expr (arg1)))
13027 && TREE_CODE (tem) == INTEGER_CST
13028 && !TREE_OVERFLOW (tem))
13029 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13030 build2 (GE_EXPR, type,
13031 TREE_OPERAND (arg0, 0), tem),
13032 build2 (LE_EXPR, type,
13033 TREE_OPERAND (arg0, 0), arg1));
13035 /* Convert ABS_EXPR<x> >= 0 to true. */
13036 strict_overflow_p = false;
13037 if (code == GE_EXPR
13038 && (integer_zerop (arg1)
13039 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13040 && real_zerop (arg1)))
13041 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13043 if (strict_overflow_p)
13044 fold_overflow_warning (("assuming signed overflow does not occur "
13045 "when simplifying comparison of "
13046 "absolute value and zero"),
13047 WARN_STRICT_OVERFLOW_CONDITIONAL);
13048 return omit_one_operand (type, integer_one_node, arg0);
13051 /* Convert ABS_EXPR<x> < 0 to false. */
13052 strict_overflow_p = false;
13053 if (code == LT_EXPR
13054 && (integer_zerop (arg1) || real_zerop (arg1))
13055 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13057 if (strict_overflow_p)
13058 fold_overflow_warning (("assuming signed overflow does not occur "
13059 "when simplifying comparison of "
13060 "absolute value and zero"),
13061 WARN_STRICT_OVERFLOW_CONDITIONAL);
13062 return omit_one_operand (type, integer_zero_node, arg0);
13065 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13066 and similarly for >= into !=. */
13067 if ((code == LT_EXPR || code == GE_EXPR)
13068 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13069 && TREE_CODE (arg1) == LSHIFT_EXPR
13070 && integer_onep (TREE_OPERAND (arg1, 0)))
13071 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13072 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13073 TREE_OPERAND (arg1, 1)),
13074 build_int_cst (TREE_TYPE (arg0), 0));
13076 if ((code == LT_EXPR || code == GE_EXPR)
13077 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13078 && CONVERT_EXPR_P (arg1)
13079 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13080 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13081 return
13082 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13083 fold_convert (TREE_TYPE (arg0),
13084 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13085 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13086 1))),
13087 build_int_cst (TREE_TYPE (arg0), 0));
13089 return NULL_TREE;
13091 case UNORDERED_EXPR:
13092 case ORDERED_EXPR:
13093 case UNLT_EXPR:
13094 case UNLE_EXPR:
13095 case UNGT_EXPR:
13096 case UNGE_EXPR:
13097 case UNEQ_EXPR:
13098 case LTGT_EXPR:
13099 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13101 t1 = fold_relational_const (code, type, arg0, arg1);
13102 if (t1 != NULL_TREE)
13103 return t1;
13106 /* If the first operand is NaN, the result is constant. */
13107 if (TREE_CODE (arg0) == REAL_CST
13108 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13109 && (code != LTGT_EXPR || ! flag_trapping_math))
13111 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13112 ? integer_zero_node
13113 : integer_one_node;
13114 return omit_one_operand (type, t1, arg1);
13117 /* If the second operand is NaN, the result is constant. */
13118 if (TREE_CODE (arg1) == REAL_CST
13119 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13120 && (code != LTGT_EXPR || ! flag_trapping_math))
13122 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13123 ? integer_zero_node
13124 : integer_one_node;
13125 return omit_one_operand (type, t1, arg0);
13128 /* Simplify unordered comparison of something with itself. */
13129 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13130 && operand_equal_p (arg0, arg1, 0))
13131 return constant_boolean_node (1, type);
13133 if (code == LTGT_EXPR
13134 && !flag_trapping_math
13135 && operand_equal_p (arg0, arg1, 0))
13136 return constant_boolean_node (0, type);
13138 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13140 tree targ0 = strip_float_extensions (arg0);
13141 tree targ1 = strip_float_extensions (arg1);
13142 tree newtype = TREE_TYPE (targ0);
13144 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13145 newtype = TREE_TYPE (targ1);
13147 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13148 return fold_build2 (code, type, fold_convert (newtype, targ0),
13149 fold_convert (newtype, targ1));
13152 return NULL_TREE;
13154 case COMPOUND_EXPR:
13155 /* When pedantic, a compound expression can be neither an lvalue
13156 nor an integer constant expression. */
13157 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13158 return NULL_TREE;
13159 /* Don't let (0, 0) be null pointer constant. */
13160 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13161 : fold_convert (type, arg1);
13162 return pedantic_non_lvalue (tem);
13164 case COMPLEX_EXPR:
13165 if ((TREE_CODE (arg0) == REAL_CST
13166 && TREE_CODE (arg1) == REAL_CST)
13167 || (TREE_CODE (arg0) == INTEGER_CST
13168 && TREE_CODE (arg1) == INTEGER_CST))
13169 return build_complex (type, arg0, arg1);
13170 return NULL_TREE;
13172 case ASSERT_EXPR:
13173 /* An ASSERT_EXPR should never be passed to fold_binary. */
13174 gcc_unreachable ();
13176 default:
13177 return NULL_TREE;
13178 } /* switch (code) */
13181 /* Callback for walk_tree, looking for LABEL_EXPR.
13182 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
13183 Do not check the sub-tree of GOTO_EXPR. */
13185 static tree
13186 contains_label_1 (tree *tp,
13187 int *walk_subtrees,
13188 void *data ATTRIBUTE_UNUSED)
13190 switch (TREE_CODE (*tp))
13192 case LABEL_EXPR:
13193 return *tp;
13194 case GOTO_EXPR:
13195 *walk_subtrees = 0;
13196 /* no break */
13197 default:
13198 return NULL_TREE;
13202 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13203 accessible from outside the sub-tree. Returns NULL_TREE if no
13204 addressable label is found. */
13206 static bool
13207 contains_label_p (tree st)
13209 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13212 /* Fold a ternary expression of code CODE and type TYPE with operands
13213 OP0, OP1, and OP2. Return the folded expression if folding is
13214 successful. Otherwise, return NULL_TREE. */
13216 tree
13217 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13219 tree tem;
13220 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13221 enum tree_code_class kind = TREE_CODE_CLASS (code);
13223 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13224 && TREE_CODE_LENGTH (code) == 3);
13226 /* Strip any conversions that don't change the mode. This is safe
13227 for every expression, except for a comparison expression because
13228 its signedness is derived from its operands. So, in the latter
13229 case, only strip conversions that don't change the signedness.
13231 Note that this is done as an internal manipulation within the
13232 constant folder, in order to find the simplest representation of
13233 the arguments so that their form can be studied. In any cases,
13234 the appropriate type conversions should be put back in the tree
13235 that will get out of the constant folder. */
13236 if (op0)
13238 arg0 = op0;
13239 STRIP_NOPS (arg0);
13242 if (op1)
13244 arg1 = op1;
13245 STRIP_NOPS (arg1);
13248 switch (code)
13250 case COMPONENT_REF:
13251 if (TREE_CODE (arg0) == CONSTRUCTOR
13252 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13254 unsigned HOST_WIDE_INT idx;
13255 tree field, value;
13256 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13257 if (field == arg1)
13258 return value;
13260 return NULL_TREE;
13262 case COND_EXPR:
13263 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13264 so all simple results must be passed through pedantic_non_lvalue. */
13265 if (TREE_CODE (arg0) == INTEGER_CST)
13267 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13268 tem = integer_zerop (arg0) ? op2 : op1;
13269 /* Only optimize constant conditions when the selected branch
13270 has the same type as the COND_EXPR. This avoids optimizing
13271 away "c ? x : throw", where the throw has a void type.
13272 Avoid throwing away that operand which contains label. */
13273 if ((!TREE_SIDE_EFFECTS (unused_op)
13274 || !contains_label_p (unused_op))
13275 && (! VOID_TYPE_P (TREE_TYPE (tem))
13276 || VOID_TYPE_P (type)))
13277 return pedantic_non_lvalue (tem);
13278 return NULL_TREE;
13280 if (operand_equal_p (arg1, op2, 0))
13281 return pedantic_omit_one_operand (type, arg1, arg0);
13283 /* If we have A op B ? A : C, we may be able to convert this to a
13284 simpler expression, depending on the operation and the values
13285 of B and C. Signed zeros prevent all of these transformations,
13286 for reasons given above each one.
13288 Also try swapping the arguments and inverting the conditional. */
13289 if (COMPARISON_CLASS_P (arg0)
13290 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13291 arg1, TREE_OPERAND (arg0, 1))
13292 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13294 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13295 if (tem)
13296 return tem;
13299 if (COMPARISON_CLASS_P (arg0)
13300 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13301 op2,
13302 TREE_OPERAND (arg0, 1))
13303 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13305 tem = fold_truth_not_expr (arg0);
13306 if (tem && COMPARISON_CLASS_P (tem))
13308 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13309 if (tem)
13310 return tem;
13314 /* If the second operand is simpler than the third, swap them
13315 since that produces better jump optimization results. */
13316 if (truth_value_p (TREE_CODE (arg0))
13317 && tree_swap_operands_p (op1, op2, false))
13319 /* See if this can be inverted. If it can't, possibly because
13320 it was a floating-point inequality comparison, don't do
13321 anything. */
13322 tem = fold_truth_not_expr (arg0);
13323 if (tem)
13324 return fold_build3 (code, type, tem, op2, op1);
13327 /* Convert A ? 1 : 0 to simply A. */
13328 if (integer_onep (op1)
13329 && integer_zerop (op2)
13330 /* If we try to convert OP0 to our type, the
13331 call to fold will try to move the conversion inside
13332 a COND, which will recurse. In that case, the COND_EXPR
13333 is probably the best choice, so leave it alone. */
13334 && type == TREE_TYPE (arg0))
13335 return pedantic_non_lvalue (arg0);
13337 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13338 over COND_EXPR in cases such as floating point comparisons. */
13339 if (integer_zerop (op1)
13340 && integer_onep (op2)
13341 && truth_value_p (TREE_CODE (arg0)))
13342 return pedantic_non_lvalue (fold_convert (type,
13343 invert_truthvalue (arg0)));
13345 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13346 if (TREE_CODE (arg0) == LT_EXPR
13347 && integer_zerop (TREE_OPERAND (arg0, 1))
13348 && integer_zerop (op2)
13349 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13351 /* sign_bit_p only checks ARG1 bits within A's precision.
13352 If <sign bit of A> has wider type than A, bits outside
13353 of A's precision in <sign bit of A> need to be checked.
13354 If they are all 0, this optimization needs to be done
13355 in unsigned A's type, if they are all 1 in signed A's type,
13356 otherwise this can't be done. */
13357 if (TYPE_PRECISION (TREE_TYPE (tem))
13358 < TYPE_PRECISION (TREE_TYPE (arg1))
13359 && TYPE_PRECISION (TREE_TYPE (tem))
13360 < TYPE_PRECISION (type))
13362 unsigned HOST_WIDE_INT mask_lo;
13363 HOST_WIDE_INT mask_hi;
13364 int inner_width, outer_width;
13365 tree tem_type;
13367 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13368 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13369 if (outer_width > TYPE_PRECISION (type))
13370 outer_width = TYPE_PRECISION (type);
13372 if (outer_width > HOST_BITS_PER_WIDE_INT)
13374 mask_hi = ((unsigned HOST_WIDE_INT) -1
13375 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13376 mask_lo = -1;
13378 else
13380 mask_hi = 0;
13381 mask_lo = ((unsigned HOST_WIDE_INT) -1
13382 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13384 if (inner_width > HOST_BITS_PER_WIDE_INT)
13386 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13387 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13388 mask_lo = 0;
13390 else
13391 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13392 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13394 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13395 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13397 tem_type = signed_type_for (TREE_TYPE (tem));
13398 tem = fold_convert (tem_type, tem);
13400 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13401 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13403 tem_type = unsigned_type_for (TREE_TYPE (tem));
13404 tem = fold_convert (tem_type, tem);
13406 else
13407 tem = NULL;
13410 if (tem)
13411 return fold_convert (type,
13412 fold_build2 (BIT_AND_EXPR,
13413 TREE_TYPE (tem), tem,
13414 fold_convert (TREE_TYPE (tem),
13415 arg1)));
13418 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13419 already handled above. */
13420 if (TREE_CODE (arg0) == BIT_AND_EXPR
13421 && integer_onep (TREE_OPERAND (arg0, 1))
13422 && integer_zerop (op2)
13423 && integer_pow2p (arg1))
13425 tree tem = TREE_OPERAND (arg0, 0);
13426 STRIP_NOPS (tem);
13427 if (TREE_CODE (tem) == RSHIFT_EXPR
13428 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13429 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13430 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13431 return fold_build2 (BIT_AND_EXPR, type,
13432 TREE_OPERAND (tem, 0), arg1);
13435 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13436 is probably obsolete because the first operand should be a
13437 truth value (that's why we have the two cases above), but let's
13438 leave it in until we can confirm this for all front-ends. */
13439 if (integer_zerop (op2)
13440 && TREE_CODE (arg0) == NE_EXPR
13441 && integer_zerop (TREE_OPERAND (arg0, 1))
13442 && integer_pow2p (arg1)
13443 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13444 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13445 arg1, OEP_ONLY_CONST))
13446 return pedantic_non_lvalue (fold_convert (type,
13447 TREE_OPERAND (arg0, 0)));
13449 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13450 if (integer_zerop (op2)
13451 && truth_value_p (TREE_CODE (arg0))
13452 && truth_value_p (TREE_CODE (arg1)))
13453 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13454 fold_convert (type, arg0),
13455 arg1);
13457 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13458 if (integer_onep (op2)
13459 && truth_value_p (TREE_CODE (arg0))
13460 && truth_value_p (TREE_CODE (arg1)))
13462 /* Only perform transformation if ARG0 is easily inverted. */
13463 tem = fold_truth_not_expr (arg0);
13464 if (tem)
13465 return fold_build2 (TRUTH_ORIF_EXPR, type,
13466 fold_convert (type, tem),
13467 arg1);
13470 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13471 if (integer_zerop (arg1)
13472 && truth_value_p (TREE_CODE (arg0))
13473 && truth_value_p (TREE_CODE (op2)))
13475 /* Only perform transformation if ARG0 is easily inverted. */
13476 tem = fold_truth_not_expr (arg0);
13477 if (tem)
13478 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13479 fold_convert (type, tem),
13480 op2);
13483 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13484 if (integer_onep (arg1)
13485 && truth_value_p (TREE_CODE (arg0))
13486 && truth_value_p (TREE_CODE (op2)))
13487 return fold_build2 (TRUTH_ORIF_EXPR, type,
13488 fold_convert (type, arg0),
13489 op2);
13491 return NULL_TREE;
13493 case CALL_EXPR:
13494 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13495 of fold_ternary on them. */
13496 gcc_unreachable ();
13498 case BIT_FIELD_REF:
13499 if ((TREE_CODE (arg0) == VECTOR_CST
13500 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13501 && type == TREE_TYPE (TREE_TYPE (arg0)))
13503 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13504 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13506 if (width != 0
13507 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13508 && (idx % width) == 0
13509 && (idx = idx / width)
13510 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13512 tree elements = NULL_TREE;
13514 if (TREE_CODE (arg0) == VECTOR_CST)
13515 elements = TREE_VECTOR_CST_ELTS (arg0);
13516 else
13518 unsigned HOST_WIDE_INT idx;
13519 tree value;
13521 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13522 elements = tree_cons (NULL_TREE, value, elements);
13524 while (idx-- > 0 && elements)
13525 elements = TREE_CHAIN (elements);
13526 if (elements)
13527 return TREE_VALUE (elements);
13528 else
13529 return fold_convert (type, integer_zero_node);
13533 /* A bit-field-ref that referenced the full argument can be stripped. */
13534 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13535 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13536 && integer_zerop (op2))
13537 return fold_convert (type, arg0);
13539 return NULL_TREE;
13541 default:
13542 return NULL_TREE;
13543 } /* switch (code) */
13546 /* Perform constant folding and related simplification of EXPR.
13547 The related simplifications include x*1 => x, x*0 => 0, etc.,
13548 and application of the associative law.
13549 NOP_EXPR conversions may be removed freely (as long as we
13550 are careful not to change the type of the overall expression).
13551 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13552 but we can constant-fold them if they have constant operands. */
13554 #ifdef ENABLE_FOLD_CHECKING
13555 # define fold(x) fold_1 (x)
13556 static tree fold_1 (tree);
13557 static
13558 #endif
13559 tree
13560 fold (tree expr)
13562 const tree t = expr;
13563 enum tree_code code = TREE_CODE (t);
13564 enum tree_code_class kind = TREE_CODE_CLASS (code);
13565 tree tem;
13567 /* Return right away if a constant. */
13568 if (kind == tcc_constant)
13569 return t;
13571 /* CALL_EXPR-like objects with variable numbers of operands are
13572 treated specially. */
13573 if (kind == tcc_vl_exp)
13575 if (code == CALL_EXPR)
13577 tem = fold_call_expr (expr, false);
13578 return tem ? tem : expr;
13580 return expr;
13583 if (IS_EXPR_CODE_CLASS (kind))
13585 tree type = TREE_TYPE (t);
13586 tree op0, op1, op2;
13588 switch (TREE_CODE_LENGTH (code))
13590 case 1:
13591 op0 = TREE_OPERAND (t, 0);
13592 tem = fold_unary (code, type, op0);
13593 return tem ? tem : expr;
13594 case 2:
13595 op0 = TREE_OPERAND (t, 0);
13596 op1 = TREE_OPERAND (t, 1);
13597 tem = fold_binary (code, type, op0, op1);
13598 return tem ? tem : expr;
13599 case 3:
13600 op0 = TREE_OPERAND (t, 0);
13601 op1 = TREE_OPERAND (t, 1);
13602 op2 = TREE_OPERAND (t, 2);
13603 tem = fold_ternary (code, type, op0, op1, op2);
13604 return tem ? tem : expr;
13605 default:
13606 break;
13610 switch (code)
13612 case ARRAY_REF:
13614 tree op0 = TREE_OPERAND (t, 0);
13615 tree op1 = TREE_OPERAND (t, 1);
13617 if (TREE_CODE (op1) == INTEGER_CST
13618 && TREE_CODE (op0) == CONSTRUCTOR
13619 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13621 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13622 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13623 unsigned HOST_WIDE_INT begin = 0;
13625 /* Find a matching index by means of a binary search. */
13626 while (begin != end)
13628 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13629 tree index = VEC_index (constructor_elt, elts, middle)->index;
13631 if (TREE_CODE (index) == INTEGER_CST
13632 && tree_int_cst_lt (index, op1))
13633 begin = middle + 1;
13634 else if (TREE_CODE (index) == INTEGER_CST
13635 && tree_int_cst_lt (op1, index))
13636 end = middle;
13637 else if (TREE_CODE (index) == RANGE_EXPR
13638 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13639 begin = middle + 1;
13640 else if (TREE_CODE (index) == RANGE_EXPR
13641 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13642 end = middle;
13643 else
13644 return VEC_index (constructor_elt, elts, middle)->value;
13648 return t;
13651 case CONST_DECL:
13652 return fold (DECL_INITIAL (t));
13654 default:
13655 return t;
13656 } /* switch (code) */
13659 #ifdef ENABLE_FOLD_CHECKING
13660 #undef fold
13662 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13663 static void fold_check_failed (const_tree, const_tree);
13664 void print_fold_checksum (const_tree);
13666 /* When --enable-checking=fold, compute a digest of expr before
13667 and after actual fold call to see if fold did not accidentally
13668 change original expr. */
13670 tree
13671 fold (tree expr)
13673 tree ret;
13674 struct md5_ctx ctx;
13675 unsigned char checksum_before[16], checksum_after[16];
13676 htab_t ht;
13678 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13679 md5_init_ctx (&ctx);
13680 fold_checksum_tree (expr, &ctx, ht);
13681 md5_finish_ctx (&ctx, checksum_before);
13682 htab_empty (ht);
13684 ret = fold_1 (expr);
13686 md5_init_ctx (&ctx);
13687 fold_checksum_tree (expr, &ctx, ht);
13688 md5_finish_ctx (&ctx, checksum_after);
13689 htab_delete (ht);
13691 if (memcmp (checksum_before, checksum_after, 16))
13692 fold_check_failed (expr, ret);
13694 return ret;
13697 void
13698 print_fold_checksum (const_tree expr)
13700 struct md5_ctx ctx;
13701 unsigned char checksum[16], cnt;
13702 htab_t ht;
13704 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13705 md5_init_ctx (&ctx);
13706 fold_checksum_tree (expr, &ctx, ht);
13707 md5_finish_ctx (&ctx, checksum);
13708 htab_delete (ht);
13709 for (cnt = 0; cnt < 16; ++cnt)
13710 fprintf (stderr, "%02x", checksum[cnt]);
13711 putc ('\n', stderr);
13714 static void
13715 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13717 internal_error ("fold check: original tree changed by fold");
13720 static void
13721 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13723 const void **slot;
13724 enum tree_code code;
13725 union tree_node buf;
13726 int i, len;
13728 recursive_label:
13730 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13731 <= sizeof (struct tree_function_decl))
13732 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13733 if (expr == NULL)
13734 return;
13735 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13736 if (*slot != NULL)
13737 return;
13738 *slot = expr;
13739 code = TREE_CODE (expr);
13740 if (TREE_CODE_CLASS (code) == tcc_declaration
13741 && DECL_ASSEMBLER_NAME_SET_P (expr))
13743 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13744 memcpy ((char *) &buf, expr, tree_size (expr));
13745 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13746 expr = (tree) &buf;
13748 else if (TREE_CODE_CLASS (code) == tcc_type
13749 && (TYPE_POINTER_TO (expr)
13750 || TYPE_REFERENCE_TO (expr)
13751 || TYPE_CACHED_VALUES_P (expr)
13752 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13753 || TYPE_NEXT_VARIANT (expr)))
13755 /* Allow these fields to be modified. */
13756 tree tmp;
13757 memcpy ((char *) &buf, expr, tree_size (expr));
13758 expr = tmp = (tree) &buf;
13759 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13760 TYPE_POINTER_TO (tmp) = NULL;
13761 TYPE_REFERENCE_TO (tmp) = NULL;
13762 TYPE_NEXT_VARIANT (tmp) = NULL;
13763 if (TYPE_CACHED_VALUES_P (tmp))
13765 TYPE_CACHED_VALUES_P (tmp) = 0;
13766 TYPE_CACHED_VALUES (tmp) = NULL;
13769 md5_process_bytes (expr, tree_size (expr), ctx);
13770 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13771 if (TREE_CODE_CLASS (code) != tcc_type
13772 && TREE_CODE_CLASS (code) != tcc_declaration
13773 && code != TREE_LIST
13774 && code != SSA_NAME)
13775 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13776 switch (TREE_CODE_CLASS (code))
13778 case tcc_constant:
13779 switch (code)
13781 case STRING_CST:
13782 md5_process_bytes (TREE_STRING_POINTER (expr),
13783 TREE_STRING_LENGTH (expr), ctx);
13784 break;
13785 case COMPLEX_CST:
13786 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13787 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13788 break;
13789 case VECTOR_CST:
13790 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13791 break;
13792 default:
13793 break;
13795 break;
13796 case tcc_exceptional:
13797 switch (code)
13799 case TREE_LIST:
13800 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13801 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13802 expr = TREE_CHAIN (expr);
13803 goto recursive_label;
13804 break;
13805 case TREE_VEC:
13806 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13807 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13808 break;
13809 default:
13810 break;
13812 break;
13813 case tcc_expression:
13814 case tcc_reference:
13815 case tcc_comparison:
13816 case tcc_unary:
13817 case tcc_binary:
13818 case tcc_statement:
13819 case tcc_vl_exp:
13820 len = TREE_OPERAND_LENGTH (expr);
13821 for (i = 0; i < len; ++i)
13822 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13823 break;
13824 case tcc_declaration:
13825 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13826 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13827 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13829 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13830 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13831 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13832 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13833 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13835 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13836 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13838 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13840 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13841 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13842 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13844 break;
13845 case tcc_type:
13846 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13847 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13848 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13849 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13850 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13851 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13852 if (INTEGRAL_TYPE_P (expr)
13853 || SCALAR_FLOAT_TYPE_P (expr))
13855 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13856 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13858 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13859 if (TREE_CODE (expr) == RECORD_TYPE
13860 || TREE_CODE (expr) == UNION_TYPE
13861 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13862 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13863 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13864 break;
13865 default:
13866 break;
13870 /* Helper function for outputting the checksum of a tree T. When
13871 debugging with gdb, you can "define mynext" to be "next" followed
13872 by "call debug_fold_checksum (op0)", then just trace down till the
13873 outputs differ. */
13875 void
13876 debug_fold_checksum (const_tree t)
13878 int i;
13879 unsigned char checksum[16];
13880 struct md5_ctx ctx;
13881 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13883 md5_init_ctx (&ctx);
13884 fold_checksum_tree (t, &ctx, ht);
13885 md5_finish_ctx (&ctx, checksum);
13886 htab_empty (ht);
13888 for (i = 0; i < 16; i++)
13889 fprintf (stderr, "%d ", checksum[i]);
13891 fprintf (stderr, "\n");
13894 #endif
13896 /* Fold a unary tree expression with code CODE of type TYPE with an
13897 operand OP0. Return a folded expression if successful. Otherwise,
13898 return a tree expression with code CODE of type TYPE with an
13899 operand OP0. */
13901 tree
13902 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13904 tree tem;
13905 #ifdef ENABLE_FOLD_CHECKING
13906 unsigned char checksum_before[16], checksum_after[16];
13907 struct md5_ctx ctx;
13908 htab_t ht;
13910 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13911 md5_init_ctx (&ctx);
13912 fold_checksum_tree (op0, &ctx, ht);
13913 md5_finish_ctx (&ctx, checksum_before);
13914 htab_empty (ht);
13915 #endif
13917 tem = fold_unary (code, type, op0);
13918 if (!tem)
13919 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13921 #ifdef ENABLE_FOLD_CHECKING
13922 md5_init_ctx (&ctx);
13923 fold_checksum_tree (op0, &ctx, ht);
13924 md5_finish_ctx (&ctx, checksum_after);
13925 htab_delete (ht);
13927 if (memcmp (checksum_before, checksum_after, 16))
13928 fold_check_failed (op0, tem);
13929 #endif
13930 return tem;
13933 /* Fold a binary tree expression with code CODE of type TYPE with
13934 operands OP0 and OP1. Return a folded expression if successful.
13935 Otherwise, return a tree expression with code CODE of type TYPE
13936 with operands OP0 and OP1. */
13938 tree
13939 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13940 MEM_STAT_DECL)
13942 tree tem;
13943 #ifdef ENABLE_FOLD_CHECKING
13944 unsigned char checksum_before_op0[16],
13945 checksum_before_op1[16],
13946 checksum_after_op0[16],
13947 checksum_after_op1[16];
13948 struct md5_ctx ctx;
13949 htab_t ht;
13951 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13952 md5_init_ctx (&ctx);
13953 fold_checksum_tree (op0, &ctx, ht);
13954 md5_finish_ctx (&ctx, checksum_before_op0);
13955 htab_empty (ht);
13957 md5_init_ctx (&ctx);
13958 fold_checksum_tree (op1, &ctx, ht);
13959 md5_finish_ctx (&ctx, checksum_before_op1);
13960 htab_empty (ht);
13961 #endif
13963 tem = fold_binary (code, type, op0, op1);
13964 if (!tem)
13965 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13967 #ifdef ENABLE_FOLD_CHECKING
13968 md5_init_ctx (&ctx);
13969 fold_checksum_tree (op0, &ctx, ht);
13970 md5_finish_ctx (&ctx, checksum_after_op0);
13971 htab_empty (ht);
13973 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13974 fold_check_failed (op0, tem);
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (op1, &ctx, ht);
13978 md5_finish_ctx (&ctx, checksum_after_op1);
13979 htab_delete (ht);
13981 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13982 fold_check_failed (op1, tem);
13983 #endif
13984 return tem;
13987 /* Fold a ternary tree expression with code CODE of type TYPE with
13988 operands OP0, OP1, and OP2. Return a folded expression if
13989 successful. Otherwise, return a tree expression with code CODE of
13990 type TYPE with operands OP0, OP1, and OP2. */
13992 tree
13993 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13994 MEM_STAT_DECL)
13996 tree tem;
13997 #ifdef ENABLE_FOLD_CHECKING
13998 unsigned char checksum_before_op0[16],
13999 checksum_before_op1[16],
14000 checksum_before_op2[16],
14001 checksum_after_op0[16],
14002 checksum_after_op1[16],
14003 checksum_after_op2[16];
14004 struct md5_ctx ctx;
14005 htab_t ht;
14007 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14008 md5_init_ctx (&ctx);
14009 fold_checksum_tree (op0, &ctx, ht);
14010 md5_finish_ctx (&ctx, checksum_before_op0);
14011 htab_empty (ht);
14013 md5_init_ctx (&ctx);
14014 fold_checksum_tree (op1, &ctx, ht);
14015 md5_finish_ctx (&ctx, checksum_before_op1);
14016 htab_empty (ht);
14018 md5_init_ctx (&ctx);
14019 fold_checksum_tree (op2, &ctx, ht);
14020 md5_finish_ctx (&ctx, checksum_before_op2);
14021 htab_empty (ht);
14022 #endif
14024 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14025 tem = fold_ternary (code, type, op0, op1, op2);
14026 if (!tem)
14027 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14029 #ifdef ENABLE_FOLD_CHECKING
14030 md5_init_ctx (&ctx);
14031 fold_checksum_tree (op0, &ctx, ht);
14032 md5_finish_ctx (&ctx, checksum_after_op0);
14033 htab_empty (ht);
14035 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14036 fold_check_failed (op0, tem);
14038 md5_init_ctx (&ctx);
14039 fold_checksum_tree (op1, &ctx, ht);
14040 md5_finish_ctx (&ctx, checksum_after_op1);
14041 htab_empty (ht);
14043 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14044 fold_check_failed (op1, tem);
14046 md5_init_ctx (&ctx);
14047 fold_checksum_tree (op2, &ctx, ht);
14048 md5_finish_ctx (&ctx, checksum_after_op2);
14049 htab_delete (ht);
14051 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14052 fold_check_failed (op2, tem);
14053 #endif
14054 return tem;
14057 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14058 arguments in ARGARRAY, and a null static chain.
14059 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14060 of type TYPE from the given operands as constructed by build_call_array. */
14062 tree
14063 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14065 tree tem;
14066 #ifdef ENABLE_FOLD_CHECKING
14067 unsigned char checksum_before_fn[16],
14068 checksum_before_arglist[16],
14069 checksum_after_fn[16],
14070 checksum_after_arglist[16];
14071 struct md5_ctx ctx;
14072 htab_t ht;
14073 int i;
14075 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14076 md5_init_ctx (&ctx);
14077 fold_checksum_tree (fn, &ctx, ht);
14078 md5_finish_ctx (&ctx, checksum_before_fn);
14079 htab_empty (ht);
14081 md5_init_ctx (&ctx);
14082 for (i = 0; i < nargs; i++)
14083 fold_checksum_tree (argarray[i], &ctx, ht);
14084 md5_finish_ctx (&ctx, checksum_before_arglist);
14085 htab_empty (ht);
14086 #endif
14088 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14090 #ifdef ENABLE_FOLD_CHECKING
14091 md5_init_ctx (&ctx);
14092 fold_checksum_tree (fn, &ctx, ht);
14093 md5_finish_ctx (&ctx, checksum_after_fn);
14094 htab_empty (ht);
14096 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14097 fold_check_failed (fn, tem);
14099 md5_init_ctx (&ctx);
14100 for (i = 0; i < nargs; i++)
14101 fold_checksum_tree (argarray[i], &ctx, ht);
14102 md5_finish_ctx (&ctx, checksum_after_arglist);
14103 htab_delete (ht);
14105 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14106 fold_check_failed (NULL_TREE, tem);
14107 #endif
14108 return tem;
14111 /* Perform constant folding and related simplification of initializer
14112 expression EXPR. These behave identically to "fold_buildN" but ignore
14113 potential run-time traps and exceptions that fold must preserve. */
14115 #define START_FOLD_INIT \
14116 int saved_signaling_nans = flag_signaling_nans;\
14117 int saved_trapping_math = flag_trapping_math;\
14118 int saved_rounding_math = flag_rounding_math;\
14119 int saved_trapv = flag_trapv;\
14120 int saved_folding_initializer = folding_initializer;\
14121 flag_signaling_nans = 0;\
14122 flag_trapping_math = 0;\
14123 flag_rounding_math = 0;\
14124 flag_trapv = 0;\
14125 folding_initializer = 1;
14127 #define END_FOLD_INIT \
14128 flag_signaling_nans = saved_signaling_nans;\
14129 flag_trapping_math = saved_trapping_math;\
14130 flag_rounding_math = saved_rounding_math;\
14131 flag_trapv = saved_trapv;\
14132 folding_initializer = saved_folding_initializer;
14134 tree
14135 fold_build1_initializer (enum tree_code code, tree type, tree op)
14137 tree result;
14138 START_FOLD_INIT;
14140 result = fold_build1 (code, type, op);
14142 END_FOLD_INIT;
14143 return result;
14146 tree
14147 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14149 tree result;
14150 START_FOLD_INIT;
14152 result = fold_build2 (code, type, op0, op1);
14154 END_FOLD_INIT;
14155 return result;
14158 tree
14159 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14160 tree op2)
14162 tree result;
14163 START_FOLD_INIT;
14165 result = fold_build3 (code, type, op0, op1, op2);
14167 END_FOLD_INIT;
14168 return result;
14171 tree
14172 fold_build_call_array_initializer (tree type, tree fn,
14173 int nargs, tree *argarray)
14175 tree result;
14176 START_FOLD_INIT;
14178 result = fold_build_call_array (type, fn, nargs, argarray);
14180 END_FOLD_INIT;
14181 return result;
14184 #undef START_FOLD_INIT
14185 #undef END_FOLD_INIT
14187 /* Determine if first argument is a multiple of second argument. Return 0 if
14188 it is not, or we cannot easily determined it to be.
14190 An example of the sort of thing we care about (at this point; this routine
14191 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14192 fold cases do now) is discovering that
14194 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14196 is a multiple of
14198 SAVE_EXPR (J * 8)
14200 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14202 This code also handles discovering that
14204 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14206 is a multiple of 8 so we don't have to worry about dealing with a
14207 possible remainder.
14209 Note that we *look* inside a SAVE_EXPR only to determine how it was
14210 calculated; it is not safe for fold to do much of anything else with the
14211 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14212 at run time. For example, the latter example above *cannot* be implemented
14213 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14214 evaluation time of the original SAVE_EXPR is not necessarily the same at
14215 the time the new expression is evaluated. The only optimization of this
14216 sort that would be valid is changing
14218 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14220 divided by 8 to
14222 SAVE_EXPR (I) * SAVE_EXPR (J)
14224 (where the same SAVE_EXPR (J) is used in the original and the
14225 transformed version). */
14228 multiple_of_p (tree type, const_tree top, const_tree bottom)
14230 if (operand_equal_p (top, bottom, 0))
14231 return 1;
14233 if (TREE_CODE (type) != INTEGER_TYPE)
14234 return 0;
14236 switch (TREE_CODE (top))
14238 case BIT_AND_EXPR:
14239 /* Bitwise and provides a power of two multiple. If the mask is
14240 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14241 if (!integer_pow2p (bottom))
14242 return 0;
14243 /* FALLTHRU */
14245 case MULT_EXPR:
14246 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14247 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14249 case PLUS_EXPR:
14250 case MINUS_EXPR:
14251 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14252 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14254 case LSHIFT_EXPR:
14255 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14257 tree op1, t1;
14259 op1 = TREE_OPERAND (top, 1);
14260 /* const_binop may not detect overflow correctly,
14261 so check for it explicitly here. */
14262 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14263 > TREE_INT_CST_LOW (op1)
14264 && TREE_INT_CST_HIGH (op1) == 0
14265 && 0 != (t1 = fold_convert (type,
14266 const_binop (LSHIFT_EXPR,
14267 size_one_node,
14268 op1, 0)))
14269 && !TREE_OVERFLOW (t1))
14270 return multiple_of_p (type, t1, bottom);
14272 return 0;
14274 case NOP_EXPR:
14275 /* Can't handle conversions from non-integral or wider integral type. */
14276 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14277 || (TYPE_PRECISION (type)
14278 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14279 return 0;
14281 /* .. fall through ... */
14283 case SAVE_EXPR:
14284 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14286 case INTEGER_CST:
14287 if (TREE_CODE (bottom) != INTEGER_CST
14288 || integer_zerop (bottom)
14289 || (TYPE_UNSIGNED (type)
14290 && (tree_int_cst_sgn (top) < 0
14291 || tree_int_cst_sgn (bottom) < 0)))
14292 return 0;
14293 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14294 top, bottom, 0));
14296 default:
14297 return 0;
14301 /* Return true if CODE or TYPE is known to be non-negative. */
14303 static bool
14304 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14306 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14307 && truth_value_p (code))
14308 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14309 have a signed:1 type (where the value is -1 and 0). */
14310 return true;
14311 return false;
14314 /* Return true if (CODE OP0) is known to be non-negative. If the return
14315 value is based on the assumption that signed overflow is undefined,
14316 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14317 *STRICT_OVERFLOW_P. */
14319 bool
14320 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14321 bool *strict_overflow_p)
14323 if (TYPE_UNSIGNED (type))
14324 return true;
14326 switch (code)
14328 case ABS_EXPR:
14329 /* We can't return 1 if flag_wrapv is set because
14330 ABS_EXPR<INT_MIN> = INT_MIN. */
14331 if (!INTEGRAL_TYPE_P (type))
14332 return true;
14333 if (TYPE_OVERFLOW_UNDEFINED (type))
14335 *strict_overflow_p = true;
14336 return true;
14338 break;
14340 case NON_LVALUE_EXPR:
14341 case FLOAT_EXPR:
14342 case FIX_TRUNC_EXPR:
14343 return tree_expr_nonnegative_warnv_p (op0,
14344 strict_overflow_p);
14346 case NOP_EXPR:
14348 tree inner_type = TREE_TYPE (op0);
14349 tree outer_type = type;
14351 if (TREE_CODE (outer_type) == REAL_TYPE)
14353 if (TREE_CODE (inner_type) == REAL_TYPE)
14354 return tree_expr_nonnegative_warnv_p (op0,
14355 strict_overflow_p);
14356 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14358 if (TYPE_UNSIGNED (inner_type))
14359 return true;
14360 return tree_expr_nonnegative_warnv_p (op0,
14361 strict_overflow_p);
14364 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14366 if (TREE_CODE (inner_type) == REAL_TYPE)
14367 return tree_expr_nonnegative_warnv_p (op0,
14368 strict_overflow_p);
14369 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14370 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14371 && TYPE_UNSIGNED (inner_type);
14374 break;
14376 default:
14377 return tree_simple_nonnegative_warnv_p (code, type);
14380 /* We don't know sign of `t', so be conservative and return false. */
14381 return false;
14384 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14385 value is based on the assumption that signed overflow is undefined,
14386 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14387 *STRICT_OVERFLOW_P. */
14389 bool
14390 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14391 tree op1, bool *strict_overflow_p)
14393 if (TYPE_UNSIGNED (type))
14394 return true;
14396 switch (code)
14398 case POINTER_PLUS_EXPR:
14399 case PLUS_EXPR:
14400 if (FLOAT_TYPE_P (type))
14401 return (tree_expr_nonnegative_warnv_p (op0,
14402 strict_overflow_p)
14403 && tree_expr_nonnegative_warnv_p (op1,
14404 strict_overflow_p));
14406 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14407 both unsigned and at least 2 bits shorter than the result. */
14408 if (TREE_CODE (type) == INTEGER_TYPE
14409 && TREE_CODE (op0) == NOP_EXPR
14410 && TREE_CODE (op1) == NOP_EXPR)
14412 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14413 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14414 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14415 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14417 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14418 TYPE_PRECISION (inner2)) + 1;
14419 return prec < TYPE_PRECISION (type);
14422 break;
14424 case MULT_EXPR:
14425 if (FLOAT_TYPE_P (type))
14427 /* x * x for floating point x is always non-negative. */
14428 if (operand_equal_p (op0, op1, 0))
14429 return true;
14430 return (tree_expr_nonnegative_warnv_p (op0,
14431 strict_overflow_p)
14432 && tree_expr_nonnegative_warnv_p (op1,
14433 strict_overflow_p));
14436 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14437 both unsigned and their total bits is shorter than the result. */
14438 if (TREE_CODE (type) == INTEGER_TYPE
14439 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14440 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14442 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14443 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14444 : TREE_TYPE (op0);
14445 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14446 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14447 : TREE_TYPE (op1);
14449 bool unsigned0 = TYPE_UNSIGNED (inner0);
14450 bool unsigned1 = TYPE_UNSIGNED (inner1);
14452 if (TREE_CODE (op0) == INTEGER_CST)
14453 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14455 if (TREE_CODE (op1) == INTEGER_CST)
14456 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14458 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14459 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14461 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14462 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14463 : TYPE_PRECISION (inner0);
14465 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14466 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14467 : TYPE_PRECISION (inner1);
14469 return precision0 + precision1 < TYPE_PRECISION (type);
14472 return false;
14474 case BIT_AND_EXPR:
14475 case MAX_EXPR:
14476 return (tree_expr_nonnegative_warnv_p (op0,
14477 strict_overflow_p)
14478 || tree_expr_nonnegative_warnv_p (op1,
14479 strict_overflow_p));
14481 case BIT_IOR_EXPR:
14482 case BIT_XOR_EXPR:
14483 case MIN_EXPR:
14484 case RDIV_EXPR:
14485 case TRUNC_DIV_EXPR:
14486 case CEIL_DIV_EXPR:
14487 case FLOOR_DIV_EXPR:
14488 case ROUND_DIV_EXPR:
14489 return (tree_expr_nonnegative_warnv_p (op0,
14490 strict_overflow_p)
14491 && tree_expr_nonnegative_warnv_p (op1,
14492 strict_overflow_p));
14494 case TRUNC_MOD_EXPR:
14495 case CEIL_MOD_EXPR:
14496 case FLOOR_MOD_EXPR:
14497 case ROUND_MOD_EXPR:
14498 return tree_expr_nonnegative_warnv_p (op0,
14499 strict_overflow_p);
14500 default:
14501 return tree_simple_nonnegative_warnv_p (code, type);
14504 /* We don't know sign of `t', so be conservative and return false. */
14505 return false;
14508 /* Return true if T is known to be non-negative. If the return
14509 value is based on the assumption that signed overflow is undefined,
14510 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14511 *STRICT_OVERFLOW_P. */
14513 bool
14514 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14516 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14517 return true;
14519 switch (TREE_CODE (t))
14521 case INTEGER_CST:
14522 return tree_int_cst_sgn (t) >= 0;
14524 case REAL_CST:
14525 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14527 case FIXED_CST:
14528 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14530 case COND_EXPR:
14531 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14532 strict_overflow_p)
14533 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14534 strict_overflow_p));
14535 default:
14536 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14537 TREE_TYPE (t));
14539 /* We don't know sign of `t', so be conservative and return false. */
14540 return false;
14543 /* Return true if T is known to be non-negative. If the return
14544 value is based on the assumption that signed overflow is undefined,
14545 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14546 *STRICT_OVERFLOW_P. */
14548 bool
14549 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14550 tree arg0, tree arg1, bool *strict_overflow_p)
14552 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14553 switch (DECL_FUNCTION_CODE (fndecl))
14555 CASE_FLT_FN (BUILT_IN_ACOS):
14556 CASE_FLT_FN (BUILT_IN_ACOSH):
14557 CASE_FLT_FN (BUILT_IN_CABS):
14558 CASE_FLT_FN (BUILT_IN_COSH):
14559 CASE_FLT_FN (BUILT_IN_ERFC):
14560 CASE_FLT_FN (BUILT_IN_EXP):
14561 CASE_FLT_FN (BUILT_IN_EXP10):
14562 CASE_FLT_FN (BUILT_IN_EXP2):
14563 CASE_FLT_FN (BUILT_IN_FABS):
14564 CASE_FLT_FN (BUILT_IN_FDIM):
14565 CASE_FLT_FN (BUILT_IN_HYPOT):
14566 CASE_FLT_FN (BUILT_IN_POW10):
14567 CASE_INT_FN (BUILT_IN_FFS):
14568 CASE_INT_FN (BUILT_IN_PARITY):
14569 CASE_INT_FN (BUILT_IN_POPCOUNT):
14570 case BUILT_IN_BSWAP32:
14571 case BUILT_IN_BSWAP64:
14572 /* Always true. */
14573 return true;
14575 CASE_FLT_FN (BUILT_IN_SQRT):
14576 /* sqrt(-0.0) is -0.0. */
14577 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14578 return true;
14579 return tree_expr_nonnegative_warnv_p (arg0,
14580 strict_overflow_p);
14582 CASE_FLT_FN (BUILT_IN_ASINH):
14583 CASE_FLT_FN (BUILT_IN_ATAN):
14584 CASE_FLT_FN (BUILT_IN_ATANH):
14585 CASE_FLT_FN (BUILT_IN_CBRT):
14586 CASE_FLT_FN (BUILT_IN_CEIL):
14587 CASE_FLT_FN (BUILT_IN_ERF):
14588 CASE_FLT_FN (BUILT_IN_EXPM1):
14589 CASE_FLT_FN (BUILT_IN_FLOOR):
14590 CASE_FLT_FN (BUILT_IN_FMOD):
14591 CASE_FLT_FN (BUILT_IN_FREXP):
14592 CASE_FLT_FN (BUILT_IN_LCEIL):
14593 CASE_FLT_FN (BUILT_IN_LDEXP):
14594 CASE_FLT_FN (BUILT_IN_LFLOOR):
14595 CASE_FLT_FN (BUILT_IN_LLCEIL):
14596 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14597 CASE_FLT_FN (BUILT_IN_LLRINT):
14598 CASE_FLT_FN (BUILT_IN_LLROUND):
14599 CASE_FLT_FN (BUILT_IN_LRINT):
14600 CASE_FLT_FN (BUILT_IN_LROUND):
14601 CASE_FLT_FN (BUILT_IN_MODF):
14602 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14603 CASE_FLT_FN (BUILT_IN_RINT):
14604 CASE_FLT_FN (BUILT_IN_ROUND):
14605 CASE_FLT_FN (BUILT_IN_SCALB):
14606 CASE_FLT_FN (BUILT_IN_SCALBLN):
14607 CASE_FLT_FN (BUILT_IN_SCALBN):
14608 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14609 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14610 CASE_FLT_FN (BUILT_IN_SINH):
14611 CASE_FLT_FN (BUILT_IN_TANH):
14612 CASE_FLT_FN (BUILT_IN_TRUNC):
14613 /* True if the 1st argument is nonnegative. */
14614 return tree_expr_nonnegative_warnv_p (arg0,
14615 strict_overflow_p);
14617 CASE_FLT_FN (BUILT_IN_FMAX):
14618 /* True if the 1st OR 2nd arguments are nonnegative. */
14619 return (tree_expr_nonnegative_warnv_p (arg0,
14620 strict_overflow_p)
14621 || (tree_expr_nonnegative_warnv_p (arg1,
14622 strict_overflow_p)));
14624 CASE_FLT_FN (BUILT_IN_FMIN):
14625 /* True if the 1st AND 2nd arguments are nonnegative. */
14626 return (tree_expr_nonnegative_warnv_p (arg0,
14627 strict_overflow_p)
14628 && (tree_expr_nonnegative_warnv_p (arg1,
14629 strict_overflow_p)));
14631 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14632 /* True if the 2nd argument is nonnegative. */
14633 return tree_expr_nonnegative_warnv_p (arg1,
14634 strict_overflow_p);
14636 CASE_FLT_FN (BUILT_IN_POWI):
14637 /* True if the 1st argument is nonnegative or the second
14638 argument is an even integer. */
14639 if (TREE_CODE (arg1) == INTEGER_CST
14640 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14641 return true;
14642 return tree_expr_nonnegative_warnv_p (arg0,
14643 strict_overflow_p);
14645 CASE_FLT_FN (BUILT_IN_POW):
14646 /* True if the 1st argument is nonnegative or the second
14647 argument is an even integer valued real. */
14648 if (TREE_CODE (arg1) == REAL_CST)
14650 REAL_VALUE_TYPE c;
14651 HOST_WIDE_INT n;
14653 c = TREE_REAL_CST (arg1);
14654 n = real_to_integer (&c);
14655 if ((n & 1) == 0)
14657 REAL_VALUE_TYPE cint;
14658 real_from_integer (&cint, VOIDmode, n,
14659 n < 0 ? -1 : 0, 0);
14660 if (real_identical (&c, &cint))
14661 return true;
14664 return tree_expr_nonnegative_warnv_p (arg0,
14665 strict_overflow_p);
14667 default:
14668 break;
14670 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14671 type);
14674 /* Return true if T is known to be non-negative. If the return
14675 value is based on the assumption that signed overflow is undefined,
14676 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14677 *STRICT_OVERFLOW_P. */
14679 bool
14680 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14682 enum tree_code code = TREE_CODE (t);
14683 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14684 return true;
14686 switch (code)
14688 case TARGET_EXPR:
14690 tree temp = TARGET_EXPR_SLOT (t);
14691 t = TARGET_EXPR_INITIAL (t);
14693 /* If the initializer is non-void, then it's a normal expression
14694 that will be assigned to the slot. */
14695 if (!VOID_TYPE_P (t))
14696 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14698 /* Otherwise, the initializer sets the slot in some way. One common
14699 way is an assignment statement at the end of the initializer. */
14700 while (1)
14702 if (TREE_CODE (t) == BIND_EXPR)
14703 t = expr_last (BIND_EXPR_BODY (t));
14704 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14705 || TREE_CODE (t) == TRY_CATCH_EXPR)
14706 t = expr_last (TREE_OPERAND (t, 0));
14707 else if (TREE_CODE (t) == STATEMENT_LIST)
14708 t = expr_last (t);
14709 else
14710 break;
14712 if (TREE_CODE (t) == MODIFY_EXPR
14713 && TREE_OPERAND (t, 0) == temp)
14714 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14715 strict_overflow_p);
14717 return false;
14720 case CALL_EXPR:
14722 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14723 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14725 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14726 get_callee_fndecl (t),
14727 arg0,
14728 arg1,
14729 strict_overflow_p);
14731 case COMPOUND_EXPR:
14732 case MODIFY_EXPR:
14733 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14734 strict_overflow_p);
14735 case BIND_EXPR:
14736 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14737 strict_overflow_p);
14738 case SAVE_EXPR:
14739 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14740 strict_overflow_p);
14742 default:
14743 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14744 TREE_TYPE (t));
14747 /* We don't know sign of `t', so be conservative and return false. */
14748 return false;
14751 /* Return true if T is known to be non-negative. If the return
14752 value is based on the assumption that signed overflow is undefined,
14753 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14754 *STRICT_OVERFLOW_P. */
14756 bool
14757 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14759 enum tree_code code;
14760 if (t == error_mark_node)
14761 return false;
14763 code = TREE_CODE (t);
14764 switch (TREE_CODE_CLASS (code))
14766 case tcc_binary:
14767 case tcc_comparison:
14768 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14769 TREE_TYPE (t),
14770 TREE_OPERAND (t, 0),
14771 TREE_OPERAND (t, 1),
14772 strict_overflow_p);
14774 case tcc_unary:
14775 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14776 TREE_TYPE (t),
14777 TREE_OPERAND (t, 0),
14778 strict_overflow_p);
14780 case tcc_constant:
14781 case tcc_declaration:
14782 case tcc_reference:
14783 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14785 default:
14786 break;
14789 switch (code)
14791 case TRUTH_AND_EXPR:
14792 case TRUTH_OR_EXPR:
14793 case TRUTH_XOR_EXPR:
14794 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14795 TREE_TYPE (t),
14796 TREE_OPERAND (t, 0),
14797 TREE_OPERAND (t, 1),
14798 strict_overflow_p);
14799 case TRUTH_NOT_EXPR:
14800 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14801 TREE_TYPE (t),
14802 TREE_OPERAND (t, 0),
14803 strict_overflow_p);
14805 case COND_EXPR:
14806 case CONSTRUCTOR:
14807 case OBJ_TYPE_REF:
14808 case ASSERT_EXPR:
14809 case ADDR_EXPR:
14810 case WITH_SIZE_EXPR:
14811 case EXC_PTR_EXPR:
14812 case SSA_NAME:
14813 case FILTER_EXPR:
14814 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14816 default:
14817 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14821 /* Return true if `t' is known to be non-negative. Handle warnings
14822 about undefined signed overflow. */
14824 bool
14825 tree_expr_nonnegative_p (tree t)
14827 bool ret, strict_overflow_p;
14829 strict_overflow_p = false;
14830 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14831 if (strict_overflow_p)
14832 fold_overflow_warning (("assuming signed overflow does not occur when "
14833 "determining that expression is always "
14834 "non-negative"),
14835 WARN_STRICT_OVERFLOW_MISC);
14836 return ret;
14840 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14841 For floating point we further ensure that T is not denormal.
14842 Similar logic is present in nonzero_address in rtlanal.h.
14844 If the return value is based on the assumption that signed overflow
14845 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14846 change *STRICT_OVERFLOW_P. */
14848 bool
14849 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14850 bool *strict_overflow_p)
14852 switch (code)
14854 case ABS_EXPR:
14855 return tree_expr_nonzero_warnv_p (op0,
14856 strict_overflow_p);
14858 case NOP_EXPR:
14860 tree inner_type = TREE_TYPE (op0);
14861 tree outer_type = type;
14863 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14864 && tree_expr_nonzero_warnv_p (op0,
14865 strict_overflow_p));
14867 break;
14869 case NON_LVALUE_EXPR:
14870 return tree_expr_nonzero_warnv_p (op0,
14871 strict_overflow_p);
14873 default:
14874 break;
14877 return false;
14880 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14881 For floating point we further ensure that T is not denormal.
14882 Similar logic is present in nonzero_address in rtlanal.h.
14884 If the return value is based on the assumption that signed overflow
14885 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14886 change *STRICT_OVERFLOW_P. */
14888 bool
14889 tree_binary_nonzero_warnv_p (enum tree_code code,
14890 tree type,
14891 tree op0,
14892 tree op1, bool *strict_overflow_p)
14894 bool sub_strict_overflow_p;
14895 switch (code)
14897 case POINTER_PLUS_EXPR:
14898 case PLUS_EXPR:
14899 if (TYPE_OVERFLOW_UNDEFINED (type))
14901 /* With the presence of negative values it is hard
14902 to say something. */
14903 sub_strict_overflow_p = false;
14904 if (!tree_expr_nonnegative_warnv_p (op0,
14905 &sub_strict_overflow_p)
14906 || !tree_expr_nonnegative_warnv_p (op1,
14907 &sub_strict_overflow_p))
14908 return false;
14909 /* One of operands must be positive and the other non-negative. */
14910 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14911 overflows, on a twos-complement machine the sum of two
14912 nonnegative numbers can never be zero. */
14913 return (tree_expr_nonzero_warnv_p (op0,
14914 strict_overflow_p)
14915 || tree_expr_nonzero_warnv_p (op1,
14916 strict_overflow_p));
14918 break;
14920 case MULT_EXPR:
14921 if (TYPE_OVERFLOW_UNDEFINED (type))
14923 if (tree_expr_nonzero_warnv_p (op0,
14924 strict_overflow_p)
14925 && tree_expr_nonzero_warnv_p (op1,
14926 strict_overflow_p))
14928 *strict_overflow_p = true;
14929 return true;
14932 break;
14934 case MIN_EXPR:
14935 sub_strict_overflow_p = false;
14936 if (tree_expr_nonzero_warnv_p (op0,
14937 &sub_strict_overflow_p)
14938 && tree_expr_nonzero_warnv_p (op1,
14939 &sub_strict_overflow_p))
14941 if (sub_strict_overflow_p)
14942 *strict_overflow_p = true;
14944 break;
14946 case MAX_EXPR:
14947 sub_strict_overflow_p = false;
14948 if (tree_expr_nonzero_warnv_p (op0,
14949 &sub_strict_overflow_p))
14951 if (sub_strict_overflow_p)
14952 *strict_overflow_p = true;
14954 /* When both operands are nonzero, then MAX must be too. */
14955 if (tree_expr_nonzero_warnv_p (op1,
14956 strict_overflow_p))
14957 return true;
14959 /* MAX where operand 0 is positive is positive. */
14960 return tree_expr_nonnegative_warnv_p (op0,
14961 strict_overflow_p);
14963 /* MAX where operand 1 is positive is positive. */
14964 else if (tree_expr_nonzero_warnv_p (op1,
14965 &sub_strict_overflow_p)
14966 && tree_expr_nonnegative_warnv_p (op1,
14967 &sub_strict_overflow_p))
14969 if (sub_strict_overflow_p)
14970 *strict_overflow_p = true;
14971 return true;
14973 break;
14975 case BIT_IOR_EXPR:
14976 return (tree_expr_nonzero_warnv_p (op1,
14977 strict_overflow_p)
14978 || tree_expr_nonzero_warnv_p (op0,
14979 strict_overflow_p));
14981 default:
14982 break;
14985 return false;
14988 /* Return true when T is an address and is known to be nonzero.
14989 For floating point we further ensure that T is not denormal.
14990 Similar logic is present in nonzero_address in rtlanal.h.
14992 If the return value is based on the assumption that signed overflow
14993 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14994 change *STRICT_OVERFLOW_P. */
14996 bool
14997 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14999 bool sub_strict_overflow_p;
15000 switch (TREE_CODE (t))
15002 case INTEGER_CST:
15003 return !integer_zerop (t);
15005 case ADDR_EXPR:
15007 tree base = get_base_address (TREE_OPERAND (t, 0));
15009 if (!base)
15010 return false;
15012 /* Weak declarations may link to NULL. */
15013 if (VAR_OR_FUNCTION_DECL_P (base))
15014 return !DECL_WEAK (base);
15016 /* Constants are never weak. */
15017 if (CONSTANT_CLASS_P (base))
15018 return true;
15020 return false;
15023 case COND_EXPR:
15024 sub_strict_overflow_p = false;
15025 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15026 &sub_strict_overflow_p)
15027 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15028 &sub_strict_overflow_p))
15030 if (sub_strict_overflow_p)
15031 *strict_overflow_p = true;
15032 return true;
15034 break;
15036 default:
15037 break;
15039 return false;
15042 /* Return true when T is an address and is known to be nonzero.
15043 For floating point we further ensure that T is not denormal.
15044 Similar logic is present in nonzero_address in rtlanal.h.
15046 If the return value is based on the assumption that signed overflow
15047 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15048 change *STRICT_OVERFLOW_P. */
15050 bool
15051 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15053 tree type = TREE_TYPE (t);
15054 enum tree_code code;
15056 /* Doing something useful for floating point would need more work. */
15057 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15058 return false;
15060 code = TREE_CODE (t);
15061 switch (TREE_CODE_CLASS (code))
15063 case tcc_unary:
15064 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15065 strict_overflow_p);
15066 case tcc_binary:
15067 case tcc_comparison:
15068 return tree_binary_nonzero_warnv_p (code, type,
15069 TREE_OPERAND (t, 0),
15070 TREE_OPERAND (t, 1),
15071 strict_overflow_p);
15072 case tcc_constant:
15073 case tcc_declaration:
15074 case tcc_reference:
15075 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15077 default:
15078 break;
15081 switch (code)
15083 case TRUTH_NOT_EXPR:
15084 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15085 strict_overflow_p);
15087 case TRUTH_AND_EXPR:
15088 case TRUTH_OR_EXPR:
15089 case TRUTH_XOR_EXPR:
15090 return tree_binary_nonzero_warnv_p (code, type,
15091 TREE_OPERAND (t, 0),
15092 TREE_OPERAND (t, 1),
15093 strict_overflow_p);
15095 case COND_EXPR:
15096 case CONSTRUCTOR:
15097 case OBJ_TYPE_REF:
15098 case ASSERT_EXPR:
15099 case ADDR_EXPR:
15100 case WITH_SIZE_EXPR:
15101 case EXC_PTR_EXPR:
15102 case SSA_NAME:
15103 case FILTER_EXPR:
15104 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15106 case COMPOUND_EXPR:
15107 case MODIFY_EXPR:
15108 case BIND_EXPR:
15109 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15110 strict_overflow_p);
15112 case SAVE_EXPR:
15113 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15114 strict_overflow_p);
15116 case CALL_EXPR:
15117 return alloca_call_p (t);
15119 default:
15120 break;
15122 return false;
15125 /* Return true when T is an address and is known to be nonzero.
15126 Handle warnings about undefined signed overflow. */
15128 bool
15129 tree_expr_nonzero_p (tree t)
15131 bool ret, strict_overflow_p;
15133 strict_overflow_p = false;
15134 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15135 if (strict_overflow_p)
15136 fold_overflow_warning (("assuming signed overflow does not occur when "
15137 "determining that expression is always "
15138 "non-zero"),
15139 WARN_STRICT_OVERFLOW_MISC);
15140 return ret;
15143 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15144 attempt to fold the expression to a constant without modifying TYPE,
15145 OP0 or OP1.
15147 If the expression could be simplified to a constant, then return
15148 the constant. If the expression would not be simplified to a
15149 constant, then return NULL_TREE. */
15151 tree
15152 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15154 tree tem = fold_binary (code, type, op0, op1);
15155 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15158 /* Given the components of a unary expression CODE, TYPE and OP0,
15159 attempt to fold the expression to a constant without modifying
15160 TYPE or OP0.
15162 If the expression could be simplified to a constant, then return
15163 the constant. If the expression would not be simplified to a
15164 constant, then return NULL_TREE. */
15166 tree
15167 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15169 tree tem = fold_unary (code, type, op0);
15170 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15173 /* If EXP represents referencing an element in a constant string
15174 (either via pointer arithmetic or array indexing), return the
15175 tree representing the value accessed, otherwise return NULL. */
15177 tree
15178 fold_read_from_constant_string (tree exp)
15180 if ((TREE_CODE (exp) == INDIRECT_REF
15181 || TREE_CODE (exp) == ARRAY_REF)
15182 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15184 tree exp1 = TREE_OPERAND (exp, 0);
15185 tree index;
15186 tree string;
15188 if (TREE_CODE (exp) == INDIRECT_REF)
15189 string = string_constant (exp1, &index);
15190 else
15192 tree low_bound = array_ref_low_bound (exp);
15193 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15195 /* Optimize the special-case of a zero lower bound.
15197 We convert the low_bound to sizetype to avoid some problems
15198 with constant folding. (E.g. suppose the lower bound is 1,
15199 and its mode is QI. Without the conversion,l (ARRAY
15200 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15201 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15202 if (! integer_zerop (low_bound))
15203 index = size_diffop (index, fold_convert (sizetype, low_bound));
15205 string = exp1;
15208 if (string
15209 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15210 && TREE_CODE (string) == STRING_CST
15211 && TREE_CODE (index) == INTEGER_CST
15212 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15213 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15214 == MODE_INT)
15215 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15216 return build_int_cst_type (TREE_TYPE (exp),
15217 (TREE_STRING_POINTER (string)
15218 [TREE_INT_CST_LOW (index)]));
15220 return NULL;
15223 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15224 an integer constant, real, or fixed-point constant.
15226 TYPE is the type of the result. */
15228 static tree
15229 fold_negate_const (tree arg0, tree type)
15231 tree t = NULL_TREE;
15233 switch (TREE_CODE (arg0))
15235 case INTEGER_CST:
15237 unsigned HOST_WIDE_INT low;
15238 HOST_WIDE_INT high;
15239 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15240 TREE_INT_CST_HIGH (arg0),
15241 &low, &high);
15242 t = force_fit_type_double (type, low, high, 1,
15243 (overflow | TREE_OVERFLOW (arg0))
15244 && !TYPE_UNSIGNED (type));
15245 break;
15248 case REAL_CST:
15249 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15250 break;
15252 case FIXED_CST:
15254 FIXED_VALUE_TYPE f;
15255 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15256 &(TREE_FIXED_CST (arg0)), NULL,
15257 TYPE_SATURATING (type));
15258 t = build_fixed (type, f);
15259 /* Propagate overflow flags. */
15260 if (overflow_p | TREE_OVERFLOW (arg0))
15262 TREE_OVERFLOW (t) = 1;
15263 TREE_CONSTANT_OVERFLOW (t) = 1;
15265 else if (TREE_CONSTANT_OVERFLOW (arg0))
15266 TREE_CONSTANT_OVERFLOW (t) = 1;
15267 break;
15270 default:
15271 gcc_unreachable ();
15274 return t;
15277 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15278 an integer constant or real constant.
15280 TYPE is the type of the result. */
15282 tree
15283 fold_abs_const (tree arg0, tree type)
15285 tree t = NULL_TREE;
15287 switch (TREE_CODE (arg0))
15289 case INTEGER_CST:
15290 /* If the value is unsigned, then the absolute value is
15291 the same as the ordinary value. */
15292 if (TYPE_UNSIGNED (type))
15293 t = arg0;
15294 /* Similarly, if the value is non-negative. */
15295 else if (INT_CST_LT (integer_minus_one_node, arg0))
15296 t = arg0;
15297 /* If the value is negative, then the absolute value is
15298 its negation. */
15299 else
15301 unsigned HOST_WIDE_INT low;
15302 HOST_WIDE_INT high;
15303 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15304 TREE_INT_CST_HIGH (arg0),
15305 &low, &high);
15306 t = force_fit_type_double (type, low, high, -1,
15307 overflow | TREE_OVERFLOW (arg0));
15309 break;
15311 case REAL_CST:
15312 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15313 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15314 else
15315 t = arg0;
15316 break;
15318 default:
15319 gcc_unreachable ();
15322 return t;
15325 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15326 constant. TYPE is the type of the result. */
15328 static tree
15329 fold_not_const (tree arg0, tree type)
15331 tree t = NULL_TREE;
15333 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15335 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15336 ~TREE_INT_CST_HIGH (arg0), 0,
15337 TREE_OVERFLOW (arg0));
15339 return t;
15342 /* Given CODE, a relational operator, the target type, TYPE and two
15343 constant operands OP0 and OP1, return the result of the
15344 relational operation. If the result is not a compile time
15345 constant, then return NULL_TREE. */
15347 static tree
15348 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15350 int result, invert;
15352 /* From here on, the only cases we handle are when the result is
15353 known to be a constant. */
15355 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15357 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15358 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15360 /* Handle the cases where either operand is a NaN. */
15361 if (real_isnan (c0) || real_isnan (c1))
15363 switch (code)
15365 case EQ_EXPR:
15366 case ORDERED_EXPR:
15367 result = 0;
15368 break;
15370 case NE_EXPR:
15371 case UNORDERED_EXPR:
15372 case UNLT_EXPR:
15373 case UNLE_EXPR:
15374 case UNGT_EXPR:
15375 case UNGE_EXPR:
15376 case UNEQ_EXPR:
15377 result = 1;
15378 break;
15380 case LT_EXPR:
15381 case LE_EXPR:
15382 case GT_EXPR:
15383 case GE_EXPR:
15384 case LTGT_EXPR:
15385 if (flag_trapping_math)
15386 return NULL_TREE;
15387 result = 0;
15388 break;
15390 default:
15391 gcc_unreachable ();
15394 return constant_boolean_node (result, type);
15397 return constant_boolean_node (real_compare (code, c0, c1), type);
15400 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15402 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15403 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15404 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15407 /* Handle equality/inequality of complex constants. */
15408 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15410 tree rcond = fold_relational_const (code, type,
15411 TREE_REALPART (op0),
15412 TREE_REALPART (op1));
15413 tree icond = fold_relational_const (code, type,
15414 TREE_IMAGPART (op0),
15415 TREE_IMAGPART (op1));
15416 if (code == EQ_EXPR)
15417 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15418 else if (code == NE_EXPR)
15419 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15420 else
15421 return NULL_TREE;
15424 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15426 To compute GT, swap the arguments and do LT.
15427 To compute GE, do LT and invert the result.
15428 To compute LE, swap the arguments, do LT and invert the result.
15429 To compute NE, do EQ and invert the result.
15431 Therefore, the code below must handle only EQ and LT. */
15433 if (code == LE_EXPR || code == GT_EXPR)
15435 tree tem = op0;
15436 op0 = op1;
15437 op1 = tem;
15438 code = swap_tree_comparison (code);
15441 /* Note that it is safe to invert for real values here because we
15442 have already handled the one case that it matters. */
15444 invert = 0;
15445 if (code == NE_EXPR || code == GE_EXPR)
15447 invert = 1;
15448 code = invert_tree_comparison (code, false);
15451 /* Compute a result for LT or EQ if args permit;
15452 Otherwise return T. */
15453 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15455 if (code == EQ_EXPR)
15456 result = tree_int_cst_equal (op0, op1);
15457 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15458 result = INT_CST_LT_UNSIGNED (op0, op1);
15459 else
15460 result = INT_CST_LT (op0, op1);
15462 else
15463 return NULL_TREE;
15465 if (invert)
15466 result ^= 1;
15467 return constant_boolean_node (result, type);
15470 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15471 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15472 itself. */
15474 tree
15475 fold_build_cleanup_point_expr (tree type, tree expr)
15477 /* If the expression does not have side effects then we don't have to wrap
15478 it with a cleanup point expression. */
15479 if (!TREE_SIDE_EFFECTS (expr))
15480 return expr;
15482 /* If the expression is a return, check to see if the expression inside the
15483 return has no side effects or the right hand side of the modify expression
15484 inside the return. If either don't have side effects set we don't need to
15485 wrap the expression in a cleanup point expression. Note we don't check the
15486 left hand side of the modify because it should always be a return decl. */
15487 if (TREE_CODE (expr) == RETURN_EXPR)
15489 tree op = TREE_OPERAND (expr, 0);
15490 if (!op || !TREE_SIDE_EFFECTS (op))
15491 return expr;
15492 op = TREE_OPERAND (op, 1);
15493 if (!TREE_SIDE_EFFECTS (op))
15494 return expr;
15497 return build1 (CLEANUP_POINT_EXPR, type, expr);
15500 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15501 of an indirection through OP0, or NULL_TREE if no simplification is
15502 possible. */
15504 tree
15505 fold_indirect_ref_1 (tree type, tree op0)
15507 tree sub = op0;
15508 tree subtype;
15510 STRIP_NOPS (sub);
15511 subtype = TREE_TYPE (sub);
15512 if (!POINTER_TYPE_P (subtype))
15513 return NULL_TREE;
15515 if (TREE_CODE (sub) == ADDR_EXPR)
15517 tree op = TREE_OPERAND (sub, 0);
15518 tree optype = TREE_TYPE (op);
15519 /* *&CONST_DECL -> to the value of the const decl. */
15520 if (TREE_CODE (op) == CONST_DECL)
15521 return DECL_INITIAL (op);
15522 /* *&p => p; make sure to handle *&"str"[cst] here. */
15523 if (type == optype)
15525 tree fop = fold_read_from_constant_string (op);
15526 if (fop)
15527 return fop;
15528 else
15529 return op;
15531 /* *(foo *)&fooarray => fooarray[0] */
15532 else if (TREE_CODE (optype) == ARRAY_TYPE
15533 && type == TREE_TYPE (optype))
15535 tree type_domain = TYPE_DOMAIN (optype);
15536 tree min_val = size_zero_node;
15537 if (type_domain && TYPE_MIN_VALUE (type_domain))
15538 min_val = TYPE_MIN_VALUE (type_domain);
15539 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15541 /* *(foo *)&complexfoo => __real__ complexfoo */
15542 else if (TREE_CODE (optype) == COMPLEX_TYPE
15543 && type == TREE_TYPE (optype))
15544 return fold_build1 (REALPART_EXPR, type, op);
15545 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15546 else if (TREE_CODE (optype) == VECTOR_TYPE
15547 && type == TREE_TYPE (optype))
15549 tree part_width = TYPE_SIZE (type);
15550 tree index = bitsize_int (0);
15551 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15555 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15556 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15557 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15559 tree op00 = TREE_OPERAND (sub, 0);
15560 tree op01 = TREE_OPERAND (sub, 1);
15561 tree op00type;
15563 STRIP_NOPS (op00);
15564 op00type = TREE_TYPE (op00);
15565 if (TREE_CODE (op00) == ADDR_EXPR
15566 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15567 && type == TREE_TYPE (TREE_TYPE (op00type)))
15569 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15570 tree part_width = TYPE_SIZE (type);
15571 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15572 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15573 tree index = bitsize_int (indexi);
15575 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15576 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15577 part_width, index);
15583 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15584 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15585 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15587 tree op00 = TREE_OPERAND (sub, 0);
15588 tree op01 = TREE_OPERAND (sub, 1);
15589 tree op00type;
15591 STRIP_NOPS (op00);
15592 op00type = TREE_TYPE (op00);
15593 if (TREE_CODE (op00) == ADDR_EXPR
15594 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15595 && type == TREE_TYPE (TREE_TYPE (op00type)))
15597 tree size = TYPE_SIZE_UNIT (type);
15598 if (tree_int_cst_equal (size, op01))
15599 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15603 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15604 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15605 && type == TREE_TYPE (TREE_TYPE (subtype)))
15607 tree type_domain;
15608 tree min_val = size_zero_node;
15609 sub = build_fold_indirect_ref (sub);
15610 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15611 if (type_domain && TYPE_MIN_VALUE (type_domain))
15612 min_val = TYPE_MIN_VALUE (type_domain);
15613 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15616 return NULL_TREE;
15619 /* Builds an expression for an indirection through T, simplifying some
15620 cases. */
15622 tree
15623 build_fold_indirect_ref (tree t)
15625 tree type = TREE_TYPE (TREE_TYPE (t));
15626 tree sub = fold_indirect_ref_1 (type, t);
15628 if (sub)
15629 return sub;
15630 else
15631 return build1 (INDIRECT_REF, type, t);
15634 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15636 tree
15637 fold_indirect_ref (tree t)
15639 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15641 if (sub)
15642 return sub;
15643 else
15644 return t;
15647 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15648 whose result is ignored. The type of the returned tree need not be
15649 the same as the original expression. */
15651 tree
15652 fold_ignored_result (tree t)
15654 if (!TREE_SIDE_EFFECTS (t))
15655 return integer_zero_node;
15657 for (;;)
15658 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15660 case tcc_unary:
15661 t = TREE_OPERAND (t, 0);
15662 break;
15664 case tcc_binary:
15665 case tcc_comparison:
15666 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15667 t = TREE_OPERAND (t, 0);
15668 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15669 t = TREE_OPERAND (t, 1);
15670 else
15671 return t;
15672 break;
15674 case tcc_expression:
15675 switch (TREE_CODE (t))
15677 case COMPOUND_EXPR:
15678 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15679 return t;
15680 t = TREE_OPERAND (t, 0);
15681 break;
15683 case COND_EXPR:
15684 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15685 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15686 return t;
15687 t = TREE_OPERAND (t, 0);
15688 break;
15690 default:
15691 return t;
15693 break;
15695 default:
15696 return t;
15700 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15701 This can only be applied to objects of a sizetype. */
15703 tree
15704 round_up (tree value, int divisor)
15706 tree div = NULL_TREE;
15708 gcc_assert (divisor > 0);
15709 if (divisor == 1)
15710 return value;
15712 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15713 have to do anything. Only do this when we are not given a const,
15714 because in that case, this check is more expensive than just
15715 doing it. */
15716 if (TREE_CODE (value) != INTEGER_CST)
15718 div = build_int_cst (TREE_TYPE (value), divisor);
15720 if (multiple_of_p (TREE_TYPE (value), value, div))
15721 return value;
15724 /* If divisor is a power of two, simplify this to bit manipulation. */
15725 if (divisor == (divisor & -divisor))
15727 if (TREE_CODE (value) == INTEGER_CST)
15729 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15730 unsigned HOST_WIDE_INT high;
15731 bool overflow_p;
15733 if ((low & (divisor - 1)) == 0)
15734 return value;
15736 overflow_p = TREE_OVERFLOW (value);
15737 high = TREE_INT_CST_HIGH (value);
15738 low &= ~(divisor - 1);
15739 low += divisor;
15740 if (low == 0)
15742 high++;
15743 if (high == 0)
15744 overflow_p = true;
15747 return force_fit_type_double (TREE_TYPE (value), low, high,
15748 -1, overflow_p);
15750 else
15752 tree t;
15754 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15755 value = size_binop (PLUS_EXPR, value, t);
15756 t = build_int_cst (TREE_TYPE (value), -divisor);
15757 value = size_binop (BIT_AND_EXPR, value, t);
15760 else
15762 if (!div)
15763 div = build_int_cst (TREE_TYPE (value), divisor);
15764 value = size_binop (CEIL_DIV_EXPR, value, div);
15765 value = size_binop (MULT_EXPR, value, div);
15768 return value;
15771 /* Likewise, but round down. */
15773 tree
15774 round_down (tree value, int divisor)
15776 tree div = NULL_TREE;
15778 gcc_assert (divisor > 0);
15779 if (divisor == 1)
15780 return value;
15782 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15783 have to do anything. Only do this when we are not given a const,
15784 because in that case, this check is more expensive than just
15785 doing it. */
15786 if (TREE_CODE (value) != INTEGER_CST)
15788 div = build_int_cst (TREE_TYPE (value), divisor);
15790 if (multiple_of_p (TREE_TYPE (value), value, div))
15791 return value;
15794 /* If divisor is a power of two, simplify this to bit manipulation. */
15795 if (divisor == (divisor & -divisor))
15797 tree t;
15799 t = build_int_cst (TREE_TYPE (value), -divisor);
15800 value = size_binop (BIT_AND_EXPR, value, t);
15802 else
15804 if (!div)
15805 div = build_int_cst (TREE_TYPE (value), divisor);
15806 value = size_binop (FLOOR_DIV_EXPR, value, div);
15807 value = size_binop (MULT_EXPR, value, div);
15810 return value;
15813 /* Returns the pointer to the base of the object addressed by EXP and
15814 extracts the information about the offset of the access, storing it
15815 to PBITPOS and POFFSET. */
15817 static tree
15818 split_address_to_core_and_offset (tree exp,
15819 HOST_WIDE_INT *pbitpos, tree *poffset)
15821 tree core;
15822 enum machine_mode mode;
15823 int unsignedp, volatilep;
15824 HOST_WIDE_INT bitsize;
15826 if (TREE_CODE (exp) == ADDR_EXPR)
15828 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15829 poffset, &mode, &unsignedp, &volatilep,
15830 false);
15831 core = fold_addr_expr (core);
15833 else
15835 core = exp;
15836 *pbitpos = 0;
15837 *poffset = NULL_TREE;
15840 return core;
15843 /* Returns true if addresses of E1 and E2 differ by a constant, false
15844 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15846 bool
15847 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15849 tree core1, core2;
15850 HOST_WIDE_INT bitpos1, bitpos2;
15851 tree toffset1, toffset2, tdiff, type;
15853 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15854 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15856 if (bitpos1 % BITS_PER_UNIT != 0
15857 || bitpos2 % BITS_PER_UNIT != 0
15858 || !operand_equal_p (core1, core2, 0))
15859 return false;
15861 if (toffset1 && toffset2)
15863 type = TREE_TYPE (toffset1);
15864 if (type != TREE_TYPE (toffset2))
15865 toffset2 = fold_convert (type, toffset2);
15867 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15868 if (!cst_and_fits_in_hwi (tdiff))
15869 return false;
15871 *diff = int_cst_value (tdiff);
15873 else if (toffset1 || toffset2)
15875 /* If only one of the offsets is non-constant, the difference cannot
15876 be a constant. */
15877 return false;
15879 else
15880 *diff = 0;
15882 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15883 return true;
15886 /* Simplify the floating point expression EXP when the sign of the
15887 result is not significant. Return NULL_TREE if no simplification
15888 is possible. */
15890 tree
15891 fold_strip_sign_ops (tree exp)
15893 tree arg0, arg1;
15895 switch (TREE_CODE (exp))
15897 case ABS_EXPR:
15898 case NEGATE_EXPR:
15899 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15900 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15902 case MULT_EXPR:
15903 case RDIV_EXPR:
15904 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15905 return NULL_TREE;
15906 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15907 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15908 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15909 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15910 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15911 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15912 break;
15914 case COMPOUND_EXPR:
15915 arg0 = TREE_OPERAND (exp, 0);
15916 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15917 if (arg1)
15918 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15919 break;
15921 case COND_EXPR:
15922 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15923 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15924 if (arg0 || arg1)
15925 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15926 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15927 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15928 break;
15930 case CALL_EXPR:
15932 const enum built_in_function fcode = builtin_mathfn_code (exp);
15933 switch (fcode)
15935 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15936 /* Strip copysign function call, return the 1st argument. */
15937 arg0 = CALL_EXPR_ARG (exp, 0);
15938 arg1 = CALL_EXPR_ARG (exp, 1);
15939 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15941 default:
15942 /* Strip sign ops from the argument of "odd" math functions. */
15943 if (negate_mathfn_p (fcode))
15945 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15946 if (arg0)
15947 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15949 break;
15952 break;
15954 default:
15955 break;
15957 return NULL_TREE;