/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (const_tree, const_tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
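
/* A worked example of OVERFLOW_SUM_SIGN, shown in 8-bit two's complement
   for brevity: 100 + 100 wraps to -56; the addends have equal sign bits
   while the sum's differs, so ~(a ^ b) & (a ^ sum) has its sign bit set
   and the macro yields nonzero.  For 100 + (-56) = 44 the addends' signs
   already differ, ~(a ^ b) clears the sign bit, and no overflow is
   reported.  */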
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
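
/* A worked example, assuming a 32-bit HOST_WIDE_INT so that BASE is
   0x10000: encode (words, 0x12345678, 0x0abcdef0) stores the half-word
   digits words[0] = 0x5678, words[1] = 0x1234, words[2] = 0xdef0 and
   words[3] = 0x0abc; decode (words, &low, &hi) reassembles the same
   LOW and HI from those digits.  */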
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
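
/* Example behavior, using a signed 8-bit TYPE for illustration:
   fitting L1 = 0x80, H1 = 0 first masks away the bits beyond bit 7,
   then sign extends bit 7, leaving *LV equal to the two's complement
   representation of -128 and *HV = -1.  The result no longer equals
   the input words, so the function returns nonzero (overflow).  */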
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
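
/* For instance, adding 1 to the all-ones doubleword (L1 = ~0, H1 = -1)
   gives L = H = 0.  Read unsigned, the carry out of the high word makes
   h compare less than h1, so overflow is reported; read signed, the
   same bits mean -1 + 1 = 0 and OVERFLOW_SUM_SIGN correctly stays
   zero.  */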
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
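
/* The only signed overflow case is negating the most negative
   doubleword: with L1 = 0 and H1 the minimum HOST_WIDE_INT, -H1 wraps
   back to H1, (*hv & h1) keeps the sign bit set, and the function
   returns nonzero.  All other inputs negate without overflow.  */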
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
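
/* For example, on a host with 32-bit HOST_WIDE_INTs, squaring 2^32
   (L1 = 0, H1 = 1) produces 2^64: the low doubleword of the product
   is 0 and the top half holds the digit 1, so the unsigned check
   reports overflow immediately.  */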
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
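
/* A worked example of the arithmetic case: right shifting the
   doubleword value -16 (L1 = ~15, H1 = -1) by COUNT = 2 with ARITH set
   and PREC = 2 * HOST_BITS_PER_WIDE_INT fills the vacated high bits
   from SIGNMASK and yields -4; with ARITH clear the same inputs shift
   in zeros instead.  */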
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
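
/* Rotation is composed from the two shifts above, so bits shifted out
   of one end reappear at the other: for PREC = 16,
   lrotate_double (0x1234, 0, 4, 16, &lv, &hv) produces 0x2341.  */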
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
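
/* The rounding adjustment is easiest to see on small signed values.
   Dividing -7 by 4 leaves a trial quotient of -1 with remainder -3:
   TRUNC_DIV_EXPR and CEIL_DIV_EXPR keep -1, FLOOR_DIV_EXPR steps down
   to -2, and ROUND_DIV_EXPR, because 2 * |rem| exceeds |den|, also
   adjusts away from zero to -2.  The true remainder is then recomputed
   to match, e.g. -7 - (-2 * 4) = 1 for the FLOOR case.  */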
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
		       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
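
/* For example, sin is odd (sin (-x) == -sin (x)), so -sin (x) can
   always be folded to sin (-x).  The rint family is odd only under a
   symmetric rounding mode; with -frounding-math a directed run-time
   mode is possible (rounding toward +infinity gives rint (-0.5) == -0.0
   but -rint (0.5) == -1.0), hence the !flag_rounding_math guard.  */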
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
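
/* For a 32-bit signed type this rejects exactly the minimum value:
   its low bits are 1 << 31, which is what the final comparison tests
   for, while every other constant (including the maximum) negates
   within range.  */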
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
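
/* As an example, splitting IN = a + 5 with CODE == PLUS_EXPR sets
   *LITP to 5 and returns a as the variable part; for a - 5 the
   literal is routed to *MINUS_LITP instead.  An operand that is
   TREE_CONSTANT without being a literal (an address constant, say)
   lands in *CONP.  */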
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
1803 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1804 constant. We assume ARG1 and ARG2 have the same data type, or at least
1805 are the same kind of constant and the same machine mode. Return zero if
1806 combining the constants is not allowed in the current operating mode.
1808 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1810 static tree
1811 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1813 /* Sanity check for the recursive cases. */
1814 if (!arg1 || !arg2)
1815 return NULL_TREE;
1817 STRIP_NOPS (arg1);
1818 STRIP_NOPS (arg2);
1820 if (TREE_CODE (arg1) == INTEGER_CST)
1821 return int_const_binop (code, arg1, arg2, notrunc);
1823 if (TREE_CODE (arg1) == REAL_CST)
1825 enum machine_mode mode;
1826 REAL_VALUE_TYPE d1;
1827 REAL_VALUE_TYPE d2;
1828 REAL_VALUE_TYPE value;
1829 REAL_VALUE_TYPE result;
1830 bool inexact;
1831 tree t, type;
1833 /* The following codes are handled by real_arithmetic. */
1834 switch (code)
1836 case PLUS_EXPR:
1837 case MINUS_EXPR:
1838 case MULT_EXPR:
1839 case RDIV_EXPR:
1840 case MIN_EXPR:
1841 case MAX_EXPR:
1842 break;
1844 default:
1845 return NULL_TREE;
1848 d1 = TREE_REAL_CST (arg1);
1849 d2 = TREE_REAL_CST (arg2);
1851 type = TREE_TYPE (arg1);
1852 mode = TYPE_MODE (type);
1854 /* Don't perform operation if we honor signaling NaNs and
1855 either operand is a NaN. */
1856 if (HONOR_SNANS (mode)
1857 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1858 return NULL_TREE;
1860 /* Don't perform operation if it would raise a division
1861 by zero exception. */
1862 if (code == RDIV_EXPR
1863 && REAL_VALUES_EQUAL (d2, dconst0)
1864 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1865 return NULL_TREE;
1867 /* If either operand is a NaN, just return it. Otherwise, set up
1868 for floating-point trap; we return an overflow. */
1869 if (REAL_VALUE_ISNAN (d1))
1870 return arg1;
1871 else if (REAL_VALUE_ISNAN (d2))
1872 return arg2;
1874 inexact = real_arithmetic (&value, code, &d1, &d2);
1875 real_convert (&result, mode, &value);
1877 /* Don't constant fold this floating point operation if
1878 the result has overflowed and flag_trapping_math. */
1879 if (flag_trapping_math
1880 && MODE_HAS_INFINITIES (mode)
1881 && REAL_VALUE_ISINF (result)
1882 && !REAL_VALUE_ISINF (d1)
1883 && !REAL_VALUE_ISINF (d2))
1884 return NULL_TREE;
1886 /* Don't constant fold this floating point operation if the
1887 result may dependent upon the run-time rounding mode and
1888 flag_rounding_math is set, or if GCC's software emulation
1889 is unable to accurately represent the result. */
1890 if ((flag_rounding_math
1891 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1892 && !flag_unsafe_math_optimizations))
1893 && (inexact || !real_identical (&result, &value)))
1894 return NULL_TREE;
1896 t = build_real (type, result);
1898 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1899 return t;
1902 if (TREE_CODE (arg1) == FIXED_CST)
1904 FIXED_VALUE_TYPE f1;
1905 FIXED_VALUE_TYPE f2;
1906 FIXED_VALUE_TYPE result;
1907 tree t, type;
1908 int sat_p;
1909 bool overflow_p;
1911 /* The following codes are handled by fixed_arithmetic. */
1912 switch (code)
1914 case PLUS_EXPR:
1915 case MINUS_EXPR:
1916 case MULT_EXPR:
1917 case TRUNC_DIV_EXPR:
1918 f2 = TREE_FIXED_CST (arg2);
1919 break;
1921 case LSHIFT_EXPR:
1922 case RSHIFT_EXPR:
1923 f2.data.high = TREE_INT_CST_HIGH (arg2);
1924 f2.data.low = TREE_INT_CST_LOW (arg2);
1925 f2.mode = SImode;
1926 break;
1928 default:
1929 return NULL_TREE;
1932 f1 = TREE_FIXED_CST (arg1);
1933 type = TREE_TYPE (arg1);
1934 sat_p = TYPE_SATURATING (type);
1935 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1936 t = build_fixed (type, result);
1937 /* Propagate overflow flags. */
1938 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1940 TREE_OVERFLOW (t) = 1;
1941 TREE_CONSTANT_OVERFLOW (t) = 1;
1943 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1944 TREE_CONSTANT_OVERFLOW (t) = 1;
1945 return t;
1948 if (TREE_CODE (arg1) == COMPLEX_CST)
1950 tree type = TREE_TYPE (arg1);
1951 tree r1 = TREE_REALPART (arg1);
1952 tree i1 = TREE_IMAGPART (arg1);
1953 tree r2 = TREE_REALPART (arg2);
1954 tree i2 = TREE_IMAGPART (arg2);
1955 tree real, imag;
1957 switch (code)
1959 case PLUS_EXPR:
1960 case MINUS_EXPR:
1961 real = const_binop (code, r1, r2, notrunc);
1962 imag = const_binop (code, i1, i2, notrunc);
1963 break;
1965 case MULT_EXPR:
1966 real = const_binop (MINUS_EXPR,
1967 const_binop (MULT_EXPR, r1, r2, notrunc),
1968 const_binop (MULT_EXPR, i1, i2, notrunc),
1969 notrunc);
1970 imag = const_binop (PLUS_EXPR,
1971 const_binop (MULT_EXPR, r1, i2, notrunc),
1972 const_binop (MULT_EXPR, i1, r2, notrunc),
1973 notrunc);
1974 break;
1976 case RDIV_EXPR:
1978 tree magsquared
1979 = const_binop (PLUS_EXPR,
1980 const_binop (MULT_EXPR, r2, r2, notrunc),
1981 const_binop (MULT_EXPR, i2, i2, notrunc),
1982 notrunc);
1983 tree t1
1984 = const_binop (PLUS_EXPR,
1985 const_binop (MULT_EXPR, r1, r2, notrunc),
1986 const_binop (MULT_EXPR, i1, i2, notrunc),
1987 notrunc);
1988 tree t2
1989 = const_binop (MINUS_EXPR,
1990 const_binop (MULT_EXPR, i1, r2, notrunc),
1991 const_binop (MULT_EXPR, r1, i2, notrunc),
1992 notrunc);
1994 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1995 code = TRUNC_DIV_EXPR;
1997 real = const_binop (code, t1, magsquared, notrunc);
1998 imag = const_binop (code, t2, magsquared, notrunc);
2000 break;
2002 default:
2003 return NULL_TREE;
2006 if (real && imag)
2007 return build_complex (type, real, imag);
2010 return NULL_TREE;
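/* A minimal standalone sketch (plain C, not part of this file) of the
   algebra the COMPLEX_CST division case above folds: for
   (r1 + i1*i) / (r2 + i2*i) it computes t1 = r1*r2 + i1*i2,
   t2 = i1*r2 - r1*i2 and magsquared = r2*r2 + i2*i2, giving
   t1/magsquared as the real part and t2/magsquared as the imaginary
   part.  All names here are illustrative.  */

static void
complex_rdiv_sketch (double r1, double i1, double r2, double i2,
		     double *realp, double *imagp)
{
  double magsquared = r2 * r2 + i2 * i2;
  *realp = (r1 * r2 + i1 * i2) / magsquared;	/* t1 / magsquared */
  *imagp = (i1 * r2 - r1 * i2) / magsquared;	/* t2 / magsquared */
}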
2013 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2014 indicates which particular sizetype to create. */
2016 tree
2017 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2019 return build_int_cst (sizetype_tab[(int) kind], number);
2022 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2023 is a tree code. The type of the result is taken from the operands.
2024 Both must be equivalent integer types, ala int_binop_types_match_p.
2025 If the operands are constant, so is the result. */
2027 tree
2028 size_binop (enum tree_code code, tree arg0, tree arg1)
2030 tree type = TREE_TYPE (arg0);
2032 if (arg0 == error_mark_node || arg1 == error_mark_node)
2033 return error_mark_node;
2035 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2036 TREE_TYPE (arg1)));
2038 /* Handle the special case of two integer constants faster. */
2039 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2041 /* And some specific cases even faster than that. */
2042 if (code == PLUS_EXPR)
2044 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2045 return arg1;
2046 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2047 return arg0;
2049 else if (code == MINUS_EXPR)
2051 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2052 return arg0;
2054 else if (code == MULT_EXPR)
2056 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2057 return arg1;
2060 /* Handle general case of two integer constants. */
2061 return int_const_binop (code, arg0, arg1, 0);
2064 return fold_build2 (code, type, arg0, arg1);
2067 /* Given two values, either both of sizetype or both of bitsizetype,
2068 compute the difference between the two values. Return the value
2069 in signed type corresponding to the type of the operands. */
2071 tree
2072 size_diffop (tree arg0, tree arg1)
2074 tree type = TREE_TYPE (arg0);
2075 tree ctype;
2077 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2078 TREE_TYPE (arg1)));
2080 /* If the type is already signed, just do the simple thing. */
2081 if (!TYPE_UNSIGNED (type))
2082 return size_binop (MINUS_EXPR, arg0, arg1);
2084 if (type == sizetype)
2085 ctype = ssizetype;
2086 else if (type == bitsizetype)
2087 ctype = sbitsizetype;
2088 else
2089 ctype = signed_type_for (type);
2091 /* If either operand is not a constant, do the conversions to the signed
2092 type and subtract. The hardware will do the right thing with any
2093 overflow in the subtraction. */
2094 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2095 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2096 fold_convert (ctype, arg1));
2098 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2099 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2100 overflow) and negate (which can't either). Special-case a result
2101 of zero while we're here. */
2102 if (tree_int_cst_equal (arg0, arg1))
2103 return build_int_cst (ctype, 0);
2104 else if (tree_int_cst_lt (arg1, arg0))
2105 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2106 else
2107 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2108 fold_convert (ctype, size_binop (MINUS_EXPR,
2109 arg1, arg0)));
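/* An illustrative sketch (not GCC code) of the constant case above,
   with a 64-bit unsigned value standing in for sizetype and a signed
   64-bit result standing in for ssizetype: subtract the smaller
   operand from the larger so the unsigned subtraction cannot wrap,
   then negate if the operands were in the other order.  Assumes the
   magnitude of the difference fits the signed type, as the comment
   above assumes.  */

static long long
size_diffop_sketch (unsigned long long arg0, unsigned long long arg1)
{
  if (arg0 == arg1)
    return 0;
  else if (arg1 < arg0)
    return (long long) (arg0 - arg1);	/* conversion can't overflow */
  else
    return -(long long) (arg1 - arg0);	/* negation can't either */
}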
2112 /* A subroutine of fold_convert_const handling conversions of an
2113 INTEGER_CST to another integer type. */
2115 static tree
2116 fold_convert_const_int_from_int (tree type, const_tree arg1)
2118 tree t;
2120 /* Given an integer constant, make a new constant with the new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2123 TREE_INT_CST_HIGH (arg1),
2124 /* Don't set the overflow when
2125 converting a pointer */
2126 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2127 (TREE_INT_CST_HIGH (arg1) < 0
2128 && (TYPE_UNSIGNED (type)
2129 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2130 | TREE_OVERFLOW (arg1));
2132 return t;
2135 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2136 to an integer type. */
2138 static tree
2139 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2141 int overflow = 0;
2142 tree t;
2144 /* The following code implements the floating point to integer
2145 conversion rules required by the Java Language Specification,
2146 that IEEE NaNs are mapped to zero and values that overflow
2147 the target precision saturate, i.e. values greater than
2148 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2149 are mapped to INT_MIN. These semantics are allowed by the
2150 C and C++ standards that simply state that the behavior of
2151 FP-to-integer conversion is unspecified upon overflow. */
2153 HOST_WIDE_INT high, low;
2154 REAL_VALUE_TYPE r;
2155 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2157 switch (code)
2159 case FIX_TRUNC_EXPR:
2160 real_trunc (&r, VOIDmode, &x);
2161 break;
2163 default:
2164 gcc_unreachable ();
2167 /* If R is NaN, return zero and show we have an overflow. */
2168 if (REAL_VALUE_ISNAN (r))
2170 overflow = 1;
2171 high = 0;
2172 low = 0;
2175 /* See if R is less than the lower bound or greater than the
2176 upper bound. */
2178 if (! overflow)
2180 tree lt = TYPE_MIN_VALUE (type);
2181 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2182 if (REAL_VALUES_LESS (r, l))
2184 overflow = 1;
2185 high = TREE_INT_CST_HIGH (lt);
2186 low = TREE_INT_CST_LOW (lt);
2190 if (! overflow)
2192 tree ut = TYPE_MAX_VALUE (type);
2193 if (ut)
2195 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2196 if (REAL_VALUES_LESS (u, r))
2198 overflow = 1;
2199 high = TREE_INT_CST_HIGH (ut);
2200 low = TREE_INT_CST_LOW (ut);
2205 if (! overflow)
2206 REAL_VALUE_TO_INT (&low, &high, r);
2208 t = force_fit_type_double (type, low, high, -1,
2209 overflow | TREE_OVERFLOW (arg1));
2210 return t;
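/* A standalone sketch (plain C, not GCC internals) of the saturating
   semantics described above, with the bounds passed in to stand for
   TYPE_MIN_VALUE and TYPE_MAX_VALUE: NaN maps to zero and
   out-of-range values clamp to the nearest bound.  Assumes the
   bounds are exactly representable as doubles.  */

static long long
fp_to_int_saturating_sketch (double x, long long min, long long max)
{
  if (x != x)			/* NaN: mapped to zero */
    return 0;
  if (x < (double) min)		/* below the lower bound: saturate */
    return min;
  if (x > (double) max)		/* above the upper bound: saturate */
    return max;
  return (long long) x;		/* in range: truncate, as FIX_TRUNC_EXPR */
}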
2213 /* A subroutine of fold_convert_const handling conversions of a
2214 FIXED_CST to an integer type. */
2216 static tree
2217 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2219 tree t;
2220 double_int temp, temp_trunc;
2221 unsigned int mode;
2223 /* Right shift FIXED_CST to temp by fbit. */
2224 temp = TREE_FIXED_CST (arg1).data;
2225 mode = TREE_FIXED_CST (arg1).mode;
2226 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2228 lshift_double (temp.low, temp.high,
2229 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2230 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2232 /* Left shift temp to temp_trunc by fbit. */
2233 lshift_double (temp.low, temp.high,
2234 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2235 &temp_trunc.low, &temp_trunc.high,
2236 SIGNED_FIXED_POINT_MODE_P (mode));
2238 else
2240 temp.low = 0;
2241 temp.high = 0;
2242 temp_trunc.low = 0;
2243 temp_trunc.high = 0;
2246 /* If FIXED_CST is negative, we need to round the value toward 0:
2247 if the fractional bits are nonzero, add 1 to temp. */
2248 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2249 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2251 double_int one;
2252 one.low = 1;
2253 one.high = 0;
2254 temp = double_int_add (temp, one);
2257 /* Given a fixed-point constant, make a new constant with the new type,
2258 appropriately sign-extended or truncated. */
2259 t = force_fit_type_double (type, temp.low, temp.high, -1,
2260 (temp.high < 0
2261 && (TYPE_UNSIGNED (type)
2262 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2263 | TREE_OVERFLOW (arg1));
2265 return t;
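/* A standalone sketch (not GCC code) of the rounding logic above for
   a signed fixed-point value held in a 64-bit container with FBIT
   fractional bits.  An arithmetic right shift rounds toward minus
   infinity, so for negative values with a nonzero fraction we add 1
   to round toward zero instead.  Assumes two's complement and an
   arithmetic right shift of negative values, mirroring the signed
   shift requested above.  */

static long long
fixed_to_int_sketch (long long data, int fbit)
{
  long long frac_mask = (1LL << fbit) - 1;
  long long temp = data >> fbit;	/* rounds toward minus infinity */
  if (data < 0 && (data & frac_mask) != 0)
    temp += 1;				/* nonzero fraction: round toward 0 */
  return temp;
}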
2268 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2269 to another floating point type. */
2271 static tree
2272 fold_convert_const_real_from_real (tree type, const_tree arg1)
2274 REAL_VALUE_TYPE value;
2275 tree t;
2277 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2278 t = build_real (type, value);
2280 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2281 return t;
2284 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2285 to a floating point type. */
2287 static tree
2288 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2290 REAL_VALUE_TYPE value;
2291 tree t;
2293 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2294 t = build_real (type, value);
2296 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2297 TREE_CONSTANT_OVERFLOW (t)
2298 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2299 return t;
2302 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2303 to another fixed-point type. */
2305 static tree
2306 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2308 FIXED_VALUE_TYPE value;
2309 tree t;
2310 bool overflow_p;
2312 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2313 TYPE_SATURATING (type));
2314 t = build_fixed (type, value);
2316 /* Propagate overflow flags. */
2317 if (overflow_p | TREE_OVERFLOW (arg1))
2319 TREE_OVERFLOW (t) = 1;
2320 TREE_CONSTANT_OVERFLOW (t) = 1;
2322 else if (TREE_CONSTANT_OVERFLOW (arg1))
2323 TREE_CONSTANT_OVERFLOW (t) = 1;
2324 return t;
2327 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2328 to a fixed-point type. */
2330 static tree
2331 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2333 FIXED_VALUE_TYPE value;
2334 tree t;
2335 bool overflow_p;
2337 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2338 TREE_INT_CST (arg1),
2339 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2340 TYPE_SATURATING (type));
2341 t = build_fixed (type, value);
2343 /* Propagate overflow flags. */
2344 if (overflow_p | TREE_OVERFLOW (arg1))
2346 TREE_OVERFLOW (t) = 1;
2347 TREE_CONSTANT_OVERFLOW (t) = 1;
2349 else if (TREE_CONSTANT_OVERFLOW (arg1))
2350 TREE_CONSTANT_OVERFLOW (t) = 1;
2351 return t;
2354 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2355 to a fixed-point type. */
2357 static tree
2358 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2360 FIXED_VALUE_TYPE value;
2361 tree t;
2362 bool overflow_p;
2364 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2365 &TREE_REAL_CST (arg1),
2366 TYPE_SATURATING (type));
2367 t = build_fixed (type, value);
2369 /* Propagate overflow flags. */
2370 if (overflow_p | TREE_OVERFLOW (arg1))
2372 TREE_OVERFLOW (t) = 1;
2373 TREE_CONSTANT_OVERFLOW (t) = 1;
2375 else if (TREE_CONSTANT_OVERFLOW (arg1))
2376 TREE_CONSTANT_OVERFLOW (t) = 1;
2377 return t;
2380 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2381 type TYPE. If no simplification can be done return NULL_TREE. */
2383 static tree
2384 fold_convert_const (enum tree_code code, tree type, tree arg1)
2386 if (TREE_TYPE (arg1) == type)
2387 return arg1;
2389 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2391 if (TREE_CODE (arg1) == INTEGER_CST)
2392 return fold_convert_const_int_from_int (type, arg1);
2393 else if (TREE_CODE (arg1) == REAL_CST)
2394 return fold_convert_const_int_from_real (code, type, arg1);
2395 else if (TREE_CODE (arg1) == FIXED_CST)
2396 return fold_convert_const_int_from_fixed (type, arg1);
2398 else if (TREE_CODE (type) == REAL_TYPE)
2400 if (TREE_CODE (arg1) == INTEGER_CST)
2401 return build_real_from_int_cst (type, arg1);
2402 else if (TREE_CODE (arg1) == REAL_CST)
2403 return fold_convert_const_real_from_real (type, arg1);
2404 else if (TREE_CODE (arg1) == FIXED_CST)
2405 return fold_convert_const_real_from_fixed (type, arg1);
2407 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2409 if (TREE_CODE (arg1) == FIXED_CST)
2410 return fold_convert_const_fixed_from_fixed (type, arg1);
2411 else if (TREE_CODE (arg1) == INTEGER_CST)
2412 return fold_convert_const_fixed_from_int (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_fixed_from_real (type, arg1);
2416 return NULL_TREE;
2419 /* Construct a vector of zero elements of vector type TYPE. */
2421 static tree
2422 build_zero_vector (tree type)
2424 tree elem, list;
2425 int i, units;
2427 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2428 units = TYPE_VECTOR_SUBPARTS (type);
2430 list = NULL_TREE;
2431 for (i = 0; i < units; i++)
2432 list = tree_cons (NULL_TREE, elem, list);
2433 return build_vector (type, list);
2436 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2438 bool
2439 fold_convertible_p (const_tree type, const_tree arg)
2441 tree orig = TREE_TYPE (arg);
2443 if (type == orig)
2444 return true;
2446 if (TREE_CODE (arg) == ERROR_MARK
2447 || TREE_CODE (type) == ERROR_MARK
2448 || TREE_CODE (orig) == ERROR_MARK)
2449 return false;
2451 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2452 return true;
2454 switch (TREE_CODE (type))
2456 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2457 case POINTER_TYPE: case REFERENCE_TYPE:
2458 case OFFSET_TYPE:
2459 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2460 || TREE_CODE (orig) == OFFSET_TYPE)
2461 return true;
2462 return (TREE_CODE (orig) == VECTOR_TYPE
2463 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2465 default:
2466 return TREE_CODE (type) == TREE_CODE (orig);
2470 /* Convert expression ARG to type TYPE. Used by the middle-end for
2471 simple conversions in preference to calling the front-end's convert. */
2473 tree
2474 fold_convert (tree type, tree arg)
2476 tree orig = TREE_TYPE (arg);
2477 tree tem;
2479 if (type == orig)
2480 return arg;
2482 if (TREE_CODE (arg) == ERROR_MARK
2483 || TREE_CODE (type) == ERROR_MARK
2484 || TREE_CODE (orig) == ERROR_MARK)
2485 return error_mark_node;
2487 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2488 return fold_build1 (NOP_EXPR, type, arg);
2490 switch (TREE_CODE (type))
2492 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2493 case POINTER_TYPE: case REFERENCE_TYPE:
2494 case OFFSET_TYPE:
2495 if (TREE_CODE (arg) == INTEGER_CST)
2497 tem = fold_convert_const (NOP_EXPR, type, arg);
2498 if (tem != NULL_TREE)
2499 return tem;
2501 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2502 || TREE_CODE (orig) == OFFSET_TYPE)
2503 return fold_build1 (NOP_EXPR, type, arg);
2504 if (TREE_CODE (orig) == COMPLEX_TYPE)
2506 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2507 return fold_convert (type, tem);
2509 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2510 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2511 return fold_build1 (NOP_EXPR, type, arg);
2513 case REAL_TYPE:
2514 if (TREE_CODE (arg) == INTEGER_CST)
2516 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2517 if (tem != NULL_TREE)
2518 return tem;
2520 else if (TREE_CODE (arg) == REAL_CST)
2522 tem = fold_convert_const (NOP_EXPR, type, arg);
2523 if (tem != NULL_TREE)
2524 return tem;
2526 else if (TREE_CODE (arg) == FIXED_CST)
2528 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2529 if (tem != NULL_TREE)
2530 return tem;
2533 switch (TREE_CODE (orig))
2535 case INTEGER_TYPE:
2536 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2537 case POINTER_TYPE: case REFERENCE_TYPE:
2538 return fold_build1 (FLOAT_EXPR, type, arg);
2540 case REAL_TYPE:
2541 return fold_build1 (NOP_EXPR, type, arg);
2543 case FIXED_POINT_TYPE:
2544 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2546 case COMPLEX_TYPE:
2547 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2548 return fold_convert (type, tem);
2550 default:
2551 gcc_unreachable ();
2554 case FIXED_POINT_TYPE:
2555 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2556 || TREE_CODE (arg) == REAL_CST)
2558 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2559 if (tem != NULL_TREE)
2560 return tem;
2563 switch (TREE_CODE (orig))
2565 case FIXED_POINT_TYPE:
2566 case INTEGER_TYPE:
2567 case ENUMERAL_TYPE:
2568 case BOOLEAN_TYPE:
2569 case REAL_TYPE:
2570 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2572 case COMPLEX_TYPE:
2573 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2574 return fold_convert (type, tem);
2576 default:
2577 gcc_unreachable ();
2580 case COMPLEX_TYPE:
2581 switch (TREE_CODE (orig))
2583 case INTEGER_TYPE:
2584 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2585 case POINTER_TYPE: case REFERENCE_TYPE:
2586 case REAL_TYPE:
2587 case FIXED_POINT_TYPE:
2588 return build2 (COMPLEX_EXPR, type,
2589 fold_convert (TREE_TYPE (type), arg),
2590 fold_convert (TREE_TYPE (type), integer_zero_node));
2591 case COMPLEX_TYPE:
2593 tree rpart, ipart;
2595 if (TREE_CODE (arg) == COMPLEX_EXPR)
2597 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2598 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2599 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2602 arg = save_expr (arg);
2603 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2604 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2605 rpart = fold_convert (TREE_TYPE (type), rpart);
2606 ipart = fold_convert (TREE_TYPE (type), ipart);
2607 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2610 default:
2611 gcc_unreachable ();
2614 case VECTOR_TYPE:
2615 if (integer_zerop (arg))
2616 return build_zero_vector (type);
2617 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2618 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2619 || TREE_CODE (orig) == VECTOR_TYPE);
2620 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2622 case VOID_TYPE:
2623 tem = fold_ignored_result (arg);
2624 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2625 return tem;
2626 return fold_build1 (NOP_EXPR, type, tem);
2628 default:
2629 gcc_unreachable ();
2633 /* Return false if expr can be assumed not to be an lvalue, true
2634 otherwise. */
2636 static bool
2637 maybe_lvalue_p (const_tree x)
2639 /* We only need to wrap lvalue tree codes. */
2640 switch (TREE_CODE (x))
2642 case VAR_DECL:
2643 case PARM_DECL:
2644 case RESULT_DECL:
2645 case LABEL_DECL:
2646 case FUNCTION_DECL:
2647 case SSA_NAME:
2649 case COMPONENT_REF:
2650 case INDIRECT_REF:
2651 case ALIGN_INDIRECT_REF:
2652 case MISALIGNED_INDIRECT_REF:
2653 case ARRAY_REF:
2654 case ARRAY_RANGE_REF:
2655 case BIT_FIELD_REF:
2656 case OBJ_TYPE_REF:
2658 case REALPART_EXPR:
2659 case IMAGPART_EXPR:
2660 case PREINCREMENT_EXPR:
2661 case PREDECREMENT_EXPR:
2662 case SAVE_EXPR:
2663 case TRY_CATCH_EXPR:
2664 case WITH_CLEANUP_EXPR:
2665 case COMPOUND_EXPR:
2666 case MODIFY_EXPR:
2667 case GIMPLE_MODIFY_STMT:
2668 case TARGET_EXPR:
2669 case COND_EXPR:
2670 case BIND_EXPR:
2671 case MIN_EXPR:
2672 case MAX_EXPR:
2673 break;
2675 default:
2676 /* Assume the worst for front-end tree codes. */
2677 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2678 break;
2679 return false;
2682 return true;
2685 /* Return an expr equal to X but certainly not valid as an lvalue. */
2687 tree
2688 non_lvalue (tree x)
2690 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2691 us. */
2692 if (in_gimple_form)
2693 return x;
2695 if (! maybe_lvalue_p (x))
2696 return x;
2697 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2700 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2701 Zero means allow extended lvalues. */
2703 int pedantic_lvalues;
2705 /* When pedantic, return an expr equal to X but certainly not valid as a
2706 pedantic lvalue. Otherwise, return X. */
2708 static tree
2709 pedantic_non_lvalue (tree x)
2711 if (pedantic_lvalues)
2712 return non_lvalue (x);
2713 else
2714 return x;
2717 /* Given a tree comparison code, return the code that is the logical inverse
2718 of the given code. It is not safe to do this for floating-point
2719 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2720 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2722 enum tree_code
2723 invert_tree_comparison (enum tree_code code, bool honor_nans)
2725 if (honor_nans && flag_trapping_math)
2726 return ERROR_MARK;
2728 switch (code)
2730 case EQ_EXPR:
2731 return NE_EXPR;
2732 case NE_EXPR:
2733 return EQ_EXPR;
2734 case GT_EXPR:
2735 return honor_nans ? UNLE_EXPR : LE_EXPR;
2736 case GE_EXPR:
2737 return honor_nans ? UNLT_EXPR : LT_EXPR;
2738 case LT_EXPR:
2739 return honor_nans ? UNGE_EXPR : GE_EXPR;
2740 case LE_EXPR:
2741 return honor_nans ? UNGT_EXPR : GT_EXPR;
2742 case LTGT_EXPR:
2743 return UNEQ_EXPR;
2744 case UNEQ_EXPR:
2745 return LTGT_EXPR;
2746 case UNGT_EXPR:
2747 return LE_EXPR;
2748 case UNGE_EXPR:
2749 return LT_EXPR;
2750 case UNLT_EXPR:
2751 return GE_EXPR;
2752 case UNLE_EXPR:
2753 return GT_EXPR;
2754 case ORDERED_EXPR:
2755 return UNORDERED_EXPR;
2756 case UNORDERED_EXPR:
2757 return ORDERED_EXPR;
2758 default:
2759 gcc_unreachable ();
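/* A tiny illustration (plain C, not GCC code) of why honoring NaNs
   matters above: with a NaN operand, a > b and a <= b are both
   false, so the logical inverse of GT_EXPR must be the
   unordered-or-less-or-equal test UNLE_EXPR, not LE_EXPR.  */

static int
not_gt_sketch (double a, double b)
{
  /* For NaN operands this returns 1, matching UNLE semantics,
     while a <= b would return 0.  */
  return !(a > b);
}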
2763 /* Similar, but return the comparison that results if the operands are
2764 swapped. This is safe for floating-point. */
2766 enum tree_code
2767 swap_tree_comparison (enum tree_code code)
2769 switch (code)
2771 case EQ_EXPR:
2772 case NE_EXPR:
2773 case ORDERED_EXPR:
2774 case UNORDERED_EXPR:
2775 case LTGT_EXPR:
2776 case UNEQ_EXPR:
2777 return code;
2778 case GT_EXPR:
2779 return LT_EXPR;
2780 case GE_EXPR:
2781 return LE_EXPR;
2782 case LT_EXPR:
2783 return GT_EXPR;
2784 case LE_EXPR:
2785 return GE_EXPR;
2786 case UNGT_EXPR:
2787 return UNLT_EXPR;
2788 case UNGE_EXPR:
2789 return UNLE_EXPR;
2790 case UNLT_EXPR:
2791 return UNGT_EXPR;
2792 case UNLE_EXPR:
2793 return UNGE_EXPR;
2794 default:
2795 gcc_unreachable ();
2800 /* Convert a comparison tree code from an enum tree_code representation
2801 into a compcode bit-based encoding. This function is the inverse of
2802 compcode_to_comparison. */
2804 static enum comparison_code
2805 comparison_to_compcode (enum tree_code code)
2807 switch (code)
2809 case LT_EXPR:
2810 return COMPCODE_LT;
2811 case EQ_EXPR:
2812 return COMPCODE_EQ;
2813 case LE_EXPR:
2814 return COMPCODE_LE;
2815 case GT_EXPR:
2816 return COMPCODE_GT;
2817 case NE_EXPR:
2818 return COMPCODE_NE;
2819 case GE_EXPR:
2820 return COMPCODE_GE;
2821 case ORDERED_EXPR:
2822 return COMPCODE_ORD;
2823 case UNORDERED_EXPR:
2824 return COMPCODE_UNORD;
2825 case UNLT_EXPR:
2826 return COMPCODE_UNLT;
2827 case UNEQ_EXPR:
2828 return COMPCODE_UNEQ;
2829 case UNLE_EXPR:
2830 return COMPCODE_UNLE;
2831 case UNGT_EXPR:
2832 return COMPCODE_UNGT;
2833 case LTGT_EXPR:
2834 return COMPCODE_LTGT;
2835 case UNGE_EXPR:
2836 return COMPCODE_UNGE;
2837 default:
2838 gcc_unreachable ();
2842 /* Convert a compcode bit-based encoding of a comparison operator back
2843 to GCC's enum tree_code representation. This function is the
2844 inverse of comparison_to_compcode. */
2846 static enum tree_code
2847 compcode_to_comparison (enum comparison_code code)
2849 switch (code)
2851 case COMPCODE_LT:
2852 return LT_EXPR;
2853 case COMPCODE_EQ:
2854 return EQ_EXPR;
2855 case COMPCODE_LE:
2856 return LE_EXPR;
2857 case COMPCODE_GT:
2858 return GT_EXPR;
2859 case COMPCODE_NE:
2860 return NE_EXPR;
2861 case COMPCODE_GE:
2862 return GE_EXPR;
2863 case COMPCODE_ORD:
2864 return ORDERED_EXPR;
2865 case COMPCODE_UNORD:
2866 return UNORDERED_EXPR;
2867 case COMPCODE_UNLT:
2868 return UNLT_EXPR;
2869 case COMPCODE_UNEQ:
2870 return UNEQ_EXPR;
2871 case COMPCODE_UNLE:
2872 return UNLE_EXPR;
2873 case COMPCODE_UNGT:
2874 return UNGT_EXPR;
2875 case COMPCODE_LTGT:
2876 return LTGT_EXPR;
2877 case COMPCODE_UNGE:
2878 return UNGE_EXPR;
2879 default:
2880 gcc_unreachable ();
2884 /* Return a tree for the comparison which is the combination of
2885 doing the AND or OR (depending on CODE) of the two operations LCODE
2886 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2887 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2888 if this makes the transformation invalid. */
2890 tree
2891 combine_comparisons (enum tree_code code, enum tree_code lcode,
2892 enum tree_code rcode, tree truth_type,
2893 tree ll_arg, tree lr_arg)
2895 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2896 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2897 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2898 enum comparison_code compcode;
2900 switch (code)
2902 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2903 compcode = lcompcode & rcompcode;
2904 break;
2906 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2907 compcode = lcompcode | rcompcode;
2908 break;
2910 default:
2911 return NULL_TREE;
2914 if (!honor_nans)
2916 /* Eliminate unordered comparisons, as well as LTGT and ORD
2917 which are not used unless the mode has NaNs. */
2918 compcode &= ~COMPCODE_UNORD;
2919 if (compcode == COMPCODE_LTGT)
2920 compcode = COMPCODE_NE;
2921 else if (compcode == COMPCODE_ORD)
2922 compcode = COMPCODE_TRUE;
2924 else if (flag_trapping_math)
2926 /* Check that the original operation and the optimized ones will trap
2927 under the same condition. */
2928 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2929 && (lcompcode != COMPCODE_EQ)
2930 && (lcompcode != COMPCODE_ORD);
2931 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2932 && (rcompcode != COMPCODE_EQ)
2933 && (rcompcode != COMPCODE_ORD);
2934 bool trap = (compcode & COMPCODE_UNORD) == 0
2935 && (compcode != COMPCODE_EQ)
2936 && (compcode != COMPCODE_ORD);
2938 /* In a short-circuited boolean expression the LHS might be
2939 such that the RHS, if evaluated, will never trap. For
2940 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2941 if neither x nor y is NaN. (This is a mixed blessing: for
2942 example, the expression above will never trap, hence
2943 optimizing it to x < y would be invalid). */
2944 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2945 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2946 rtrap = false;
2948 /* If the comparison was short-circuited, and only the RHS
2949 trapped, we may now generate a spurious trap. */
2950 if (rtrap && !ltrap
2951 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2952 return NULL_TREE;
2954 /* If we changed the conditions that cause a trap, we lose. */
2955 if ((ltrap || rtrap) != trap)
2956 return NULL_TREE;
2959 if (compcode == COMPCODE_TRUE)
2960 return constant_boolean_node (true, truth_type);
2961 else if (compcode == COMPCODE_FALSE)
2962 return constant_boolean_node (false, truth_type);
2963 else
2964 return fold_build2 (compcode_to_comparison (compcode),
2965 truth_type, ll_arg, lr_arg);
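/* An illustrative sketch of the bit-based combination above, assuming
   this file's compcode encoding in which LT is 1, EQ is 2 and their
   union LE is 3: OR-ing the codes for 'a < b' and 'a == b' yields the
   code for 'a <= b', and AND-ing the codes for 'a <= b' and 'a >= b'
   (6) yields the code for 'a == b'.  */

static void
compcode_combine_sketch (void)
{
  unsigned lt = 1, eq = 2, le = 3, ge = 6;
  unsigned or_result = lt | eq;		/* 3: '<' || '==' is '<=' */
  unsigned and_result = le & ge;	/* 2: '<=' && '>=' is '==' */
  (void) or_result;
  (void) and_result;
}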
2968 /* Return nonzero if CODE is a tree code that represents a truth value. */
2970 static int
2971 truth_value_p (enum tree_code code)
2973 return (TREE_CODE_CLASS (code) == tcc_comparison
2974 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2975 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2976 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2979 /* Return nonzero if two operands (typically of the same tree node)
2980 are necessarily equal. If either argument has side-effects this
2981 function returns zero. FLAGS modifies behavior as follows:
2983 If OEP_ONLY_CONST is set, only return nonzero for constants.
2984 This function tests whether the operands are indistinguishable;
2985 it does not test whether they are equal using C's == operation.
2986 The distinction is important for IEEE floating point, because
2987 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2988 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2990 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2991 even though it may hold multiple values during a function.
2992 This is because a GCC tree node guarantees that nothing else is
2993 executed between the evaluation of its "operands" (which may often
2994 be evaluated in arbitrary order). Hence if the operands themselves
2995 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2996 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2997 unset means assuming isochronic (or instantaneous) tree equivalence.
2998 Unless comparing arbitrary expression trees, such as from different
2999 statements, this flag can usually be left unset.
3001 If OEP_PURE_SAME is set, then pure functions with identical arguments
3002 are considered the same. It is used when the caller has other ways
3003 to ensure that global memory is unchanged in between. */
3005 int
3006 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3008 /* If either is ERROR_MARK, they aren't equal. */
3009 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3010 return 0;
3012 /* If the two types don't have the same signedness, then we can't consider
3013 them equal. We must check this before the STRIP_NOPS calls
3014 because they may change the signedness of the arguments. */
3015 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3016 return 0;
3018 /* If the two types don't have the same precision, then it is not safe
3019 to strip NOPs. */
3020 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3021 return 0;
3023 STRIP_NOPS (arg0);
3024 STRIP_NOPS (arg1);
3026 /* In case both args are comparisons but with different comparison
3027 code, try to swap the comparison operands of one arg to produce
3028 a match and compare that variant. */
3029 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3030 && COMPARISON_CLASS_P (arg0)
3031 && COMPARISON_CLASS_P (arg1))
3033 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3035 if (TREE_CODE (arg0) == swap_code)
3036 return operand_equal_p (TREE_OPERAND (arg0, 0),
3037 TREE_OPERAND (arg1, 1), flags)
3038 && operand_equal_p (TREE_OPERAND (arg0, 1),
3039 TREE_OPERAND (arg1, 0), flags);
3042 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3043 /* This is needed for conversions and for COMPONENT_REF.
3044 Might as well play it safe and always test this. */
3045 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3046 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3047 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3048 return 0;
3050 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3051 We don't care about side effects in that case because the SAVE_EXPR
3052 takes care of that for us. In all other cases, two expressions are
3053 equal if they have no side effects. If we have two identical
3054 expressions with side effects that should be treated the same due
3055 to the only side effects being identical SAVE_EXPR's, that will
3056 be detected in the recursive calls below. */
3057 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3058 && (TREE_CODE (arg0) == SAVE_EXPR
3059 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3060 return 1;
3062 /* Next handle constant cases, those for which we can return 1 even
3063 if ONLY_CONST is set. */
3064 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3065 switch (TREE_CODE (arg0))
3067 case INTEGER_CST:
3068 return tree_int_cst_equal (arg0, arg1);
3070 case FIXED_CST:
3071 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3072 TREE_FIXED_CST (arg1));
3074 case REAL_CST:
3075 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3076 TREE_REAL_CST (arg1)))
3077 return 1;
3080 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3082 /* If we do not distinguish between signed and unsigned zero,
3083 consider them equal. */
3084 if (real_zerop (arg0) && real_zerop (arg1))
3085 return 1;
3087 return 0;
3089 case VECTOR_CST:
3091 tree v1, v2;
3093 v1 = TREE_VECTOR_CST_ELTS (arg0);
3094 v2 = TREE_VECTOR_CST_ELTS (arg1);
3095 while (v1 && v2)
3097 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3098 flags))
3099 return 0;
3100 v1 = TREE_CHAIN (v1);
3101 v2 = TREE_CHAIN (v2);
3104 return v1 == v2;
3107 case COMPLEX_CST:
3108 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3109 flags)
3110 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3111 flags));
3113 case STRING_CST:
3114 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3115 && ! memcmp (TREE_STRING_POINTER (arg0),
3116 TREE_STRING_POINTER (arg1),
3117 TREE_STRING_LENGTH (arg0)));
3119 case ADDR_EXPR:
3120 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3121 0);
3122 default:
3123 break;
3126 if (flags & OEP_ONLY_CONST)
3127 return 0;
3129 /* Define macros to test an operand from arg0 and arg1 for equality and a
3130 variant that allows null and views null as being different from any
3131 non-null value. In the latter case, if either is null, then both
3132 must be; otherwise, do the normal comparison. */
3133 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3134 TREE_OPERAND (arg1, N), flags)
3136 #define OP_SAME_WITH_NULL(N) \
3137 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3138 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3140 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3142 case tcc_unary:
3143 /* Two conversions are equal only if signedness and modes match. */
3144 switch (TREE_CODE (arg0))
3146 case NOP_EXPR:
3147 case CONVERT_EXPR:
3148 case FIX_TRUNC_EXPR:
3149 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3150 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3151 return 0;
3152 break;
3153 default:
3154 break;
3157 return OP_SAME (0);
3160 case tcc_comparison:
3161 case tcc_binary:
3162 if (OP_SAME (0) && OP_SAME (1))
3163 return 1;
3165 /* For commutative ops, allow the other order. */
3166 return (commutative_tree_code (TREE_CODE (arg0))
3167 && operand_equal_p (TREE_OPERAND (arg0, 0),
3168 TREE_OPERAND (arg1, 1), flags)
3169 && operand_equal_p (TREE_OPERAND (arg0, 1),
3170 TREE_OPERAND (arg1, 0), flags));
3172 case tcc_reference:
3173 /* If either of the pointer (or reference) expressions we are
3174 dereferencing contain a side effect, these cannot be equal. */
3175 if (TREE_SIDE_EFFECTS (arg0)
3176 || TREE_SIDE_EFFECTS (arg1))
3177 return 0;
3179 switch (TREE_CODE (arg0))
3181 case INDIRECT_REF:
3182 case ALIGN_INDIRECT_REF:
3183 case MISALIGNED_INDIRECT_REF:
3184 case REALPART_EXPR:
3185 case IMAGPART_EXPR:
3186 return OP_SAME (0);
3188 case ARRAY_REF:
3189 case ARRAY_RANGE_REF:
3190 /* Operands 2 and 3 may be null.
3191 Compare the array index by value first if it is constant, as we
3192 may have different types but the same value here. */
3193 return (OP_SAME (0)
3194 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3195 TREE_OPERAND (arg1, 1))
3196 || OP_SAME (1))
3197 && OP_SAME_WITH_NULL (2)
3198 && OP_SAME_WITH_NULL (3));
3200 case COMPONENT_REF:
3201 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3202 may be NULL when we're called to compare MEM_EXPRs. */
3203 return OP_SAME_WITH_NULL (0)
3204 && OP_SAME (1)
3205 && OP_SAME_WITH_NULL (2);
3207 case BIT_FIELD_REF:
3208 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3210 default:
3211 return 0;
3214 case tcc_expression:
3215 switch (TREE_CODE (arg0))
3217 case ADDR_EXPR:
3218 case TRUTH_NOT_EXPR:
3219 return OP_SAME (0);
3221 case TRUTH_ANDIF_EXPR:
3222 case TRUTH_ORIF_EXPR:
3223 return OP_SAME (0) && OP_SAME (1);
3225 case TRUTH_AND_EXPR:
3226 case TRUTH_OR_EXPR:
3227 case TRUTH_XOR_EXPR:
3228 if (OP_SAME (0) && OP_SAME (1))
3229 return 1;
3231 /* Otherwise take into account this is a commutative operation. */
3232 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3233 TREE_OPERAND (arg1, 1), flags)
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 0), flags));
3237 default:
3238 return 0;
3241 case tcc_vl_exp:
3242 switch (TREE_CODE (arg0))
3244 case CALL_EXPR:
3245 /* If the CALL_EXPRs call different functions, then they
3246 clearly cannot be equal. */
3247 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3248 flags))
3249 return 0;
3252 unsigned int cef = call_expr_flags (arg0);
3253 if (flags & OEP_PURE_SAME)
3254 cef &= ECF_CONST | ECF_PURE;
3255 else
3256 cef &= ECF_CONST;
3257 if (!cef)
3258 return 0;
3261 /* Now see if all the arguments are the same. */
3263 const_call_expr_arg_iterator iter0, iter1;
3264 const_tree a0, a1;
3265 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3266 a1 = first_const_call_expr_arg (arg1, &iter1);
3267 a0 && a1;
3268 a0 = next_const_call_expr_arg (&iter0),
3269 a1 = next_const_call_expr_arg (&iter1))
3270 if (! operand_equal_p (a0, a1, flags))
3271 return 0;
3273 /* If we get here and both argument lists are exhausted
3274 then the CALL_EXPRs are equal. */
3275 return ! (a0 || a1);
3277 default:
3278 return 0;
3281 case tcc_declaration:
3282 /* Consider __builtin_sqrt equal to sqrt. */
3283 return (TREE_CODE (arg0) == FUNCTION_DECL
3284 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3285 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3286 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3288 default:
3289 return 0;
3292 #undef OP_SAME
3293 #undef OP_SAME_WITH_NULL
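/* A standalone illustration (not GCC code) of the OEP_ONLY_CONST
   caveat documented above: -0.0 and 0.0 compare equal under ==, yet
   are distinguishable values, which is why this function tests
   indistinguishability rather than C equality.  Assumes IEEE 754
   doubles and a 64-bit unsigned long long.  */

static int
zeros_equal_but_distinguishable (void)
{
  double pz = 0.0, nz = -0.0;
  union { double d; unsigned long long bits; } upz = { pz }, unz = { nz };
  /* Equal under ==, but the sign bit differs.  Evaluates to 1.  */
  return pz == nz && upz.bits != unz.bits;
}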
3296 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3297 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3299 When in doubt, return 0. */
3301 static int
3302 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3304 int unsignedp1, unsignedpo;
3305 tree primarg0, primarg1, primother;
3306 unsigned int correct_width;
3308 if (operand_equal_p (arg0, arg1, 0))
3309 return 1;
3311 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3312 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3313 return 0;
3315 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3316 and see if the inner values are the same. This removes any
3317 signedness comparison, which doesn't matter here. */
3318 primarg0 = arg0, primarg1 = arg1;
3319 STRIP_NOPS (primarg0);
3320 STRIP_NOPS (primarg1);
3321 if (operand_equal_p (primarg0, primarg1, 0))
3322 return 1;
3324 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3325 actual comparison operand, ARG0.
3327 First throw away any conversions to wider types
3328 already present in the operands. */
3330 primarg1 = get_narrower (arg1, &unsignedp1);
3331 primother = get_narrower (other, &unsignedpo);
3333 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3334 if (unsignedp1 == unsignedpo
3335 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3336 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3338 tree type = TREE_TYPE (arg0);
3340 /* Make sure shorter operand is extended the right way
3341 to match the longer operand. */
3342 primarg1 = fold_convert (signed_or_unsigned_type_for
3343 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3345 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3346 return 1;
3349 return 0;
3352 /* See if ARG is an expression that is either a comparison or is performing
3353 arithmetic on comparisons. The comparisons must only be comparing
3354 two different values, which will be stored in *CVAL1 and *CVAL2; if
3355 they are nonzero it means that some operands have already been found.
3356 No variables may be used anywhere else in the expression except in the
3357 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3358 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3360 If this is true, return 1. Otherwise, return zero. */
3362 static int
3363 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3365 enum tree_code code = TREE_CODE (arg);
3366 enum tree_code_class class = TREE_CODE_CLASS (code);
3368 /* We can handle some of the tcc_expression cases here. */
3369 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3370 class = tcc_unary;
3371 else if (class == tcc_expression
3372 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3373 || code == COMPOUND_EXPR))
3374 class = tcc_binary;
3376 else if (class == tcc_expression && code == SAVE_EXPR
3377 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3379 /* If we've already found a CVAL1 or CVAL2, this expression is
3380 too complex to handle. */
3381 if (*cval1 || *cval2)
3382 return 0;
3384 class = tcc_unary;
3385 *save_p = 1;
3388 switch (class)
3390 case tcc_unary:
3391 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3393 case tcc_binary:
3394 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3395 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3396 cval1, cval2, save_p));
3398 case tcc_constant:
3399 return 1;
3401 case tcc_expression:
3402 if (code == COND_EXPR)
3403 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3404 cval1, cval2, save_p)
3405 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3406 cval1, cval2, save_p)
3407 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3408 cval1, cval2, save_p));
3409 return 0;
3411 case tcc_comparison:
3412 /* First see if we can handle the first operand, then the second. For
3413 the second operand, we know *CVAL1 can't be zero. It must be that
3414 one side of the comparison is each of the values; test for the
3415 case where this isn't true by failing if the two operands
3416 are the same. */
3418 if (operand_equal_p (TREE_OPERAND (arg, 0),
3419 TREE_OPERAND (arg, 1), 0))
3420 return 0;
3422 if (*cval1 == 0)
3423 *cval1 = TREE_OPERAND (arg, 0);
3424 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3425 ;
3426 else if (*cval2 == 0)
3427 *cval2 = TREE_OPERAND (arg, 0);
3428 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3429 ;
3430 else
3431 return 0;
3433 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3434 ;
3435 else if (*cval2 == 0)
3436 *cval2 = TREE_OPERAND (arg, 1);
3437 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3438 ;
3439 else
3440 return 0;
3442 return 1;
3444 default:
3445 return 0;
3449 /* ARG is a tree that is known to contain just arithmetic operations and
3450 comparisons. Evaluate the operations in the tree substituting NEW0 for
3451 any occurrence of OLD0 as an operand of a comparison and likewise for
3452 NEW1 and OLD1. */
3454 static tree
3455 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3457 tree type = TREE_TYPE (arg);
3458 enum tree_code code = TREE_CODE (arg);
3459 enum tree_code_class class = TREE_CODE_CLASS (code);
3461 /* We can handle some of the tcc_expression cases here. */
3462 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3463 class = tcc_unary;
3464 else if (class == tcc_expression
3465 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3466 class = tcc_binary;
3468 switch (class)
3470 case tcc_unary:
3471 return fold_build1 (code, type,
3472 eval_subst (TREE_OPERAND (arg, 0),
3473 old0, new0, old1, new1));
3475 case tcc_binary:
3476 return fold_build2 (code, type,
3477 eval_subst (TREE_OPERAND (arg, 0),
3478 old0, new0, old1, new1),
3479 eval_subst (TREE_OPERAND (arg, 1),
3480 old0, new0, old1, new1));
3482 case tcc_expression:
3483 switch (code)
3485 case SAVE_EXPR:
3486 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3488 case COMPOUND_EXPR:
3489 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3491 case COND_EXPR:
3492 return fold_build3 (code, type,
3493 eval_subst (TREE_OPERAND (arg, 0),
3494 old0, new0, old1, new1),
3495 eval_subst (TREE_OPERAND (arg, 1),
3496 old0, new0, old1, new1),
3497 eval_subst (TREE_OPERAND (arg, 2),
3498 old0, new0, old1, new1));
3499 default:
3500 break;
3502 /* Fall through - ??? */
3504 case tcc_comparison:
3506 tree arg0 = TREE_OPERAND (arg, 0);
3507 tree arg1 = TREE_OPERAND (arg, 1);
3509 /* We need to check both for exact equality and tree equality. The
3510 former will be true if the operand has a side-effect. In that
3511 case, we know the operand occurred exactly once. */
3513 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3514 arg0 = new0;
3515 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3516 arg0 = new1;
3518 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3519 arg1 = new0;
3520 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3521 arg1 = new1;
3523 return fold_build2 (code, type, arg0, arg1);
3526 default:
3527 return arg;
3531 /* Return a tree for the case when the result of an expression is RESULT
3532 converted to TYPE and OMITTED was previously an operand of the expression
3533 but is now not needed (e.g., we folded OMITTED * 0).
3535 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3536 the conversion of RESULT to TYPE. */
3538 tree
3539 omit_one_operand (tree type, tree result, tree omitted)
3541 tree t = fold_convert (type, result);
3543 /* If the resulting operand is an empty statement, just return the omitted
3544 statement cast to void. */
3545 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3546 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3548 if (TREE_SIDE_EFFECTS (omitted))
3549 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3551 return non_lvalue (t);
3554 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3556 static tree
3557 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3559 tree t = fold_convert (type, result);
3561 /* If the resulting operand is an empty statement, just return the omitted
3562 statement cast to void. */
3563 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3564 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3566 if (TREE_SIDE_EFFECTS (omitted))
3567 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3569 return pedantic_non_lvalue (t);
3572 /* Return a tree for the case when the result of an expression is RESULT
3573 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3574 of the expression but are now not needed.
3576 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3577 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3578 evaluated before OMITTED2. Otherwise, if neither has side effects,
3579 just do the conversion of RESULT to TYPE. */
3581 tree
3582 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3584 tree t = fold_convert (type, result);
3586 if (TREE_SIDE_EFFECTS (omitted2))
3587 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3588 if (TREE_SIDE_EFFECTS (omitted1))
3589 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3591 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3595 /* Return a simplified tree node for the truth-negation of ARG. This
3596 never alters ARG itself. We assume that ARG is an operation that
3597 returns a truth value (0 or 1).
3599 FIXME: one would think we would fold the result, but it causes
3600 problems with the dominator optimizer. */
3602 tree
3603 fold_truth_not_expr (tree arg)
3605 tree type = TREE_TYPE (arg);
3606 enum tree_code code = TREE_CODE (arg);
3608 /* If this is a comparison, we can simply invert it, except for
3609 floating-point non-equality comparisons, in which case we just
3610 enclose a TRUTH_NOT_EXPR around what we have. */
3612 if (TREE_CODE_CLASS (code) == tcc_comparison)
3614 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3615 if (FLOAT_TYPE_P (op_type)
3616 && flag_trapping_math
3617 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3618 && code != NE_EXPR && code != EQ_EXPR)
3619 return NULL_TREE;
3620 else
3622 code = invert_tree_comparison (code,
3623 HONOR_NANS (TYPE_MODE (op_type)));
3624 if (code == ERROR_MARK)
3625 return NULL_TREE;
3626 else
3627 return build2 (code, type,
3628 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3632 switch (code)
3634 case INTEGER_CST:
3635 return constant_boolean_node (integer_zerop (arg), type);
3637 case TRUTH_AND_EXPR:
3638 return build2 (TRUTH_OR_EXPR, type,
3639 invert_truthvalue (TREE_OPERAND (arg, 0)),
3640 invert_truthvalue (TREE_OPERAND (arg, 1)));
3642 case TRUTH_OR_EXPR:
3643 return build2 (TRUTH_AND_EXPR, type,
3644 invert_truthvalue (TREE_OPERAND (arg, 0)),
3645 invert_truthvalue (TREE_OPERAND (arg, 1)));
3647 case TRUTH_XOR_EXPR:
3648 /* Here we can invert either operand. We invert the first operand
3649 unless the second operand is a TRUTH_NOT_EXPR in which case our
3650 result is the XOR of the first operand with the inside of the
3651 negation of the second operand. */
3653 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3654 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3655 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3656 else
3657 return build2 (TRUTH_XOR_EXPR, type,
3658 invert_truthvalue (TREE_OPERAND (arg, 0)),
3659 TREE_OPERAND (arg, 1));
3661 case TRUTH_ANDIF_EXPR:
3662 return build2 (TRUTH_ORIF_EXPR, type,
3663 invert_truthvalue (TREE_OPERAND (arg, 0)),
3664 invert_truthvalue (TREE_OPERAND (arg, 1)));
3666 case TRUTH_ORIF_EXPR:
3667 return build2 (TRUTH_ANDIF_EXPR, type,
3668 invert_truthvalue (TREE_OPERAND (arg, 0)),
3669 invert_truthvalue (TREE_OPERAND (arg, 1)));
3671 case TRUTH_NOT_EXPR:
3672 return TREE_OPERAND (arg, 0);
3674 case COND_EXPR:
3676 tree arg1 = TREE_OPERAND (arg, 1);
3677 tree arg2 = TREE_OPERAND (arg, 2);
3678 /* A COND_EXPR may have a throw as one operand, which
3679 then has void type. Just leave void operands
3680 as they are. */
3681 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3682 VOID_TYPE_P (TREE_TYPE (arg1))
3683 ? arg1 : invert_truthvalue (arg1),
3684 VOID_TYPE_P (TREE_TYPE (arg2))
3685 ? arg2 : invert_truthvalue (arg2));
3688 case COMPOUND_EXPR:
3689 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3690 invert_truthvalue (TREE_OPERAND (arg, 1)));
3692 case NON_LVALUE_EXPR:
3693 return invert_truthvalue (TREE_OPERAND (arg, 0));
3695 case NOP_EXPR:
3696 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3697 return build1 (TRUTH_NOT_EXPR, type, arg);
3699 case CONVERT_EXPR:
3700 case FLOAT_EXPR:
3701 return build1 (TREE_CODE (arg), type,
3702 invert_truthvalue (TREE_OPERAND (arg, 0)));
3704 case BIT_AND_EXPR:
3705 if (!integer_onep (TREE_OPERAND (arg, 1)))
3706 break;
3707 return build2 (EQ_EXPR, type, arg,
3708 build_int_cst (type, 0));
3710 case SAVE_EXPR:
3711 return build1 (TRUTH_NOT_EXPR, type, arg);
3713 case CLEANUP_POINT_EXPR:
3714 return build1 (CLEANUP_POINT_EXPR, type,
3715 invert_truthvalue (TREE_OPERAND (arg, 0)));
3717 default:
3718 break;
3721 return NULL_TREE;
3724 /* Return a simplified tree node for the truth-negation of ARG. This
3725 never alters ARG itself. We assume that ARG is an operation that
3726 returns a truth value (0 or 1).
3728 FIXME: one would think we would fold the result, but it causes
3729 problems with the dominator optimizer. */
3731 tree
3732 invert_truthvalue (tree arg)
3734 tree tem;
3736 if (TREE_CODE (arg) == ERROR_MARK)
3737 return arg;
3739 tem = fold_truth_not_expr (arg);
3740 if (!tem)
3741 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3743 return tem;
3746 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3747 operands are another bit-wise operation with a common input. If so,
3748 distribute the bit operations to save an operation and possibly two if
3749 constants are involved. For example, convert
3750 (A | B) & (A | C) into A | (B & C)
3751 Further simplification will occur if B and C are constants.
3753 If this optimization cannot be done, 0 will be returned. */
3755 static tree
3756 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3758 tree common;
3759 tree left, right;
3761 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3762 || TREE_CODE (arg0) == code
3763 || (TREE_CODE (arg0) != BIT_AND_EXPR
3764 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3765 return 0;
3767 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3769 common = TREE_OPERAND (arg0, 0);
3770 left = TREE_OPERAND (arg0, 1);
3771 right = TREE_OPERAND (arg1, 1);
3773 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3775 common = TREE_OPERAND (arg0, 0);
3776 left = TREE_OPERAND (arg0, 1);
3777 right = TREE_OPERAND (arg1, 0);
3779 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3781 common = TREE_OPERAND (arg0, 1);
3782 left = TREE_OPERAND (arg0, 0);
3783 right = TREE_OPERAND (arg1, 1);
3785 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3787 common = TREE_OPERAND (arg0, 1);
3788 left = TREE_OPERAND (arg0, 0);
3789 right = TREE_OPERAND (arg1, 0);
3791 else
3792 return 0;
3794 return fold_build2 (TREE_CODE (arg0), type, common,
3795 fold_build2 (code, type, left, right));
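/* A quick standalone check (illustrative, not GCC code) of the
   identity used above: AND distributes over OR, so
   (A | B) & (A | C) equals A | (B & C) for all bit patterns, and
   dually OR distributes over AND.  */

static int
distribute_identity_holds (unsigned a, unsigned b, unsigned c)
{
  return (((a | b) & (a | c)) == (a | (b & c)))
	 && (((a & b) | (a & c)) == (a & (b | c)));	/* always 1 */
}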
3798 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3799 with code CODE. This optimization is unsafe. */
3800 static tree
3801 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3803 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3804 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3806 /* (A / C) +- (B / C) -> (A +- B) / C. */
3807 if (mul0 == mul1
3808 && operand_equal_p (TREE_OPERAND (arg0, 1),
3809 TREE_OPERAND (arg1, 1), 0))
3810 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3811 fold_build2 (code, type,
3812 TREE_OPERAND (arg0, 0),
3813 TREE_OPERAND (arg1, 0)),
3814 TREE_OPERAND (arg0, 1));
3816 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3817 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3818 TREE_OPERAND (arg1, 0), 0)
3819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3820 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3822 REAL_VALUE_TYPE r0, r1;
3823 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3824 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3825 if (!mul0)
3826 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3827 if (!mul1)
3828 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3829 real_arithmetic (&r0, code, &r0, &r1);
3830 return fold_build2 (MULT_EXPR, type,
3831 TREE_OPERAND (arg0, 0),
3832 build_real (type, r0));
3835 return NULL_TREE;
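/* A sketch (plain C, illustrative only) of the first rewrite above
   and why the whole transformation is flagged as unsafe: a/c + b/c
   and (a + b)/c are algebraically equal but round differently, so
   the folded form may differ from the original in the last ulp.  */

static double
sum_of_quotients_folded (double a, double b, double c)
{
  return (a + b) / c;		/* folded form of a/c + b/c */
}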
3838 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3839 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3841 static tree
3842 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3843 int unsignedp)
3845 tree result;
3847 if (bitpos == 0)
3849 tree size = TYPE_SIZE (TREE_TYPE (inner));
3850 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3851 || POINTER_TYPE_P (TREE_TYPE (inner)))
3852 && host_integerp (size, 0)
3853 && tree_low_cst (size, 0) == bitsize)
3854 return fold_convert (type, inner);
3857 result = build3 (BIT_FIELD_REF, type, inner,
3858 size_int (bitsize), bitsize_int (bitpos));
3860 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3862 return result;
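/* An illustrative sketch (not GCC code) of how the caller below
   combines such a reference with a mask: to test an LBITSIZE-bit
   field at bit LBITPOS, it needs LBITSIZE ones positioned at
   LBITPOS.  This computes the same value as the two-shift sequence
   in optimize_bit_field_compare, assuming lbitsize < 32 and
   lbitsize + lbitpos <= 32.  */

static unsigned
bit_field_mask_sketch (int lbitsize, int lbitpos)
{
  return ((1u << lbitsize) - 1) << lbitpos;	/* lbitsize ones at lbitpos */
}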
3865 /* Optimize a bit-field compare.
3867 There are two cases: First is a compare against a constant and the
3868 second is a comparison of two items where the fields are at the same
3869 bit position relative to the start of a chunk (byte, halfword, word)
3870 large enough to contain it. In these cases we can avoid the shift
3871 implicit in bitfield extractions.
3873 For constants, we emit a compare of the shifted constant with the
3874 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3875 compared. For two fields at the same position, we do the ANDs with the
3876 similar mask and compare the result of the ANDs.
3878 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3879 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3880 are the left and right operands of the comparison, respectively.
3882 If the optimization described above can be done, we return the resulting
3883 tree. Otherwise we return zero. */
3885 static tree
3886 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3887 tree lhs, tree rhs)
3889 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3890 tree type = TREE_TYPE (lhs);
3891 tree signed_type, unsigned_type;
3892 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3893 enum machine_mode lmode, rmode, nmode;
3894 int lunsignedp, runsignedp;
3895 int lvolatilep = 0, rvolatilep = 0;
3896 tree linner, rinner = NULL_TREE;
3897 tree mask;
3898 tree offset;
3900 /* Get all the information about the extractions being done. If the bit size
3901 is the same as the size of the underlying object, we aren't doing an
3902 extraction at all and so can do nothing. We also don't want to
3903 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3904 then will no longer be able to replace it. */
3905 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3906 &lunsignedp, &lvolatilep, false);
3907 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3908 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3909 return 0;
3911 if (!const_p)
3913 /* If this is not a constant, we can only do something if bit positions,
3914 sizes, and signedness are the same. */
3915 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3916 &runsignedp, &rvolatilep, false);
3918 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3919 || lunsignedp != runsignedp || offset != 0
3920 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3921 return 0;
3924 /* See if we can find a mode to refer to this field. We should be able to,
3925 but fail if we can't. */
3926 nmode = get_best_mode (lbitsize, lbitpos,
3927 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3928 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3929 TYPE_ALIGN (TREE_TYPE (rinner))),
3930 word_mode, lvolatilep || rvolatilep);
3931 if (nmode == VOIDmode)
3932 return 0;
3934 /* Set signed and unsigned types of the precision of this mode for the
3935 shifts below. */
3936 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3937 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3939 /* Compute the bit position and size for the new reference and our offset
3940 within it. If the new reference is the same size as the original, we
3941 won't optimize anything, so return zero. */
3942 nbitsize = GET_MODE_BITSIZE (nmode);
3943 nbitpos = lbitpos & ~ (nbitsize - 1);
3944 lbitpos -= nbitpos;
3945 if (nbitsize == lbitsize)
3946 return 0;
3948 if (BYTES_BIG_ENDIAN)
3949 lbitpos = nbitsize - lbitsize - lbitpos;
3951 /* Make the mask to be used against the extracted field. */
3952 mask = build_int_cst_type (unsigned_type, -1);
3953 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3954 mask = const_binop (RSHIFT_EXPR, mask,
3955 size_int (nbitsize - lbitsize - lbitpos), 0);
3957 if (! const_p)
3958 /* If not comparing with constant, just rework the comparison
3959 and return. */
3960 return fold_build2 (code, compare_type,
3961 fold_build2 (BIT_AND_EXPR, unsigned_type,
3962 make_bit_field_ref (linner,
3963 unsigned_type,
3964 nbitsize, nbitpos,
3965 1),
3966 mask),
3967 fold_build2 (BIT_AND_EXPR, unsigned_type,
3968 make_bit_field_ref (rinner,
3969 unsigned_type,
3970 nbitsize, nbitpos,
3971 1),
3972 mask));
3974 /* Otherwise, we are handling the constant case. See if the constant is too
3975 big for the field. Warn and return a tree for 0 (false) if so. We do
3976 this not only for its own sake, but to avoid having to test for this
3977 error case below. If we didn't, we might generate wrong code.
3979 For unsigned fields, the constant shifted right by the field length should
3980 be all zero. For signed fields, the high-order bits should agree with
3981 the sign bit. */
3983 if (lunsignedp)
3985 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3986 fold_convert (unsigned_type, rhs),
3987 size_int (lbitsize), 0)))
3989 warning (0, "comparison is always %d due to width of bit-field",
3990 code == NE_EXPR);
3991 return constant_boolean_node (code == NE_EXPR, compare_type);
3994 else
3996 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3997 size_int (lbitsize - 1), 0);
3998 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4000 warning (0, "comparison is always %d due to width of bit-field",
4001 code == NE_EXPR);
4002 return constant_boolean_node (code == NE_EXPR, compare_type);
4006 /* Single-bit compares should always be against zero. */
4007 if (lbitsize == 1 && ! integer_zerop (rhs))
4009 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4010 rhs = build_int_cst (type, 0);
4013 /* Make a new bitfield reference, shift the constant over the
4014 appropriate number of bits and mask it with the computed mask
4015 (in case this was a signed field). If we changed it, make a new one. */
4016 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4017 if (lvolatilep)
4019 TREE_SIDE_EFFECTS (lhs) = 1;
4020 TREE_THIS_VOLATILE (lhs) = 1;
4023 rhs = const_binop (BIT_AND_EXPR,
4024 const_binop (LSHIFT_EXPR,
4025 fold_convert (unsigned_type, rhs),
4026 size_int (lbitpos), 0),
4027 mask, 0);
4029 return build2 (code, compare_type,
4030 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4031 rhs);
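/* Illustrative sketch, not part of the original file: the constant case
   above, modeled on a plain unsigned word.  Rather than extracting and
   shifting the field, the containing word is ANDed with the field mask
   and compared against the constant shifted into place.  All names are
   hypothetical; assumes bitpos in [0, 31].  */
static int
example_bit_field_eq (unsigned int word, unsigned int mask,
                      unsigned int cst, int bitpos)
{
  /* word.field == cst  becomes  (word & mask) == ((cst << bitpos) & mask).  */
  return (word & mask) == ((cst << bitpos) & mask);
}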
4034 /* Subroutine for fold_truthop: decode a field reference.
4036 If EXP is a comparison reference, we return the innermost reference.
4038 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4039 set to the starting bit number.
4041 If the innermost field can be completely contained in a mode-sized
4042 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4044 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4045 otherwise it is not changed.
4047 *PUNSIGNEDP is set to the signedness of the field.
4049 *PMASK is set to the mask used. This is either contained in a
4050 BIT_AND_EXPR or derived from the width of the field.
4052 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4054 Return 0 if this is not a component reference or is one that we can't
4055 do anything with. */
4057 static tree
4058 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4059 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4060 int *punsignedp, int *pvolatilep,
4061 tree *pmask, tree *pand_mask)
4063 tree outer_type = 0;
4064 tree and_mask = 0;
4065 tree mask, inner, offset;
4066 tree unsigned_type;
4067 unsigned int precision;
4069 /* All the optimizations using this function assume integer fields.
4070 There are problems with FP fields since the type_for_size call
4071 below can fail for, e.g., XFmode. */
4072 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4073 return 0;
4075 /* We are interested in the bare arrangement of bits, so strip everything
4076 that doesn't affect the machine mode. However, record the type of the
4077 outermost expression if it may matter below. */
4078 if (TREE_CODE (exp) == NOP_EXPR
4079 || TREE_CODE (exp) == CONVERT_EXPR
4080 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4081 outer_type = TREE_TYPE (exp);
4082 STRIP_NOPS (exp);
4084 if (TREE_CODE (exp) == BIT_AND_EXPR)
4086 and_mask = TREE_OPERAND (exp, 1);
4087 exp = TREE_OPERAND (exp, 0);
4088 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4089 if (TREE_CODE (and_mask) != INTEGER_CST)
4090 return 0;
4093 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4094 punsignedp, pvolatilep, false);
4095 if ((inner == exp && and_mask == 0)
4096 || *pbitsize < 0 || offset != 0
4097 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4098 return 0;
4100 /* If the number of bits in the reference is the same as the bitsize of
4101 the outer type, then the outer type gives the signedness. Otherwise
4102 (in case of a small bitfield) the signedness is unchanged. */
4103 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4104 *punsignedp = TYPE_UNSIGNED (outer_type);
4106 /* Compute the mask to access the bitfield. */
4107 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4108 precision = TYPE_PRECISION (unsigned_type);
4110 mask = build_int_cst_type (unsigned_type, -1);
4112 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4113 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4115 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4116 if (and_mask != 0)
4117 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4118 fold_convert (unsigned_type, and_mask), mask);
4120 *pmask = mask;
4121 *pand_mask = and_mask;
4122 return inner;
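/* Illustrative sketch, not part of the original file: the mask computed
   above is *PBITSIZE low-order ones, built by the same pair of shifts on
   an all-ones value.  Hypothetical 32-bit model; assumes 1 <= bitsize <= 32.  */
static unsigned int
example_field_mask (int bitsize)
{
  unsigned int mask = ~0u;      /* build_int_cst_type (unsigned_type, -1) */
  mask <<= 32 - bitsize;        /* LSHIFT_EXPR by precision - *pbitsize */
  mask >>= 32 - bitsize;        /* RSHIFT_EXPR by precision - *pbitsize */
  return mask;
}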
4125 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4126 bit positions. */
4128 static int
4129 all_ones_mask_p (const_tree mask, int size)
4131 tree type = TREE_TYPE (mask);
4132 unsigned int precision = TYPE_PRECISION (type);
4133 tree tmask;
4135 tmask = build_int_cst_type (signed_type_for (type), -1);
4137 return
4138 tree_int_cst_equal (mask,
4139 const_binop (RSHIFT_EXPR,
4140 const_binop (LSHIFT_EXPR, tmask,
4141 size_int (precision - size),
4142 0),
4143 size_int (precision - size), 0));
4146 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4147 represents the sign bit of EXP's type. If EXP represents a sign
4148 or zero extension, also test VAL against the unextended type.
4149 The return value is the (sub)expression whose sign bit is VAL,
4150 or NULL_TREE otherwise. */
4152 static tree
4153 sign_bit_p (tree exp, const_tree val)
4155 unsigned HOST_WIDE_INT mask_lo, lo;
4156 HOST_WIDE_INT mask_hi, hi;
4157 int width;
4158 tree t;
4160 /* Tree EXP must have an integral type. */
4161 t = TREE_TYPE (exp);
4162 if (! INTEGRAL_TYPE_P (t))
4163 return NULL_TREE;
4165 /* Tree VAL must be an integer constant. */
4166 if (TREE_CODE (val) != INTEGER_CST
4167 || TREE_OVERFLOW (val))
4168 return NULL_TREE;
4170 width = TYPE_PRECISION (t);
4171 if (width > HOST_BITS_PER_WIDE_INT)
4173 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4174 lo = 0;
4176 mask_hi = ((unsigned HOST_WIDE_INT) -1
4177 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4178 mask_lo = -1;
4180 else
4182 hi = 0;
4183 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4185 mask_hi = 0;
4186 mask_lo = ((unsigned HOST_WIDE_INT) -1
4187 >> (HOST_BITS_PER_WIDE_INT - width));
4190 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4191 treat VAL as if it were unsigned. */
4192 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4193 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4194 return exp;
4196 /* Handle extension from a narrower type. */
4197 if (TREE_CODE (exp) == NOP_EXPR
4198 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4199 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4201 return NULL_TREE;
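/* Illustrative sketch, not part of the original file: for a WIDTH-bit
   type, the sign bit tested above is the single value below once VAL's
   bits beyond WIDTH are masked off.  Hypothetical model for widths 1 to
   64 on a 64-bit host value.  */
static int
example_sign_bit_p (unsigned long long val, int width)
{
  unsigned long long mask = width >= 64 ? ~0ull : (1ull << width) - 1;
  return (val & mask) == 1ull << (width - 1);
}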
4204 /* Subroutine for fold_truthop: determine if an operand is simple enough
4205 to be evaluated unconditionally. */
4207 static int
4208 simple_operand_p (const_tree exp)
4210 /* Strip any conversions that don't change the machine mode. */
4211 STRIP_NOPS (exp);
4213 return (CONSTANT_CLASS_P (exp)
4214 || TREE_CODE (exp) == SSA_NAME
4215 || (DECL_P (exp)
4216 && ! TREE_ADDRESSABLE (exp)
4217 && ! TREE_THIS_VOLATILE (exp)
4218 && ! DECL_NONLOCAL (exp)
4219 /* Don't regard global variables as simple. They may be
4220 allocated in ways unknown to the compiler (shared memory,
4221 #pragma weak, etc). */
4222 && ! TREE_PUBLIC (exp)
4223 && ! DECL_EXTERNAL (exp)
4224 /* Loading a static variable is unduly expensive, but global
4225 registers aren't expensive. */
4226 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4229 /* The following functions are subroutines to fold_range_test and allow it to
4230 try to change a logical combination of comparisons into a range test.
4232 For example, both
4233 X == 2 || X == 3 || X == 4 || X == 5
4235 X >= 2 && X <= 5
4236 are converted to
4237 (unsigned) (X - 2) <= 3
4239 We describe each set of comparisons as being either inside or outside
4240 a range, using a variable named like IN_P, and then describe the
4241 range with a lower and upper bound. If one of the bounds is omitted,
4242 it represents either the highest or lowest value of the type.
4244 In the comments below, we represent a range by two numbers in brackets
4245 preceded by a "+" to designate being inside that range, or a "-" to
4246 designate being outside that range, so the condition can be inverted by
4247 flipping the prefix. An omitted bound is represented by a "-". For
4248 example, "- [-, 10]" means being outside the range starting at the lowest
4249 possible value and ending at 10, in other words, being greater than 10.
4250 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4251 always false.
4253 We set up things so that the missing bounds are handled in a consistent
4254 manner so neither a missing bound nor "true" and "false" need to be
4255 handled using a special case. */
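/* Illustrative sketch, not part of the original file: the example from
   the comment above as two equivalent C predicates.  The subtraction is
   done unsigned, so values below 2 wrap around and compare large,
   letting a single comparison cover both bounds.  */
static int
example_comparison_chain (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_range_test (int x)
{
  return (unsigned int) x - 2u <= 3u;
}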
4257 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4258 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4259 and UPPER1_P are nonzero if the respective argument is an upper bound
4260 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4261 must be specified for a comparison. ARG1 will be converted to ARG0's
4262 type if both are specified. */
4264 static tree
4265 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4266 tree arg1, int upper1_p)
4268 tree tem;
4269 int result;
4270 int sgn0, sgn1;
4272 /* If neither arg represents infinity, do the normal operation.
4273 Else, if not a comparison, return infinity. Else handle the special
4274 comparison rules. Note that most of the cases below won't occur, but
4275 are handled for consistency. */
4277 if (arg0 != 0 && arg1 != 0)
4279 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4280 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4281 STRIP_NOPS (tem);
4282 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4285 if (TREE_CODE_CLASS (code) != tcc_comparison)
4286 return 0;
4288 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4289 for neither. In real maths, we cannot assume open ended ranges are
4290 the same. But, this is computer arithmetic, where numbers are finite.
4291 We can therefore represent an unbounded bound by a value Z,
4292 Z being greater than any representable number. This permits
4293 us to treat unbounded ranges as equal. */
4294 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4295 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4296 switch (code)
4298 case EQ_EXPR:
4299 result = sgn0 == sgn1;
4300 break;
4301 case NE_EXPR:
4302 result = sgn0 != sgn1;
4303 break;
4304 case LT_EXPR:
4305 result = sgn0 < sgn1;
4306 break;
4307 case LE_EXPR:
4308 result = sgn0 <= sgn1;
4309 break;
4310 case GT_EXPR:
4311 result = sgn0 > sgn1;
4312 break;
4313 case GE_EXPR:
4314 result = sgn0 >= sgn1;
4315 break;
4316 default:
4317 gcc_unreachable ();
4320 return constant_boolean_node (result, type);
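/* Illustrative sketch, not part of the original file: comparing bounds
   where a null pointer stands for a missing (unbounded) bound, as in
   range_binop above.  A finite bound maps to 0 and the two infinities
   to -1 and +1, so missing bounds order consistently against values.  */
static int
example_bound_less_than (const int *b0, int upper0_p,
                         const int *b1, int upper1_p)
{
  if (b0 && b1)
    return *b0 < *b1;                    /* both finite: compare values */
  {
    int sgn0 = b0 ? 0 : (upper0_p ? 1 : -1);
    int sgn1 = b1 ? 0 : (upper1_p ? 1 : -1);
    return sgn0 < sgn1;                  /* at least one infinity */
  }
}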
4323 /* Given EXP, a logical expression, set the range it is testing into
4324 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4325 actually being tested. *PLOW and *PHIGH will be made of the same
4326 type as the returned expression. If EXP is not a comparison, we
4327 will most likely not be returning a useful value and range. Set
4328 *STRICT_OVERFLOW_P to true if the return value is only valid
4329 because signed overflow is undefined; otherwise, do not change
4330 *STRICT_OVERFLOW_P. */
4332 static tree
4333 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4334 bool *strict_overflow_p)
4336 enum tree_code code;
4337 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4338 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4339 int in_p, n_in_p;
4340 tree low, high, n_low, n_high;
4342 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4343 and see if we can refine the range. Some of the cases below may not
4344 happen, but it doesn't seem worth worrying about this. We "continue"
4345 the outer loop when we've changed something; otherwise we "break"
4346 the switch, which will "break" the while. */
4348 in_p = 0;
4349 low = high = build_int_cst (TREE_TYPE (exp), 0);
4351 while (1)
4353 code = TREE_CODE (exp);
4354 exp_type = TREE_TYPE (exp);
4356 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4358 if (TREE_OPERAND_LENGTH (exp) > 0)
4359 arg0 = TREE_OPERAND (exp, 0);
4360 if (TREE_CODE_CLASS (code) == tcc_comparison
4361 || TREE_CODE_CLASS (code) == tcc_unary
4362 || TREE_CODE_CLASS (code) == tcc_binary)
4363 arg0_type = TREE_TYPE (arg0);
4364 if (TREE_CODE_CLASS (code) == tcc_binary
4365 || TREE_CODE_CLASS (code) == tcc_comparison
4366 || (TREE_CODE_CLASS (code) == tcc_expression
4367 && TREE_OPERAND_LENGTH (exp) > 1))
4368 arg1 = TREE_OPERAND (exp, 1);
4371 switch (code)
4373 case TRUTH_NOT_EXPR:
4374 in_p = ! in_p, exp = arg0;
4375 continue;
4377 case EQ_EXPR: case NE_EXPR:
4378 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4379 /* We can only do something if the range is testing for zero
4380 and if the second operand is an integer constant. Note that
4381 saying something is "in" the range we make is done by
4382 complementing IN_P, which starts out false for the initial
4383 case of being not equal to zero; "out" is leaving it alone. */
4384 if (low == 0 || high == 0
4385 || ! integer_zerop (low) || ! integer_zerop (high)
4386 || TREE_CODE (arg1) != INTEGER_CST)
4387 break;
4389 switch (code)
4391 case NE_EXPR: /* - [c, c] */
4392 low = high = arg1;
4393 break;
4394 case EQ_EXPR: /* + [c, c] */
4395 in_p = ! in_p, low = high = arg1;
4396 break;
4397 case GT_EXPR: /* - [-, c] */
4398 low = 0, high = arg1;
4399 break;
4400 case GE_EXPR: /* + [c, -] */
4401 in_p = ! in_p, low = arg1, high = 0;
4402 break;
4403 case LT_EXPR: /* - [c, -] */
4404 low = arg1, high = 0;
4405 break;
4406 case LE_EXPR: /* + [-, c] */
4407 in_p = ! in_p, low = 0, high = arg1;
4408 break;
4409 default:
4410 gcc_unreachable ();
4413 /* If this is an unsigned comparison, we also know that EXP is
4414 greater than or equal to zero. We base the range tests we make
4415 on that fact, so we record it here so we can parse existing
4416 range tests. We test arg0_type since often the return type
4417 of, e.g. EQ_EXPR, is boolean. */
4418 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4420 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4421 in_p, low, high, 1,
4422 build_int_cst (arg0_type, 0),
4423 NULL_TREE))
4424 break;
4426 in_p = n_in_p, low = n_low, high = n_high;
4428 /* If the high bound is missing, but we have a nonzero low
4429 bound, reverse the range so it goes from zero to the low bound
4430 minus 1. */
4431 if (high == 0 && low && ! integer_zerop (low))
4433 in_p = ! in_p;
4434 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4435 integer_one_node, 0);
4436 low = build_int_cst (arg0_type, 0);
4440 exp = arg0;
4441 continue;
4443 case NEGATE_EXPR:
4444 /* (-x) IN [a,b] -> x in [-b, -a] */
4445 n_low = range_binop (MINUS_EXPR, exp_type,
4446 build_int_cst (exp_type, 0),
4447 0, high, 1);
4448 n_high = range_binop (MINUS_EXPR, exp_type,
4449 build_int_cst (exp_type, 0),
4450 0, low, 0);
4451 low = n_low, high = n_high;
4452 exp = arg0;
4453 continue;
4455 case BIT_NOT_EXPR:
4456 /* ~ X -> -X - 1 */
4457 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4458 build_int_cst (exp_type, 1));
4459 continue;
4461 case PLUS_EXPR: case MINUS_EXPR:
4462 if (TREE_CODE (arg1) != INTEGER_CST)
4463 break;
4465 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4466 move a constant to the other side. */
4467 if (!TYPE_UNSIGNED (arg0_type)
4468 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4469 break;
4471 /* If EXP is signed, any overflow in the computation is undefined,
4472 so we don't worry about it so long as our computations on
4473 the bounds don't overflow. For unsigned, overflow is defined
4474 and this is exactly the right thing. */
4475 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4476 arg0_type, low, 0, arg1, 0);
4477 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4478 arg0_type, high, 1, arg1, 0);
4479 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4480 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4481 break;
4483 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4484 *strict_overflow_p = true;
4486 /* Check for an unsigned range which has wrapped around the maximum
4487 value thus making n_high < n_low, and normalize it. */
4488 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4490 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4491 integer_one_node, 0);
4492 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4493 integer_one_node, 0);
4495 /* If the range is of the form +/- [ x+1, x ], we won't
4496 be able to normalize it. But then, it represents the
4497 whole range or the empty set, so make it
4498 +/- [ -, - ]. */
4499 if (tree_int_cst_equal (n_low, low)
4500 && tree_int_cst_equal (n_high, high))
4501 low = high = 0;
4502 else
4503 in_p = ! in_p;
4505 else
4506 low = n_low, high = n_high;
4508 exp = arg0;
4509 continue;
4511 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4512 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4513 break;
4515 if (! INTEGRAL_TYPE_P (arg0_type)
4516 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4517 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4518 break;
4520 n_low = low, n_high = high;
4522 if (n_low != 0)
4523 n_low = fold_convert (arg0_type, n_low);
4525 if (n_high != 0)
4526 n_high = fold_convert (arg0_type, n_high);
4529 /* If we're converting arg0 from an unsigned type to exp,
4530 a signed type, we will be doing the comparison as unsigned.
4531 The tests above have already verified that LOW and HIGH
4532 are both positive.
4534 So we have to ensure that we will handle large unsigned
4535 values the same way that the current signed bounds treat
4536 negative values. */
4538 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4540 tree high_positive;
4541 tree equiv_type;
4542 /* For fixed-point modes, we need to pass the saturating flag
4543 as the 2nd parameter. */
4544 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4545 equiv_type = lang_hooks.types.type_for_mode
4546 (TYPE_MODE (arg0_type),
4547 TYPE_SATURATING (arg0_type));
4548 else
4549 equiv_type = lang_hooks.types.type_for_mode
4550 (TYPE_MODE (arg0_type), 1);
4552 /* A range without an upper bound is, naturally, unbounded.
4553 Since convert would have cropped a very large value, use
4554 the max value for the destination type. */
4555 high_positive
4556 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4557 : TYPE_MAX_VALUE (arg0_type);
4559 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4560 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4561 fold_convert (arg0_type,
4562 high_positive),
4563 build_int_cst (arg0_type, 1));
4565 /* If the low bound is specified, "and" the range with the
4566 range for which the original unsigned value will be
4567 positive. */
4568 if (low != 0)
4570 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4571 1, n_low, n_high, 1,
4572 fold_convert (arg0_type,
4573 integer_zero_node),
4574 high_positive))
4575 break;
4577 in_p = (n_in_p == in_p);
4579 else
4581 /* Otherwise, "or" the range with the range of the input
4582 that will be interpreted as negative. */
4583 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4584 0, n_low, n_high, 1,
4585 fold_convert (arg0_type,
4586 integer_zero_node),
4587 high_positive))
4588 break;
4590 in_p = (in_p != n_in_p);
4594 exp = arg0;
4595 low = n_low, high = n_high;
4596 continue;
4598 default:
4599 break;
4602 break;
4605 /* If EXP is a constant, we can evaluate whether this is true or false. */
4606 if (TREE_CODE (exp) == INTEGER_CST)
4608 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4609 exp, 0, low, 0))
4610 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4611 exp, 1, high, 1)));
4612 low = high = 0;
4613 exp = 0;
4616 *pin_p = in_p, *plow = low, *phigh = high;
4617 return exp;
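/* Illustrative sketch, not part of the original file: two steps of the
   loop above on concrete values.  A comparison seeds the range ("x > 10"
   is being outside [min, 10]), and a PLUS_EXPR moves its constant onto
   the bounds, valid as long as the bound arithmetic itself does not
   overflow, as checked above.  */
static int example_cmp (int x)            { return x > 10; }
static int example_cmp_as_range (int x)   { return !(x <= 10); }
static int example_plus (int x)           { return x + 3 >= 5 && x + 3 <= 9; }
static int example_plus_as_range (int x)  { return x >= 2 && x <= 6; }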
4620 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4621 type, TYPE, return an expression to test if EXP is in (or out of, depending
4622 on IN_P) the range. Return 0 if the test couldn't be created. */
4624 static tree
4625 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4627 tree etype = TREE_TYPE (exp);
4628 tree value;
4630 #ifdef HAVE_canonicalize_funcptr_for_compare
4631 /* Disable this optimization for function pointer expressions
4632 on targets that require function pointer canonicalization. */
4633 if (HAVE_canonicalize_funcptr_for_compare
4634 && TREE_CODE (etype) == POINTER_TYPE
4635 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4636 return NULL_TREE;
4637 #endif
4639 if (! in_p)
4641 value = build_range_check (type, exp, 1, low, high);
4642 if (value != 0)
4643 return invert_truthvalue (value);
4645 return 0;
4648 if (low == 0 && high == 0)
4649 return build_int_cst (type, 1);
4651 if (low == 0)
4652 return fold_build2 (LE_EXPR, type, exp,
4653 fold_convert (etype, high));
4655 if (high == 0)
4656 return fold_build2 (GE_EXPR, type, exp,
4657 fold_convert (etype, low));
4659 if (operand_equal_p (low, high, 0))
4660 return fold_build2 (EQ_EXPR, type, exp,
4661 fold_convert (etype, low));
4663 if (integer_zerop (low))
4665 if (! TYPE_UNSIGNED (etype))
4667 etype = unsigned_type_for (etype);
4668 high = fold_convert (etype, high);
4669 exp = fold_convert (etype, exp);
4671 return build_range_check (type, exp, 1, 0, high);
4674 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4675 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4677 unsigned HOST_WIDE_INT lo;
4678 HOST_WIDE_INT hi;
4679 int prec;
4681 prec = TYPE_PRECISION (etype);
4682 if (prec <= HOST_BITS_PER_WIDE_INT)
4684 hi = 0;
4685 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4687 else
4689 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4690 lo = (unsigned HOST_WIDE_INT) -1;
4693 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4695 if (TYPE_UNSIGNED (etype))
4697 etype = signed_type_for (etype);
4698 exp = fold_convert (etype, exp);
4700 return fold_build2 (GT_EXPR, type, exp,
4701 build_int_cst (etype, 0));
4705 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4706 This requires wrap-around arithmetic for the type of the expression. */
4707 switch (TREE_CODE (etype))
4709 case INTEGER_TYPE:
4710 /* There is no requirement that LOW be within the range of ETYPE
4711 if the latter is a subtype. It must, however, be within the base
4712 type of ETYPE. So be sure we do the subtraction in that type. */
4713 if (TREE_TYPE (etype))
4714 etype = TREE_TYPE (etype);
4715 break;
4717 case ENUMERAL_TYPE:
4718 case BOOLEAN_TYPE:
4719 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4720 TYPE_UNSIGNED (etype));
4721 break;
4723 default:
4724 break;
4727 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4728 if (TREE_CODE (etype) == INTEGER_TYPE
4729 && !TYPE_OVERFLOW_WRAPS (etype))
4731 tree utype, minv, maxv;
4733 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4734 for the type in question, as we rely on this here. */
4735 utype = unsigned_type_for (etype);
4736 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4737 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4738 integer_one_node, 1);
4739 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4741 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4742 minv, 1, maxv, 1)))
4743 etype = utype;
4744 else
4745 return 0;
4748 high = fold_convert (etype, high);
4749 low = fold_convert (etype, low);
4750 exp = fold_convert (etype, exp);
4752 value = const_binop (MINUS_EXPR, high, low, 0);
4755 if (POINTER_TYPE_P (etype))
4757 if (value != 0 && !TREE_OVERFLOW (value))
4759 low = fold_convert (sizetype, low);
4760 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4761 return build_range_check (type,
4762 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4763 1, build_int_cst (etype, 0), value);
4765 return 0;
4768 if (value != 0 && !TREE_OVERFLOW (value))
4769 return build_range_check (type,
4770 fold_build2 (MINUS_EXPR, etype, exp, low),
4771 1, build_int_cst (etype, 0), value);
4773 return 0;
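/* Illustrative sketch, not part of the original file: the wrap-around
   form that build_range_check reduces a finite range to, on a
   hypothetical int expression with low <= high.  Subtracting LOW and
   comparing unsigned against HIGH - LOW replaces the two-ended test.  */
static int
example_range_check (int c, int low, int high)
{
  return (unsigned int) c - (unsigned int) low
         <= (unsigned int) high - (unsigned int) low;
}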
4776 /* Return the predecessor of VAL in its type, handling the infinite case. */
4778 static tree
4779 range_predecessor (tree val)
4781 tree type = TREE_TYPE (val);
4783 if (INTEGRAL_TYPE_P (type)
4784 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4785 return 0;
4786 else
4787 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4790 /* Return the successor of VAL in its type, handling the infinite case. */
4792 static tree
4793 range_successor (tree val)
4795 tree type = TREE_TYPE (val);
4797 if (INTEGRAL_TYPE_P (type)
4798 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4799 return 0;
4800 else
4801 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4804 /* Given two ranges, see if we can merge them into one. Return 1 if we
4805 can, 0 if we can't. Set the output range into the specified parameters. */
4807 static int
4808 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4809 tree high0, int in1_p, tree low1, tree high1)
4811 int no_overlap;
4812 int subset;
4813 int temp;
4814 tree tem;
4815 int in_p;
4816 tree low, high;
4817 int lowequal = ((low0 == 0 && low1 == 0)
4818 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4819 low0, 0, low1, 0)));
4820 int highequal = ((high0 == 0 && high1 == 0)
4821 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4822 high0, 1, high1, 1)));
4824 /* Make range 0 be the range that starts first, or ends last if they
4825 start at the same value. Swap them if it isn't. */
4826 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4827 low0, 0, low1, 0))
4828 || (lowequal
4829 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4830 high1, 1, high0, 1))))
4832 temp = in0_p, in0_p = in1_p, in1_p = temp;
4833 tem = low0, low0 = low1, low1 = tem;
4834 tem = high0, high0 = high1, high1 = tem;
4837 /* Now flag two cases, whether the ranges are disjoint or whether the
4838 second range is totally subsumed in the first. Note that the tests
4839 below are simplified by the ones above. */
4840 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4841 high0, 1, low1, 0));
4842 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4843 high1, 1, high0, 1));
4845 /* We now have four cases, depending on whether we are including or
4846 excluding the two ranges. */
4847 if (in0_p && in1_p)
4849 /* If they don't overlap, the result is false. If the second range
4850 is a subset it is the result. Otherwise, the range is from the start
4851 of the second to the end of the first. */
4852 if (no_overlap)
4853 in_p = 0, low = high = 0;
4854 else if (subset)
4855 in_p = 1, low = low1, high = high1;
4856 else
4857 in_p = 1, low = low1, high = high0;
4860 else if (in0_p && ! in1_p)
4862 /* If they don't overlap, the result is the first range. If they are
4863 equal, the result is false. If the second range is a subset of the
4864 first, and the ranges begin at the same place, we go from just after
4865 the end of the second range to the end of the first. If the second
4866 range is not a subset of the first, or if it is a subset and both
4867 ranges end at the same place, the range starts at the start of the
4868 first range and ends just before the second range.
4869 Otherwise, we can't describe this as a single range. */
4870 if (no_overlap)
4871 in_p = 1, low = low0, high = high0;
4872 else if (lowequal && highequal)
4873 in_p = 0, low = high = 0;
4874 else if (subset && lowequal)
4876 low = range_successor (high1);
4877 high = high0;
4878 in_p = 1;
4879 if (low == 0)
4881 /* We are in the weird situation where high0 > high1 but
4882 high1 has no successor. Punt. */
4883 return 0;
4886 else if (! subset || highequal)
4888 low = low0;
4889 high = range_predecessor (low1);
4890 in_p = 1;
4891 if (high == 0)
4893 /* low0 < low1 but low1 has no predecessor. Punt. */
4894 return 0;
4897 else
4898 return 0;
4901 else if (! in0_p && in1_p)
4903 /* If they don't overlap, the result is the second range. If the second
4904 is a subset of the first, the result is false. Otherwise,
4905 the range starts just after the first range and ends at the
4906 end of the second. */
4907 if (no_overlap)
4908 in_p = 1, low = low1, high = high1;
4909 else if (subset || highequal)
4910 in_p = 0, low = high = 0;
4911 else
4913 low = range_successor (high0);
4914 high = high1;
4915 in_p = 1;
4916 if (low == 0)
4918 /* high1 > high0 but high0 has no successor. Punt. */
4919 return 0;
4924 else
4926 /* The case where we are excluding both ranges. Here the complex case
4927 is if they don't overlap. In that case, the only time we have a
4928 range is if they are adjacent. If the second is a subset of the
4929 first, the result is the first. Otherwise, the range to exclude
4930 starts at the beginning of the first range and ends at the end of the
4931 second. */
4932 if (no_overlap)
4934 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4935 range_successor (high0),
4936 1, low1, 0)))
4937 in_p = 0, low = low0, high = high1;
4938 else
4940 /* Canonicalize - [min, x] into - [-, x]. */
4941 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4942 switch (TREE_CODE (TREE_TYPE (low0)))
4944 case ENUMERAL_TYPE:
4945 if (TYPE_PRECISION (TREE_TYPE (low0))
4946 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4947 break;
4948 /* FALLTHROUGH */
4949 case INTEGER_TYPE:
4950 if (tree_int_cst_equal (low0,
4951 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4952 low0 = 0;
4953 break;
4954 case POINTER_TYPE:
4955 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4956 && integer_zerop (low0))
4957 low0 = 0;
4958 break;
4959 default:
4960 break;
4963 /* Canonicalize - [x, max] into - [x, -]. */
4964 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4965 switch (TREE_CODE (TREE_TYPE (high1)))
4967 case ENUMERAL_TYPE:
4968 if (TYPE_PRECISION (TREE_TYPE (high1))
4969 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4970 break;
4971 /* FALLTHROUGH */
4972 case INTEGER_TYPE:
4973 if (tree_int_cst_equal (high1,
4974 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4975 high1 = 0;
4976 break;
4977 case POINTER_TYPE:
4978 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4979 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4980 high1, 1,
4981 integer_one_node, 1)))
4982 high1 = 0;
4983 break;
4984 default:
4985 break;
4988 /* The ranges might be also adjacent between the maximum and
4989 minimum values of the given type. For
4990 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4991 return + [x + 1, y - 1]. */
4992 if (low0 == 0 && high1 == 0)
4994 low = range_successor (high0);
4995 high = range_predecessor (low1);
4996 if (low == 0 || high == 0)
4997 return 0;
4999 in_p = 1;
5001 else
5002 return 0;
5005 else if (subset)
5006 in_p = 0, low = low0, high = high0;
5007 else
5008 in_p = 0, low = low0, high = high1;
5011 *pin_p = in_p, *plow = low, *phigh = high;
5012 return 1;
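/* Illustrative sketch, not part of the original file: one concrete
   instance of the "in0_p && in1_p" case above.  + [2, 5] intersected
   with + [4, 9] overlaps without either range subsuming the other, so
   the result runs from the start of the second range to the end of the
   first: + [4, 5].  */
static int example_two_tests (int x) { return (x >= 2 && x <= 5) && (x >= 4 && x <= 9); }
static int example_merged (int x)    { return x >= 4 && x <= 5; }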
5016 /* Subroutine of fold, looking inside expressions of the form
5017 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5018 of the COND_EXPR. This function is being used also to optimize
5019 A op B ? C : A, by reversing the comparison first.
5021 Return a folded expression whose code is not a COND_EXPR
5022 anymore, or NULL_TREE if no folding opportunity is found. */
5024 static tree
5025 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5027 enum tree_code comp_code = TREE_CODE (arg0);
5028 tree arg00 = TREE_OPERAND (arg0, 0);
5029 tree arg01 = TREE_OPERAND (arg0, 1);
5030 tree arg1_type = TREE_TYPE (arg1);
5031 tree tem;
5033 STRIP_NOPS (arg1);
5034 STRIP_NOPS (arg2);
5036 /* If we have A op 0 ? A : -A, consider applying the following
5037 transformations:
5039 A == 0? A : -A same as -A
5040 A != 0? A : -A same as A
5041 A >= 0? A : -A same as abs (A)
5042 A > 0? A : -A same as abs (A)
5043 A <= 0? A : -A same as -abs (A)
5044 A < 0? A : -A same as -abs (A)
5046 None of these transformations work for modes with signed
5047 zeros. If A is +/-0, the first two transformations will
5048 change the sign of the result (from +0 to -0, or vice
5049 versa). The last four will fix the sign of the result,
5050 even though the original expressions could be positive or
5051 negative, depending on the sign of A.
5053 Note that all these transformations are correct if A is
5054 NaN, since the two alternatives (A and -A) are also NaNs. */
5055 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
5056 ? real_zerop (arg01)
5057 : integer_zerop (arg01))
5058 && ((TREE_CODE (arg2) == NEGATE_EXPR
5059 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5060 /* In the case that A is of the form X-Y, '-A' (arg2) may
5061 have already been folded to Y-X; check for that. */
5062 || (TREE_CODE (arg1) == MINUS_EXPR
5063 && TREE_CODE (arg2) == MINUS_EXPR
5064 && operand_equal_p (TREE_OPERAND (arg1, 0),
5065 TREE_OPERAND (arg2, 1), 0)
5066 && operand_equal_p (TREE_OPERAND (arg1, 1),
5067 TREE_OPERAND (arg2, 0), 0))))
5068 switch (comp_code)
5070 case EQ_EXPR:
5071 case UNEQ_EXPR:
5072 tem = fold_convert (arg1_type, arg1);
5073 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5074 case NE_EXPR:
5075 case LTGT_EXPR:
5076 return pedantic_non_lvalue (fold_convert (type, arg1));
5077 case UNGE_EXPR:
5078 case UNGT_EXPR:
5079 if (flag_trapping_math)
5080 break;
5081 /* Fall through. */
5082 case GE_EXPR:
5083 case GT_EXPR:
5084 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5085 arg1 = fold_convert (signed_type_for
5086 (TREE_TYPE (arg1)), arg1);
5087 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5088 return pedantic_non_lvalue (fold_convert (type, tem));
5089 case UNLE_EXPR:
5090 case UNLT_EXPR:
5091 if (flag_trapping_math)
5092 break;
5093 case LE_EXPR:
5094 case LT_EXPR:
5095 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5096 arg1 = fold_convert (signed_type_for
5097 (TREE_TYPE (arg1)), arg1);
5098 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5099 return negate_expr (fold_convert (type, tem));
5100 default:
5101 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5102 break;
5105 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5106 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5107 both transformations are correct when A is NaN: A != 0
5108 is then true, and A == 0 is false. */
5110 if (integer_zerop (arg01) && integer_zerop (arg2))
5112 if (comp_code == NE_EXPR)
5113 return pedantic_non_lvalue (fold_convert (type, arg1));
5114 else if (comp_code == EQ_EXPR)
5115 return build_int_cst (type, 0);
5118 /* Try some transformations of A op B ? A : B.
5120 A == B? A : B same as B
5121 A != B? A : B same as A
5122 A >= B? A : B same as max (A, B)
5123 A > B? A : B same as max (B, A)
5124 A <= B? A : B same as min (A, B)
5125 A < B? A : B same as min (B, A)
5127 As above, these transformations don't work in the presence
5128 of signed zeros. For example, if A and B are zeros of
5129 opposite sign, the first two transformations will change
5130 the sign of the result. In the last four, the original
5131 expressions give different results for (A=+0, B=-0) and
5132 (A=-0, B=+0), but the transformed expressions do not.
5134 The first two transformations are correct if either A or B
5135 is a NaN. In the first transformation, the condition will
5136 be false, and B will indeed be chosen. In the case of the
5137 second transformation, the condition A != B will be true,
5138 and A will be chosen.
5140 The conversions to max() and min() are not correct if B is
5141 a number and A is not. The conditions in the original
5142 expressions will be false, so all four give B. The min()
5143 and max() versions would give a NaN instead. */
5144 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
5145 /* Avoid these transformations if the COND_EXPR may be used
5146 as an lvalue in the C++ front-end. PR c++/19199. */
5147 && (in_gimple_form
5148 || (strcmp (lang_hooks.name, "GNU C++") != 0
5149 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5150 || ! maybe_lvalue_p (arg1)
5151 || ! maybe_lvalue_p (arg2)))
5153 tree comp_op0 = arg00;
5154 tree comp_op1 = arg01;
5155 tree comp_type = TREE_TYPE (comp_op0);
5157 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5158 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5160 comp_type = type;
5161 comp_op0 = arg1;
5162 comp_op1 = arg2;
5165 switch (comp_code)
5167 case EQ_EXPR:
5168 return pedantic_non_lvalue (fold_convert (type, arg2));
5169 case NE_EXPR:
5170 return pedantic_non_lvalue (fold_convert (type, arg1));
5171 case LE_EXPR:
5172 case LT_EXPR:
5173 case UNLE_EXPR:
5174 case UNLT_EXPR:
5175 /* In C++ a ?: expression can be an lvalue, so put the
5176 operand which will be used if they are equal first
5177 so that we can convert this back to the
5178 corresponding COND_EXPR. */
5179 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5181 comp_op0 = fold_convert (comp_type, comp_op0);
5182 comp_op1 = fold_convert (comp_type, comp_op1);
5183 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5184 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5185 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5186 return pedantic_non_lvalue (fold_convert (type, tem));
5188 break;
5189 case GE_EXPR:
5190 case GT_EXPR:
5191 case UNGE_EXPR:
5192 case UNGT_EXPR:
5193 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5195 comp_op0 = fold_convert (comp_type, comp_op0);
5196 comp_op1 = fold_convert (comp_type, comp_op1);
5197 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5198 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5199 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5200 return pedantic_non_lvalue (fold_convert (type, tem));
5202 break;
5203 case UNEQ_EXPR:
5204 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5205 return pedantic_non_lvalue (fold_convert (type, arg2));
5206 break;
5207 case LTGT_EXPR:
5208 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5209 return pedantic_non_lvalue (fold_convert (type, arg1));
5210 break;
5211 default:
5212 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5213 break;
5217 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5218 we might still be able to simplify this. For example,
5219 if C1 is one less or one more than C2, this might have started
5220 out as a MIN or MAX and been transformed by this function.
5221 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5223 if (INTEGRAL_TYPE_P (type)
5224 && TREE_CODE (arg01) == INTEGER_CST
5225 && TREE_CODE (arg2) == INTEGER_CST)
5226 switch (comp_code)
5228 case EQ_EXPR:
5229 /* We can replace A with C1 in this case. */
5230 arg1 = fold_convert (type, arg01);
5231 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5233 case LT_EXPR:
5234 /* If C1 is C2 + 1, this is min(A, C2). */
5235 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5236 OEP_ONLY_CONST)
5237 && operand_equal_p (arg01,
5238 const_binop (PLUS_EXPR, arg2,
5239 build_int_cst (type, 1), 0),
5240 OEP_ONLY_CONST))
5241 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5242 type,
5243 fold_convert (type, arg1),
5244 arg2));
5245 break;
5247 case LE_EXPR:
5248 /* If C1 is C2 - 1, this is min(A, C2). */
5249 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5250 OEP_ONLY_CONST)
5251 && operand_equal_p (arg01,
5252 const_binop (MINUS_EXPR, arg2,
5253 build_int_cst (type, 1), 0),
5254 OEP_ONLY_CONST))
5255 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5256 type,
5257 fold_convert (type, arg1),
5258 arg2));
5259 break;
5261 case GT_EXPR:
5262 /* If C1 is C2 - 1, this is max(A, C2). */
5263 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5264 OEP_ONLY_CONST)
5265 && operand_equal_p (arg01,
5266 const_binop (MINUS_EXPR, arg2,
5267 build_int_cst (type, 1), 0),
5268 OEP_ONLY_CONST))
5269 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5270 type,
5271 fold_convert (type, arg1),
5272 arg2));
5273 break;
5275 case GE_EXPR:
5276 /* If C1 is C2 + 1, this is max(A, C2). */
5277 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5278 OEP_ONLY_CONST)
5279 && operand_equal_p (arg01,
5280 const_binop (PLUS_EXPR, arg2,
5281 build_int_cst (type, 1), 0),
5282 OEP_ONLY_CONST))
5283 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5284 type,
5285 fold_convert (type, arg1),
5286 arg2));
5287 break;
5288 case NE_EXPR:
5289 break;
5290 default:
5291 gcc_unreachable ();
5294 return NULL_TREE;
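/* Illustrative sketch, not part of the original file: three of the
   rewrites above on ints, where the signed-zero and NaN caveats don't
   apply.  */
static int example_abs (int a)        { return a >= 0 ? a : -a; }  /* abs (a) */
static int example_min (int a, int b) { return a < b ? a : b; }    /* min (b, a) */
/* A < C1 ? A : C2 with C1 == C2 + 1 is min (A, C2): here C1 = 6, C2 = 5.  */
static int example_min_const (int a)  { return a < 6 ? a : 5; }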
5299 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5300 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5301 #endif
5303 /* EXP is some logical combination of boolean tests. See if we can
5304 merge it into some range test. Return the new tree if so. */
5306 static tree
5307 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5309 int or_op = (code == TRUTH_ORIF_EXPR
5310 || code == TRUTH_OR_EXPR);
5311 int in0_p, in1_p, in_p;
5312 tree low0, low1, low, high0, high1, high;
5313 bool strict_overflow_p = false;
5314 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5315 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5316 tree tem;
5317 const char * const warnmsg = G_("assuming signed overflow does not occur "
5318 "when simplifying range test");
5320 /* If this is an OR operation, invert both sides; we will invert
5321 again at the end. */
5322 if (or_op)
5323 in0_p = ! in0_p, in1_p = ! in1_p;
5325 /* If both expressions are the same, if we can merge the ranges, and we
5326 can build the range test, return it or it inverted. If one of the
5327 ranges is always true or always false, consider it to be the same
5328 expression as the other. */
5329 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5330 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5331 in1_p, low1, high1)
5332 && 0 != (tem = (build_range_check (type,
5333 lhs != 0 ? lhs
5334 : rhs != 0 ? rhs : integer_zero_node,
5335 in_p, low, high))))
5337 if (strict_overflow_p)
5338 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5339 return or_op ? invert_truthvalue (tem) : tem;
5342 /* On machines where the branch cost is expensive, if this is a
5343 short-circuited branch and the underlying object on both sides
5344 is the same, make a non-short-circuit operation. */
5345 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5346 && lhs != 0 && rhs != 0
5347 && (code == TRUTH_ANDIF_EXPR
5348 || code == TRUTH_ORIF_EXPR)
5349 && operand_equal_p (lhs, rhs, 0))
5351 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5352 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5353 which cases we can't do this. */
5354 if (simple_operand_p (lhs))
5355 return build2 (code == TRUTH_ANDIF_EXPR
5356 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5357 type, op0, op1);
5359 else if (lang_hooks.decls.global_bindings_p () == 0
5360 && ! CONTAINS_PLACEHOLDER_P (lhs))
5362 tree common = save_expr (lhs);
5364 if (0 != (lhs = build_range_check (type, common,
5365 or_op ? ! in0_p : in0_p,
5366 low0, high0))
5367 && (0 != (rhs = build_range_check (type, common,
5368 or_op ? ! in1_p : in1_p,
5369 low1, high1))))
5371 if (strict_overflow_p)
5372 fold_overflow_warning (warnmsg,
5373 WARN_STRICT_OVERFLOW_COMPARISON);
5374 return build2 (code == TRUTH_ANDIF_EXPR
5375 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5376 type, lhs, rhs);
5381 return 0;
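/* Illustrative sketch, not part of the original file: the
   non-short-circuit rewrite above on plain C.  When both subtests are
   simple (no side effects, nothing that can trap), "&&" can become a
   branch-free "&" on the 0/1 comparison results.  */
static int example_andif (int x) { return x >= 2 && x <= 5; }
static int example_and (int x)   { return (x >= 2) & (x <= 5); }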
5384 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5385 bit value. Arrange things so the extra bits will be set to zero if and
5386 only if C is sign-extended to its full width. If MASK is nonzero,
5387 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5389 static tree
5390 unextend (tree c, int p, int unsignedp, tree mask)
5392 tree type = TREE_TYPE (c);
5393 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5394 tree temp;
5396 if (p == modesize || unsignedp)
5397 return c;
5399 /* We work by getting just the sign bit into the low-order bit, then
5400 into the high-order bit, then sign-extend. We then XOR that value
5401 with C. */
5402 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5403 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5405 /* We must use a signed type in order to get an arithmetic right shift.
5406 However, we must also avoid introducing accidental overflows, so that
5407 a subsequent call to integer_zerop will work. Hence we must
5408 do the type conversion here. At this point, the constant is either
5409 zero or one, and the conversion to a signed type can never overflow.
5410 We could get an overflow if this conversion is done anywhere else. */
5411 if (TYPE_UNSIGNED (type))
5412 temp = fold_convert (signed_type_for (type), temp);
5414 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5415 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5416 if (mask != 0)
5417 temp = const_binop (BIT_AND_EXPR, temp,
5418 fold_convert (TREE_TYPE (c), mask), 0);
5419 /* If necessary, convert the type back to match the type of C. */
5420 if (TYPE_UNSIGNED (type))
5421 temp = fold_convert (type, temp);
5423 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
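/* Illustrative sketch, not part of the original file: the effect of the
   arrangement above on a hypothetical 32-bit value C holding a P-bit
   field, P < 32.  XORing C with the expected sign extension leaves the
   bits above P zero if and only if C arrived correctly sign-extended.  */
static unsigned int
example_unextend (unsigned int c, int p)
{
  unsigned int sign = (c >> (p - 1)) & 1u;       /* the field's sign bit */
  unsigned int ext = sign ? ~0u << p : 0u;       /* expected upper bits */
  return c ^ ext;
}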
5426 /* Find ways of folding logical expressions of LHS and RHS:
5427 Try to merge two comparisons to the same innermost item.
5428 Look for range tests like "ch >= '0' && ch <= '9'".
5429 Look for combinations of simple terms on machines with expensive branches
5430 and evaluate the RHS unconditionally.
5432 For example, if we have p->a == 2 && p->b == 4 and we can make an
5433 object large enough to span both A and B, we can do this with a comparison
5434 against the object ANDed with a mask.
5436 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5437 operations to do this with one comparison.
5439 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5440 function and the one above.
5442 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5443 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5445 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5446 two operands.
5448 We return the simplified tree or 0 if no optimization is possible. */
5450 static tree
5451 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5453 /* If this is the "or" of two comparisons, we can do something if
5454 the comparisons are NE_EXPR. If this is the "and", we can do something
5455 if the comparisons are EQ_EXPR. I.e.,
5456 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5458 WANTED_CODE is this operation code. For single bit fields, we can
5459 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5460 comparison for one-bit fields. */
5462 enum tree_code wanted_code;
5463 enum tree_code lcode, rcode;
5464 tree ll_arg, lr_arg, rl_arg, rr_arg;
5465 tree ll_inner, lr_inner, rl_inner, rr_inner;
5466 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5467 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5468 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5469 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5470 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5471 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5472 enum machine_mode lnmode, rnmode;
5473 tree ll_mask, lr_mask, rl_mask, rr_mask;
5474 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5475 tree l_const, r_const;
5476 tree lntype, rntype, result;
5477 int first_bit, end_bit;
5478 int volatilep;
5479 tree orig_lhs = lhs, orig_rhs = rhs;
5480 enum tree_code orig_code = code;
5482 /* Start by getting the comparison codes. Fail if anything is volatile.
5483 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5484 it were surrounded with a NE_EXPR. */
5486 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5487 return 0;
5489 lcode = TREE_CODE (lhs);
5490 rcode = TREE_CODE (rhs);
5492 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5494 lhs = build2 (NE_EXPR, truth_type, lhs,
5495 build_int_cst (TREE_TYPE (lhs), 0));
5496 lcode = NE_EXPR;
5499 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5501 rhs = build2 (NE_EXPR, truth_type, rhs,
5502 build_int_cst (TREE_TYPE (rhs), 0));
5503 rcode = NE_EXPR;
5506 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5507 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5508 return 0;
5510 ll_arg = TREE_OPERAND (lhs, 0);
5511 lr_arg = TREE_OPERAND (lhs, 1);
5512 rl_arg = TREE_OPERAND (rhs, 0);
5513 rr_arg = TREE_OPERAND (rhs, 1);
5515 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5516 if (simple_operand_p (ll_arg)
5517 && simple_operand_p (lr_arg))
5519 tree result;
5520 if (operand_equal_p (ll_arg, rl_arg, 0)
5521 && operand_equal_p (lr_arg, rr_arg, 0))
5523 result = combine_comparisons (code, lcode, rcode,
5524 truth_type, ll_arg, lr_arg);
5525 if (result)
5526 return result;
5528 else if (operand_equal_p (ll_arg, rr_arg, 0)
5529 && operand_equal_p (lr_arg, rl_arg, 0))
5531 result = combine_comparisons (code, lcode,
5532 swap_tree_comparison (rcode),
5533 truth_type, ll_arg, lr_arg);
5534 if (result)
5535 return result;
5539 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5540 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5542 /* If the RHS can be evaluated unconditionally and its operands are
5543 simple, it wins to evaluate the RHS unconditionally on machines
5544 with expensive branches. In this case, this isn't a comparison
5545 that can be merged. Avoid doing this if the RHS is a floating-point
5546 comparison since those can trap. */
5548 if (BRANCH_COST >= 2
5549 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5550 && simple_operand_p (rl_arg)
5551 && simple_operand_p (rr_arg))
5553 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5554 if (code == TRUTH_OR_EXPR
5555 && lcode == NE_EXPR && integer_zerop (lr_arg)
5556 && rcode == NE_EXPR && integer_zerop (rr_arg)
5557 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5558 return build2 (NE_EXPR, truth_type,
5559 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5560 ll_arg, rl_arg),
5561 build_int_cst (TREE_TYPE (ll_arg), 0));
5563 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5564 if (code == TRUTH_AND_EXPR
5565 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5566 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5567 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5568 return build2 (EQ_EXPR, truth_type,
5569 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5570 ll_arg, rl_arg),
5571 build_int_cst (TREE_TYPE (ll_arg), 0));
5573 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5575 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5576 return build2 (code, truth_type, lhs, rhs);
5577 return NULL_TREE;
5581 /* See if the comparisons can be merged. Then get all the parameters for
5582 each side. */
5584 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5585 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5586 return 0;
5588 volatilep = 0;
5589 ll_inner = decode_field_reference (ll_arg,
5590 &ll_bitsize, &ll_bitpos, &ll_mode,
5591 &ll_unsignedp, &volatilep, &ll_mask,
5592 &ll_and_mask);
5593 lr_inner = decode_field_reference (lr_arg,
5594 &lr_bitsize, &lr_bitpos, &lr_mode,
5595 &lr_unsignedp, &volatilep, &lr_mask,
5596 &lr_and_mask);
5597 rl_inner = decode_field_reference (rl_arg,
5598 &rl_bitsize, &rl_bitpos, &rl_mode,
5599 &rl_unsignedp, &volatilep, &rl_mask,
5600 &rl_and_mask);
5601 rr_inner = decode_field_reference (rr_arg,
5602 &rr_bitsize, &rr_bitpos, &rr_mode,
5603 &rr_unsignedp, &volatilep, &rr_mask,
5604 &rr_and_mask);
5606 /* The inner operation on the lhs of each comparison must be the
5607 same if we are to be able to do anything.
5608 Then see if we have constants. If not, the same must be true for
5609 the rhs's. */
5610 if (volatilep || ll_inner == 0 || rl_inner == 0
5611 || ! operand_equal_p (ll_inner, rl_inner, 0))
5612 return 0;
5614 if (TREE_CODE (lr_arg) == INTEGER_CST
5615 && TREE_CODE (rr_arg) == INTEGER_CST)
5616 l_const = lr_arg, r_const = rr_arg;
5617 else if (lr_inner == 0 || rr_inner == 0
5618 || ! operand_equal_p (lr_inner, rr_inner, 0))
5619 return 0;
5620 else
5621 l_const = r_const = 0;
5623 /* If either comparison code is not correct for our logical operation,
5624 fail. However, we can convert a one-bit comparison against zero into
5625 the opposite comparison against that bit being set in the field. */
5627 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5628 if (lcode != wanted_code)
5630 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5632 /* Make the left operand unsigned, since we are only interested
5633 in the value of one bit. Otherwise we are doing the wrong
5634 thing below. */
5635 ll_unsignedp = 1;
5636 l_const = ll_mask;
5638 else
5639 return 0;
5642 /* This is analogous to the code for l_const above. */
5643 if (rcode != wanted_code)
5645 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5647 rl_unsignedp = 1;
5648 r_const = rl_mask;
5650 else
5651 return 0;
5654 /* See if we can find a mode that contains both fields being compared on
5655 the left. If we can't, fail. Otherwise, update all constants and masks
5656 to be relative to a field of that size. */
5657 first_bit = MIN (ll_bitpos, rl_bitpos);
5658 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5659 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5660 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5661 volatilep);
5662 if (lnmode == VOIDmode)
5663 return 0;
5665 lnbitsize = GET_MODE_BITSIZE (lnmode);
5666 lnbitpos = first_bit & ~ (lnbitsize - 1);
5667 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5668 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5670 if (BYTES_BIG_ENDIAN)
5672 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5673 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5676 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5677 size_int (xll_bitpos), 0);
5678 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5679 size_int (xrl_bitpos), 0);
5681 if (l_const)
5683 l_const = fold_convert (lntype, l_const);
5684 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5685 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5686 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5687 fold_build1 (BIT_NOT_EXPR,
5688 lntype, ll_mask),
5689 0)))
5691 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5693 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5696 if (r_const)
5698 r_const = fold_convert (lntype, r_const);
5699 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5700 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5701 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5702 fold_build1 (BIT_NOT_EXPR,
5703 lntype, rl_mask),
5704 0)))
5706 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5708 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6712 /* If the right sides are not constant, do the same for them. Also,
5713 disallow this optimization if a size or signedness mismatch occurs
5714 between the left and right sides. */
5715 if (l_const == 0)
5717 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5718 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5719 /* Make sure the two fields on the right
5720 correspond to the left without being swapped. */
5721 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5722 return 0;
5724 first_bit = MIN (lr_bitpos, rr_bitpos);
5725 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5726 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5727 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5728 volatilep);
5729 if (rnmode == VOIDmode)
5730 return 0;
5732 rnbitsize = GET_MODE_BITSIZE (rnmode);
5733 rnbitpos = first_bit & ~ (rnbitsize - 1);
5734 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5735 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5737 if (BYTES_BIG_ENDIAN)
5739 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5740 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5743 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5744 size_int (xlr_bitpos), 0);
5745 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5746 size_int (xrr_bitpos), 0);
5748 /* Make a mask that corresponds to both fields being compared.
5749 Do this for both items being compared. If the operands are the
5750 same size and the bits being compared are in the same position
5751 then we can do this by masking both and comparing the masked
5752 results. */
5753 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5754 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5755 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5757 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5758 ll_unsignedp || rl_unsignedp);
5759 if (! all_ones_mask_p (ll_mask, lnbitsize))
5760 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5762 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5763 lr_unsignedp || rr_unsignedp);
5764 if (! all_ones_mask_p (lr_mask, rnbitsize))
5765 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5767 return build2 (wanted_code, truth_type, lhs, rhs);
5770 /* There is still another way we can do something: If both pairs of
5771 fields being compared are adjacent, we may be able to make a wider
5772 field containing them both.
5774 Note that we still must mask the lhs/rhs expressions. Furthermore,
5775 the mask must be shifted to account for the shift done by
5776 make_bit_field_ref. */
5777 if ((ll_bitsize + ll_bitpos == rl_bitpos
5778 && lr_bitsize + lr_bitpos == rr_bitpos)
5779 || (ll_bitpos == rl_bitpos + rl_bitsize
5780 && lr_bitpos == rr_bitpos + rr_bitsize))
5782 tree type;
5784 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5785 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5786 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5787 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5789 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5790 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5791 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5792 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5794 /* Convert to the smaller type before masking out unwanted bits. */
5795 type = lntype;
5796 if (lntype != rntype)
5798 if (lnbitsize > rnbitsize)
5800 lhs = fold_convert (rntype, lhs);
5801 ll_mask = fold_convert (rntype, ll_mask);
5802 type = rntype;
5804 else if (lnbitsize < rnbitsize)
5806 rhs = fold_convert (lntype, rhs);
5807 lr_mask = fold_convert (lntype, lr_mask);
5808 type = lntype;
5812 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5813 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5815 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5816 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5818 return build2 (wanted_code, truth_type, lhs, rhs);
5821 return 0;
5824 /* Handle the case of comparisons with constants. If there is something in
5825 common between the masks, those bits of the constants must be the same.
5826 If not, the condition is always false. Test for this to avoid generating
5827 incorrect code below. */
5828 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5829 if (! integer_zerop (result)
5830 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5831 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5833 if (wanted_code == NE_EXPR)
5835 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5836 return constant_boolean_node (true, truth_type);
5838 else
5840 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5841 return constant_boolean_node (false, truth_type);
5845 /* Construct the expression we will return. First get the component
5846 reference we will make. Unless the mask is all ones the width of
5847 that field, perform the mask operation. Then compare with the
5848 merged constant. */
5849 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5850 ll_unsignedp || rl_unsignedp);
5852 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5853 if (! all_ones_mask_p (ll_mask, lnbitsize))
5854 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5856 return build2 (wanted_code, truth_type, result,
5857 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
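/* A worked example (illustrative sketch, not from the original source):
   for adjacent bit-fields compared against constants, e.g.

       struct s { unsigned a : 4; unsigned b : 4; } x;
       ... x.a == 3 && x.b == 5 ...

   the code above emits one bit-field load covering both fields, one
   BIT_AND_EXPR with the IORed masks, and one comparison against the
   IORed shifted constants, instead of two loads and two tests.  The
   exact shift positions depend on BYTES_BIG_ENDIAN.  */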
5860 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5861 constant. */
5863 static tree
5864 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5866 tree arg0 = op0;
5867 enum tree_code op_code;
5868 tree comp_const = op1;
5869 tree minmax_const;
5870 int consts_equal, consts_lt;
5871 tree inner;
5873 STRIP_SIGN_NOPS (arg0);
5875 op_code = TREE_CODE (arg0);
5876 minmax_const = TREE_OPERAND (arg0, 1);
5877 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5878 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5879 inner = TREE_OPERAND (arg0, 0);
5881 /* If something does not permit us to optimize, return the original tree. */
5882 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5883 || TREE_CODE (comp_const) != INTEGER_CST
5884 || TREE_OVERFLOW (comp_const)
5885 || TREE_CODE (minmax_const) != INTEGER_CST
5886 || TREE_OVERFLOW (minmax_const))
5887 return NULL_TREE;
5889 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5890 and GT_EXPR, doing the rest with recursive calls using logical
5891 simplifications. */
5892 switch (code)
5894 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5896 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5897 type, op0, op1);
5898 if (tem)
5899 return invert_truthvalue (tem);
5900 return NULL_TREE;
5903 case GE_EXPR:
5904 return
5905 fold_build2 (TRUTH_ORIF_EXPR, type,
5906 optimize_minmax_comparison
5907 (EQ_EXPR, type, arg0, comp_const),
5908 optimize_minmax_comparison
5909 (GT_EXPR, type, arg0, comp_const));
5911 case EQ_EXPR:
5912 if (op_code == MAX_EXPR && consts_equal)
5913 /* MAX (X, 0) == 0 -> X <= 0 */
5914 return fold_build2 (LE_EXPR, type, inner, comp_const);
5916 else if (op_code == MAX_EXPR && consts_lt)
5917 /* MAX (X, 0) == 5 -> X == 5 */
5918 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5920 else if (op_code == MAX_EXPR)
5921 /* MAX (X, 0) == -1 -> false */
5922 return omit_one_operand (type, integer_zero_node, inner);
5924 else if (consts_equal)
5925 /* MIN (X, 0) == 0 -> X >= 0 */
5926 return fold_build2 (GE_EXPR, type, inner, comp_const);
5928 else if (consts_lt)
5929 /* MIN (X, 0) == 5 -> false */
5930 return omit_one_operand (type, integer_zero_node, inner);
5932 else
5933 /* MIN (X, 0) == -1 -> X == -1 */
5934 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5936 case GT_EXPR:
5937 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5938 /* MAX (X, 0) > 0 -> X > 0
5939 MAX (X, 0) > 5 -> X > 5 */
5940 return fold_build2 (GT_EXPR, type, inner, comp_const);
5942 else if (op_code == MAX_EXPR)
5943 /* MAX (X, 0) > -1 -> true */
5944 return omit_one_operand (type, integer_one_node, inner);
5946 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5947 /* MIN (X, 0) > 0 -> false
5948 MIN (X, 0) > 5 -> false */
5949 return omit_one_operand (type, integer_zero_node, inner);
5951 else
5952 /* MIN (X, 0) > -1 -> X > -1 */
5953 return fold_build2 (GT_EXPR, type, inner, comp_const);
5955 default:
5956 return NULL_TREE;
5960 /* T is an integer expression that is being multiplied or divided by, or
5961 reduced modulo, a constant C (CODE says which operation and what kind of
5962 divide or modulus). See if we can eliminate that operation by folding it with
5963 other operations already in T. WIDE_TYPE, if non-null, is a type that
5964 should be used for the computation if wider than our type.
5966 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5967 (X * 2) + (Y * 4). We must, however, be assured that either the original
5968 expression would not overflow or that overflow is undefined for the type
5969 in the language in question.
5971 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5972 the machine has a multiply-accumulate insn or that this is part of an
5973 addressing calculation.
5975 If we return a non-null expression, it is an equivalent form of the
5976 original computation, but need not be in the original type.
5978 We set *STRICT_OVERFLOW_P to true if the return value depends on
5979 signed overflow being undefined. Otherwise we do not change
5980 *STRICT_OVERFLOW_P. */
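/* A worked example (illustrative sketch, not from the original source):
   for signed int i, with signed overflow treated as undefined,

       (i * 8 + 4) / 4   ==>   i * 2 + 1

   via the PLUS_EXPR case below: both addends are multiples of the
   constant, so the division distributes over the sum.  */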
5982 static tree
5983 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5984 bool *strict_overflow_p)
5986 /* To avoid exponential search depth, refuse to allow recursion past
5987 three levels. Beyond that (1) it's highly unlikely that we'll find
5988 something interesting and (2) we've probably processed it before
5989 when we built the inner expression. */
5991 static int depth;
5992 tree ret;
5994 if (depth > 3)
5995 return NULL;
5997 depth++;
5998 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5999 depth--;
6001 return ret;
6004 static tree
6005 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6006 bool *strict_overflow_p)
6008 tree type = TREE_TYPE (t);
6009 enum tree_code tcode = TREE_CODE (t);
6010 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6011 > GET_MODE_SIZE (TYPE_MODE (type)))
6012 ? wide_type : type);
6013 tree t1, t2;
6014 int same_p = tcode == code;
6015 tree op0 = NULL_TREE, op1 = NULL_TREE;
6016 bool sub_strict_overflow_p;
6018 /* Don't deal with constants of zero here; they confuse the code below. */
6019 if (integer_zerop (c))
6020 return NULL_TREE;
6022 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6023 op0 = TREE_OPERAND (t, 0);
6025 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6026 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6028 /* Note that we need not handle conditional operations here since fold
6029 already handles those cases. So just do arithmetic here. */
6030 switch (tcode)
6032 case INTEGER_CST:
6033 /* For a constant, we can always simplify if we are a multiply
6034 or (for divide and modulus) if it is a multiple of our constant. */
6035 if (code == MULT_EXPR
6036 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6037 return const_binop (code, fold_convert (ctype, t),
6038 fold_convert (ctype, c), 0);
6039 break;
6041 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0)
6044 || UNARY_CLASS_P (op0)
6045 || BINARY_CLASS_P (op0)
6046 || VL_EXP_CLASS_P (op0)
6047 || EXPRESSION_CLASS_P (op0))
6048 /* ... and is unsigned, and its type is smaller than ctype,
6049 then we cannot pass through as widening. */
6050 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
6051 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6052 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6053 && (GET_MODE_SIZE (TYPE_MODE (ctype))
6054 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
6055 /* ... or this is a truncation (t is narrower than op0),
6056 then we cannot pass through this narrowing. */
6057 || (GET_MODE_SIZE (TYPE_MODE (type))
6058 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
6059 /* ... or signedness changes for division or modulus,
6060 then we cannot pass through this conversion. */
6061 || (code != MULT_EXPR
6062 && (TYPE_UNSIGNED (ctype)
6063 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
6064 break;
6066 /* Pass the constant down and see if we can make a simplification. If
6067 we can, replace this expression with the inner simplification for
6068 possible later conversion to our or some other type. */
6069 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6070 && TREE_CODE (t2) == INTEGER_CST
6071 && !TREE_OVERFLOW (t2)
6072 && (0 != (t1 = extract_muldiv (op0, t2, code,
6073 code == MULT_EXPR
6074 ? ctype : NULL_TREE,
6075 strict_overflow_p))))
6076 return t1;
6077 break;
6079 case ABS_EXPR:
6080 /* If widening the type changes it from signed to unsigned, then we
6081 must avoid building ABS_EXPR itself as unsigned. */
6082 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6084 tree cstype = (*signed_type_for) (ctype);
6085 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6086 != 0)
6088 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6089 return fold_convert (ctype, t1);
6091 break;
6093 /* FALLTHROUGH */
6094 case NEGATE_EXPR:
6095 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6096 != 0)
6097 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6098 break;
6100 case MIN_EXPR: case MAX_EXPR:
6101 /* If widening the type changes the signedness, then we can't perform
6102 this optimization as that changes the result. */
6103 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6104 break;
6106 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6107 sub_strict_overflow_p = false;
6108 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6109 &sub_strict_overflow_p)) != 0
6110 && (t2 = extract_muldiv (op1, c, code, wide_type,
6111 &sub_strict_overflow_p)) != 0)
6113 if (tree_int_cst_sgn (c) < 0)
6114 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6115 if (sub_strict_overflow_p)
6116 *strict_overflow_p = true;
6117 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6118 fold_convert (ctype, t2));
6120 break;
6122 case LSHIFT_EXPR: case RSHIFT_EXPR:
6123 /* If the second operand is constant, this is a multiplication
6124 or floor division by a power of two, so we can treat it that
6125 way unless the multiplier or divisor overflows. Signed
6126 left-shift overflow is implementation-defined rather than
6127 undefined in C90, so do not convert signed left shift into
6128 multiplication. */
6129 if (TREE_CODE (op1) == INTEGER_CST
6130 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6131 /* const_binop may not detect overflow correctly,
6132 so check for it explicitly here. */
6133 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6134 && TREE_INT_CST_HIGH (op1) == 0
6135 && 0 != (t1 = fold_convert (ctype,
6136 const_binop (LSHIFT_EXPR,
6137 size_one_node,
6138 op1, 0)))
6139 && !TREE_OVERFLOW (t1))
6140 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6141 ? MULT_EXPR : FLOOR_DIV_EXPR,
6142 ctype, fold_convert (ctype, op0), t1),
6143 c, code, wide_type, strict_overflow_p);
6144 break;
6146 case PLUS_EXPR: case MINUS_EXPR:
6147 /* See if we can eliminate the operation on both sides. If we can, we
6148 can return a new PLUS or MINUS. If we can't, the only remaining
6149 cases where we can do anything are if the second operand is a
6150 constant. */
6151 sub_strict_overflow_p = false;
6152 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6153 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6154 if (t1 != 0 && t2 != 0
6155 && (code == MULT_EXPR
6156 /* If not multiplication, we can only do this if both operands
6157 are divisible by c. */
6158 || (multiple_of_p (ctype, op0, c)
6159 && multiple_of_p (ctype, op1, c))))
6161 if (sub_strict_overflow_p)
6162 *strict_overflow_p = true;
6163 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6164 fold_convert (ctype, t2));
6167 /* If this was a subtraction, negate OP1 and set it to be an addition.
6168 This simplifies the logic below. */
6169 if (tcode == MINUS_EXPR)
6170 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6172 if (TREE_CODE (op1) != INTEGER_CST)
6173 break;
6175 /* If either OP1 or C are negative, this optimization is not safe for
6176 some of the division and remainder types while for others we need
6177 to change the code. */
6178 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6180 if (code == CEIL_DIV_EXPR)
6181 code = FLOOR_DIV_EXPR;
6182 else if (code == FLOOR_DIV_EXPR)
6183 code = CEIL_DIV_EXPR;
6184 else if (code != MULT_EXPR
6185 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6186 break;
6189 /* If it's a multiply or a division/modulus operation of a multiple
6190 of our constant, do the operation and verify it doesn't overflow. */
6191 if (code == MULT_EXPR
6192 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6194 op1 = const_binop (code, fold_convert (ctype, op1),
6195 fold_convert (ctype, c), 0);
6196 /* We allow the constant to overflow with wrapping semantics. */
6197 if (op1 == 0
6198 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6199 break;
6201 else
6202 break;
6204 /* If we have an unsigned type that is not a sizetype, we cannot widen
6205 the operation since it will change the result if the original
6206 computation overflowed. */
6207 if (TYPE_UNSIGNED (ctype)
6208 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6209 && ctype != type)
6210 break;
6212 /* If we were able to eliminate our operation from the first side,
6213 apply our operation to the second side and reform the PLUS. */
6214 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6215 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6217 /* The last case is if we are a multiply. In that case, we can
6218 apply the distributive law to commute the multiply and addition
6219 if the multiplication of the constants doesn't overflow. */
6220 if (code == MULT_EXPR)
6221 return fold_build2 (tcode, ctype,
6222 fold_build2 (code, ctype,
6223 fold_convert (ctype, op0),
6224 fold_convert (ctype, c)),
6225 op1);
6227 break;
6229 case MULT_EXPR:
6230 /* We have a special case here if we are doing something like
6231 (C * 8) % 4 since we know that's zero. */
6232 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6233 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6234 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6235 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6236 return omit_one_operand (type, integer_zero_node, op0);
6238 /* ... fall through ... */
6240 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6241 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6242 /* If we can extract our operation from the LHS, do so and return a
6243 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6244 do something only if the second operand is a constant. */
6245 if (same_p
6246 && (t1 = extract_muldiv (op0, c, code, wide_type,
6247 strict_overflow_p)) != 0)
6248 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6249 fold_convert (ctype, op1));
6250 else if (tcode == MULT_EXPR && code == MULT_EXPR
6251 && (t1 = extract_muldiv (op1, c, code, wide_type,
6252 strict_overflow_p)) != 0)
6253 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6254 fold_convert (ctype, t1));
6255 else if (TREE_CODE (op1) != INTEGER_CST)
6256 return 0;
6258 /* If these are the same operation types, we can associate them
6259 assuming no overflow. */
6260 if (tcode == code
6261 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
6262 fold_convert (ctype, c), 0))
6263 && !TREE_OVERFLOW (t1))
6264 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6266 /* If these operations "cancel" each other, we have the main
6267 optimizations of this pass, which occur when either constant is a
6268 multiple of the other, in which case we replace this with an
6269 operation of either CODE or TCODE.
6271 If we have an unsigned type that is not a sizetype, we cannot do
6272 this since it will change the result if the original computation
6273 overflowed. */
6274 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6275 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6276 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6277 || (tcode == MULT_EXPR
6278 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6279 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6280 && code != MULT_EXPR)))
6282 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6284 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6285 *strict_overflow_p = true;
6286 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6287 fold_convert (ctype,
6288 const_binop (TRUNC_DIV_EXPR,
6289 op1, c, 0)));
6291 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6293 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6294 *strict_overflow_p = true;
6295 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6296 fold_convert (ctype,
6297 const_binop (TRUNC_DIV_EXPR,
6298 c, op1, 0)));
6301 break;
6303 default:
6304 break;
6307 return 0;
6310 /* Return a node which has the indicated constant VALUE (either 0 or
6311 1), and is of the indicated TYPE. */
6313 tree
6314 constant_boolean_node (int value, tree type)
6316 if (type == integer_type_node)
6317 return value ? integer_one_node : integer_zero_node;
6318 else if (type == boolean_type_node)
6319 return value ? boolean_true_node : boolean_false_node;
6320 else
6321 return build_int_cst (type, value);
6325 /* Return true if expr looks like an ARRAY_REF and set base and
6326 offset to the appropriate trees. If there is no offset,
6327 offset is set to NULL_TREE. Base will be canonicalized to
6328 something you can get the element type from using
6329 TREE_TYPE (TREE_TYPE (base)). Offset will be the byte offset
6330 from the base, expressed in sizetype. */
6332 static bool
6333 extract_array_ref (tree expr, tree *base, tree *offset)
6335 /* One canonical form is a PLUS_EXPR with the first
6336 argument being an ADDR_EXPR with a possible NOP_EXPR
6337 attached. */
6338 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
6340 tree op0 = TREE_OPERAND (expr, 0);
6341 tree inner_base, dummy1;
6342 /* Strip NOP_EXPRs here because the C frontends and/or
6343 folders may present us with (int *)&x.a p+ 4. */
6344 STRIP_NOPS (op0);
6345 if (extract_array_ref (op0, &inner_base, &dummy1))
6347 *base = inner_base;
6348 *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
6349 if (dummy1 != NULL_TREE)
6350 *offset = fold_build2 (PLUS_EXPR, sizetype,
6351 dummy1, *offset);
6352 return true;
6355 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
6356 which we transform into an ADDR_EXPR with appropriate
6357 offset. For other arguments to the ADDR_EXPR we assume
6358 zero offset and as such do not care about the ADDR_EXPR
6359 type and strip possible nops from it. */
6360 else if (TREE_CODE (expr) == ADDR_EXPR)
6362 tree op0 = TREE_OPERAND (expr, 0);
6363 if (TREE_CODE (op0) == ARRAY_REF)
6365 tree idx = TREE_OPERAND (op0, 1);
6366 *base = TREE_OPERAND (op0, 0);
6367 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6368 array_ref_element_size (op0));
6369 *offset = fold_convert (sizetype, *offset);
6371 else
6373 /* Handle array-to-pointer decay as &a. */
6374 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6375 *base = TREE_OPERAND (expr, 0);
6376 else
6377 *base = expr;
6378 *offset = NULL_TREE;
6380 return true;
6382 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6383 else if (SSA_VAR_P (expr)
6384 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6386 *base = expr;
6387 *offset = NULL_TREE;
6388 return true;
6391 return false;
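/* A worked example (illustrative sketch, not from the original source):
   given "int a[10];", the expression &a[3] decomposes into
   base == a and offset == 3 * sizeof (int) (the index times
   array_ref_element_size, converted to sizetype), while a plain
   pointer p yields base == p and offset == NULL_TREE.  */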
6395 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6396 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6397 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6398 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6399 COND is the first argument to CODE; otherwise (as in the example
6400 given here), it is the second argument. TYPE is the type of the
6401 original expression. Return NULL_TREE if no simplification is
6402 possible. */
6404 static tree
6405 fold_binary_op_with_conditional_arg (enum tree_code code,
6406 tree type, tree op0, tree op1,
6407 tree cond, tree arg, int cond_first_p)
6409 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6410 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6411 tree test, true_value, false_value;
6412 tree lhs = NULL_TREE;
6413 tree rhs = NULL_TREE;
6415 /* This transformation is only worthwhile if we don't have to wrap
6416 arg in a SAVE_EXPR, and the operation can be simplified on at least
6417 one of the branches once it is pushed inside the COND_EXPR. */
6418 if (!TREE_CONSTANT (arg))
6419 return NULL_TREE;
6421 if (TREE_CODE (cond) == COND_EXPR)
6423 test = TREE_OPERAND (cond, 0);
6424 true_value = TREE_OPERAND (cond, 1);
6425 false_value = TREE_OPERAND (cond, 2);
6426 /* If this operand throws an exception, then it does not make
6427 sense to try to perform a logical or arithmetic operation
6428 involving it. */
6429 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6430 lhs = true_value;
6431 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6432 rhs = false_value;
6434 else
6436 tree testtype = TREE_TYPE (cond);
6437 test = cond;
6438 true_value = constant_boolean_node (true, testtype);
6439 false_value = constant_boolean_node (false, testtype);
6442 arg = fold_convert (arg_type, arg);
6443 if (lhs == 0)
6445 true_value = fold_convert (cond_type, true_value);
6446 if (cond_first_p)
6447 lhs = fold_build2 (code, type, true_value, arg);
6448 else
6449 lhs = fold_build2 (code, type, arg, true_value);
6451 if (rhs == 0)
6453 false_value = fold_convert (cond_type, false_value);
6454 if (cond_first_p)
6455 rhs = fold_build2 (code, type, false_value, arg);
6456 else
6457 rhs = fold_build2 (code, type, arg, false_value);
6460 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6461 return fold_convert (type, test);
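/* A worked example (illustrative sketch, not from the original source):
   with a constant ARG,

       (b ? x : y) + 1   ==>   b ? x + 1 : y + 1

   giving fold a chance to simplify each arm separately; the
   TREE_CONSTANT check above avoids duplicating an ARG that would
   otherwise need a SAVE_EXPR.  */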
6465 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6467 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6468 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6469 ADDEND is the same as X.
6471 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6472 and finite. The problematic cases are when X is zero, and its mode
6473 has signed zeros. In the case of rounding towards -infinity,
6474 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6475 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6477 static bool
6478 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6480 if (!real_zerop (addend))
6481 return false;
6483 /* Don't allow the fold with -fsignaling-nans. */
6484 if (HONOR_SNANS (TYPE_MODE (type)))
6485 return false;
6487 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6489 return true;
6491 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6492 if (TREE_CODE (addend) == REAL_CST
6493 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6494 negate = !negate;
6496 /* The mode has signed zeros, and we have to honor their sign.
6497 In this situation, there is only one case we can return true for.
6498 X - 0 is the same as X unless rounding towards -infinity is
6499 supported. */
6500 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
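/* A worked example (illustrative sketch, not from the original source):
   for double x, "x - 0.0" may fold to "x" as long as sign-dependent
   rounding is not honored, but "x + 0.0" may not fold when signed
   zeros are honored, because (-0.0) + 0.0 is +0.0 under
   round-to-nearest.  */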
6503 /* Subroutine of fold() that checks comparisons of built-in math
6504 functions against real constants.
6506 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6507 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6508 is the type of the result and ARG0 and ARG1 are the operands of the
6509 comparison. ARG1 must be a TREE_REAL_CST.
6511 The function returns the constant folded tree if a simplification
6512 can be made, and NULL_TREE otherwise. */
6514 static tree
6515 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6516 tree type, tree arg0, tree arg1)
6518 REAL_VALUE_TYPE c;
6520 if (BUILTIN_SQRT_P (fcode))
6522 tree arg = CALL_EXPR_ARG (arg0, 0);
6523 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6525 c = TREE_REAL_CST (arg1);
6526 if (REAL_VALUE_NEGATIVE (c))
6528 /* sqrt(x) < y is always false, if y is negative. */
6529 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6530 return omit_one_operand (type, integer_zero_node, arg);
6532 /* sqrt(x) > y is always true, if y is negative and we
6533 don't care about NaNs, i.e. negative values of x. */
6534 if (code == NE_EXPR || !HONOR_NANS (mode))
6535 return omit_one_operand (type, integer_one_node, arg);
6537 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6538 return fold_build2 (GE_EXPR, type, arg,
6539 build_real (TREE_TYPE (arg), dconst0));
6541 else if (code == GT_EXPR || code == GE_EXPR)
6543 REAL_VALUE_TYPE c2;
6545 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6546 real_convert (&c2, mode, &c2);
6548 if (REAL_VALUE_ISINF (c2))
6550 /* sqrt(x) > y is x == +Inf, when y is very large. */
6551 if (HONOR_INFINITIES (mode))
6552 return fold_build2 (EQ_EXPR, type, arg,
6553 build_real (TREE_TYPE (arg), c2));
6555 /* sqrt(x) > y is always false, when y is very large
6556 and we don't care about infinities. */
6557 return omit_one_operand (type, integer_zero_node, arg);
6560 /* sqrt(x) > c is the same as x > c*c. */
6561 return fold_build2 (code, type, arg,
6562 build_real (TREE_TYPE (arg), c2));
6564 else if (code == LT_EXPR || code == LE_EXPR)
6566 REAL_VALUE_TYPE c2;
6568 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6569 real_convert (&c2, mode, &c2);
6571 if (REAL_VALUE_ISINF (c2))
6573 /* sqrt(x) < y is always true, when y is a very large
6574 value and we don't care about NaNs or Infinities. */
6575 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6576 return omit_one_operand (type, integer_one_node, arg);
6578 /* sqrt(x) < y is x != +Inf when y is very large and we
6579 don't care about NaNs. */
6580 if (! HONOR_NANS (mode))
6581 return fold_build2 (NE_EXPR, type, arg,
6582 build_real (TREE_TYPE (arg), c2));
6584 /* sqrt(x) < y is x >= 0 when y is very large and we
6585 don't care about Infinities. */
6586 if (! HONOR_INFINITIES (mode))
6587 return fold_build2 (GE_EXPR, type, arg,
6588 build_real (TREE_TYPE (arg), dconst0));
6590 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6591 if (lang_hooks.decls.global_bindings_p () != 0
6592 || CONTAINS_PLACEHOLDER_P (arg))
6593 return NULL_TREE;
6595 arg = save_expr (arg);
6596 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6597 fold_build2 (GE_EXPR, type, arg,
6598 build_real (TREE_TYPE (arg),
6599 dconst0)),
6600 fold_build2 (NE_EXPR, type, arg,
6601 build_real (TREE_TYPE (arg),
6602 c2)));
6605 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6606 if (! HONOR_NANS (mode))
6607 return fold_build2 (code, type, arg,
6608 build_real (TREE_TYPE (arg), c2));
6610 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6611 if (lang_hooks.decls.global_bindings_p () == 0
6612 && ! CONTAINS_PLACEHOLDER_P (arg))
6614 arg = save_expr (arg);
6615 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6616 fold_build2 (GE_EXPR, type, arg,
6617 build_real (TREE_TYPE (arg),
6618 dconst0)),
6619 fold_build2 (code, type, arg,
6620 build_real (TREE_TYPE (arg),
6621 c2)));
6626 return NULL_TREE;
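/* Worked examples (illustrative sketch, not from the original source)
   of the sqrt folds above, for double x:

       sqrt (x) > 2.0    ==>   x > 4.0
       sqrt (x) < -1.0   ==>   0  (always false)
       sqrt (x) > -1.0   ==>   x >= 0.0  when NaNs are honored

   c2 is c*c rounded to the operand's mode, so a very large c
   degenerates into the infinity checks shown.  */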
6629 /* Subroutine of fold() that optimizes comparisons against Infinities,
6630 either +Inf or -Inf.
6632 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6633 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6634 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6636 The function returns the constant folded tree if a simplification
6637 can be made, and NULL_TREE otherwise. */
6639 static tree
6640 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6642 enum machine_mode mode;
6643 REAL_VALUE_TYPE max;
6644 tree temp;
6645 bool neg;
6647 mode = TYPE_MODE (TREE_TYPE (arg0));
6649 /* For negative infinity swap the sense of the comparison. */
6650 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6651 if (neg)
6652 code = swap_tree_comparison (code);
6654 switch (code)
6656 case GT_EXPR:
6657 /* x > +Inf is always false, if we ignore sNaNs. */
6658 if (HONOR_SNANS (mode))
6659 return NULL_TREE;
6660 return omit_one_operand (type, integer_zero_node, arg0);
6662 case LE_EXPR:
6663 /* x <= +Inf is always true, if we don't care about NaNs. */
6664 if (! HONOR_NANS (mode))
6665 return omit_one_operand (type, integer_one_node, arg0);
6667 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6668 if (lang_hooks.decls.global_bindings_p () == 0
6669 && ! CONTAINS_PLACEHOLDER_P (arg0))
6671 arg0 = save_expr (arg0);
6672 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6674 break;
6676 case EQ_EXPR:
6677 case GE_EXPR:
6678 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6679 real_maxval (&max, neg, mode);
6680 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6681 arg0, build_real (TREE_TYPE (arg0), max));
6683 case LT_EXPR:
6684 /* x < +Inf is always equal to x <= DBL_MAX. */
6685 real_maxval (&max, neg, mode);
6686 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6687 arg0, build_real (TREE_TYPE (arg0), max));
6689 case NE_EXPR:
6690 /* x != +Inf is always equal to !(x > DBL_MAX). */
6691 real_maxval (&max, neg, mode);
6692 if (! HONOR_NANS (mode))
6693 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6694 arg0, build_real (TREE_TYPE (arg0), max));
6696 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6697 arg0, build_real (TREE_TYPE (arg0), max));
6698 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6700 default:
6701 break;
6704 return NULL_TREE;
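/* Worked examples (illustrative sketch, not from the original source),
   for double x:

       x < HUGE_VAL    ==>   x <= DBL_MAX
       x >= HUGE_VAL   ==>   x > DBL_MAX
       x <= HUGE_VAL   ==>   x == x  (i.e. !isnan (x)) when NaNs matter

   with the comparison senses swapped for -HUGE_VAL via
   swap_tree_comparison.  */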
6707 /* Subroutine of fold() that optimizes comparisons of a division by
6708 a nonzero integer constant against an integer constant, i.e.
6709 X/C1 op C2.
6711 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6712 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6713 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6715 The function returns the constant folded tree if a simplification
6716 can be made, and NULL_TREE otherwise. */
6718 static tree
6719 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6721 tree prod, tmp, hi, lo;
6722 tree arg00 = TREE_OPERAND (arg0, 0);
6723 tree arg01 = TREE_OPERAND (arg0, 1);
6724 unsigned HOST_WIDE_INT lpart;
6725 HOST_WIDE_INT hpart;
6726 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6727 bool neg_overflow;
6728 int overflow;
6730 /* We have to do this the hard way to detect unsigned overflow.
6731 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6732 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6733 TREE_INT_CST_HIGH (arg01),
6734 TREE_INT_CST_LOW (arg1),
6735 TREE_INT_CST_HIGH (arg1),
6736 &lpart, &hpart, unsigned_p);
6737 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6738 -1, overflow);
6739 neg_overflow = false;
6741 if (unsigned_p)
6743 tmp = int_const_binop (MINUS_EXPR, arg01,
6744 build_int_cst (TREE_TYPE (arg01), 1), 0);
6745 lo = prod;
6747 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6748 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6749 TREE_INT_CST_HIGH (prod),
6750 TREE_INT_CST_LOW (tmp),
6751 TREE_INT_CST_HIGH (tmp),
6752 &lpart, &hpart, unsigned_p);
6753 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6754 -1, overflow | TREE_OVERFLOW (prod));
6756 else if (tree_int_cst_sgn (arg01) >= 0)
6758 tmp = int_const_binop (MINUS_EXPR, arg01,
6759 build_int_cst (TREE_TYPE (arg01), 1), 0);
6760 switch (tree_int_cst_sgn (arg1))
6762 case -1:
6763 neg_overflow = true;
6764 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6765 hi = prod;
6766 break;
6768 case 0:
6769 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6770 hi = tmp;
6771 break;
6773 case 1:
6774 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6775 lo = prod;
6776 break;
6778 default:
6779 gcc_unreachable ();
6782 else
6784 /* A negative divisor reverses the relational operators. */
6785 code = swap_tree_comparison (code);
6787 tmp = int_const_binop (PLUS_EXPR, arg01,
6788 build_int_cst (TREE_TYPE (arg01), 1), 0);
6789 switch (tree_int_cst_sgn (arg1))
6791 case -1:
6792 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6793 lo = prod;
6794 break;
6796 case 0:
6797 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6798 lo = tmp;
6799 break;
6801 case 1:
6802 neg_overflow = true;
6803 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6804 hi = prod;
6805 break;
6807 default:
6808 gcc_unreachable ();
6812 switch (code)
6814 case EQ_EXPR:
6815 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6816 return omit_one_operand (type, integer_zero_node, arg00);
6817 if (TREE_OVERFLOW (hi))
6818 return fold_build2 (GE_EXPR, type, arg00, lo);
6819 if (TREE_OVERFLOW (lo))
6820 return fold_build2 (LE_EXPR, type, arg00, hi);
6821 return build_range_check (type, arg00, 1, lo, hi);
6823 case NE_EXPR:
6824 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6825 return omit_one_operand (type, integer_one_node, arg00);
6826 if (TREE_OVERFLOW (hi))
6827 return fold_build2 (LT_EXPR, type, arg00, lo);
6828 if (TREE_OVERFLOW (lo))
6829 return fold_build2 (GT_EXPR, type, arg00, hi);
6830 return build_range_check (type, arg00, 0, lo, hi);
6832 case LT_EXPR:
6833 if (TREE_OVERFLOW (lo))
6835 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6836 return omit_one_operand (type, tmp, arg00);
6838 return fold_build2 (LT_EXPR, type, arg00, lo);
6840 case LE_EXPR:
6841 if (TREE_OVERFLOW (hi))
6843 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6844 return omit_one_operand (type, tmp, arg00);
6846 return fold_build2 (LE_EXPR, type, arg00, hi);
6848 case GT_EXPR:
6849 if (TREE_OVERFLOW (hi))
6851 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6852 return omit_one_operand (type, tmp, arg00);
6854 return fold_build2 (GT_EXPR, type, arg00, hi);
6856 case GE_EXPR:
6857 if (TREE_OVERFLOW (lo))
6859 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6860 return omit_one_operand (type, tmp, arg00);
6862 return fold_build2 (GE_EXPR, type, arg00, lo);
6864 default:
6865 break;
6868 return NULL_TREE;
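/* A worked example (illustrative sketch, not from the original source):
   for signed int x,

       x / 4 == 2   ==>   the range check  x >= 8 && x <= 11

   (prod == 8 and tmp == 3 above, so lo == 8 and hi == 11), and
   x / 4 != 2 folds to the complementary range check.  */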
6872 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6873 equality/inequality test, then return a simplified form of the test
6874 using a sign test. Otherwise return NULL. TYPE is the desired
6875 result type. */
6877 static tree
6878 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6879 tree result_type)
6881 /* If this is testing a single bit, we can optimize the test. */
6882 if ((code == NE_EXPR || code == EQ_EXPR)
6883 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6884 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6886 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6887 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6888 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6890 if (arg00 != NULL_TREE
6891 /* This is only a win if casting to a signed type is cheap,
6892 i.e. when arg00's type is not a partial mode. */
6893 && TYPE_PRECISION (TREE_TYPE (arg00))
6894 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6896 tree stype = signed_type_for (TREE_TYPE (arg00));
6897 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6898 result_type, fold_convert (stype, arg00),
6899 build_int_cst (stype, 0));
6903 return NULL_TREE;
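/* A worked example (illustrative sketch, not from the original source):
   for a 32-bit unsigned int x, the sign-bit fold above gives

       (x & 0x80000000U) != 0   ==>   (int) x < 0
       (x & 0x80000000U) == 0   ==>   (int) x >= 0

   after casting x to the corresponding signed type.  */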
6906 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6907 equality/inequality test, then return a simplified form of
6908 the test using shifts and logical operations. Otherwise return
6909 NULL. TYPE is the desired result type. */
6911 tree
6912 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6913 tree result_type)
6915 /* If this is testing a single bit, we can optimize the test. */
6916 if ((code == NE_EXPR || code == EQ_EXPR)
6917 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6918 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6920 tree inner = TREE_OPERAND (arg0, 0);
6921 tree type = TREE_TYPE (arg0);
6922 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6923 enum machine_mode operand_mode = TYPE_MODE (type);
6924 int ops_unsigned;
6925 tree signed_type, unsigned_type, intermediate_type;
6926 tree tem, one;
6928 /* First, see if we can fold the single bit test into a sign-bit
6929 test. */
6930 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6931 result_type);
6932 if (tem)
6933 return tem;
6935 /* Otherwise we have (A & C) != 0 where C is a single bit,
6936 convert that into ((A >> C2) & 1), where C2 = log2(C).
6937 Similarly for (A & C) == 0. */
6939 /* If INNER is a right shift of a constant and it plus BITNUM does
6940 not overflow, adjust BITNUM and INNER. */
6941 if (TREE_CODE (inner) == RSHIFT_EXPR
6942 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6943 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6944 && bitnum < TYPE_PRECISION (type)
6945 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6946 bitnum - TYPE_PRECISION (type)))
6948 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6949 inner = TREE_OPERAND (inner, 0);
6952 /* If we are going to be able to omit the AND below, we must do our
6953 operations as unsigned. If we must use the AND, we have a choice.
6954 Normally unsigned is faster, but for some machines signed is. */
6955 #ifdef LOAD_EXTEND_OP
6956 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6957 && !flag_syntax_only) ? 0 : 1;
6958 #else
6959 ops_unsigned = 1;
6960 #endif
6962 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6963 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6964 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6965 inner = fold_convert (intermediate_type, inner);
6967 if (bitnum != 0)
6968 inner = build2 (RSHIFT_EXPR, intermediate_type,
6969 inner, size_int (bitnum));
6971 one = build_int_cst (intermediate_type, 1);
6973 if (code == EQ_EXPR)
6974 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6976 /* Put the AND last so it can combine with more things. */
6977 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6979 /* Make sure to return the proper type. */
6980 inner = fold_convert (result_type, inner);
6982 return inner;
6984 return NULL_TREE;
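/* A worked example (illustrative sketch, not from the original source):
   when the sign-bit form does not apply,

       (x & 8) != 0   ==>   (x >> 3) & 1
       (x & 8) == 0   ==>   ((x >> 3) ^ 1) & 1

   computed in a (usually unsigned) intermediate type, with the final
   AND placed last so it can combine with surrounding code.  */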
6987 /* Check whether we are allowed to reorder operands arg0 and arg1,
6988 such that the evaluation of arg1 occurs before arg0. */
6990 static bool
6991 reorder_operands_p (const_tree arg0, const_tree arg1)
6993 if (! flag_evaluation_order)
6994 return true;
6995 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6996 return true;
6997 return ! TREE_SIDE_EFFECTS (arg0)
6998 && ! TREE_SIDE_EFFECTS (arg1);
7001 /* Test whether it is preferable to swap two operands, ARG0 and
7002 ARG1, for example because ARG0 is an integer constant and ARG1
7003 isn't. If REORDER is true, only recommend swapping if we can
7004 evaluate the operands in reverse order. */
7006 bool
7007 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7009 STRIP_SIGN_NOPS (arg0);
7010 STRIP_SIGN_NOPS (arg1);
7012 if (TREE_CODE (arg1) == INTEGER_CST)
7013 return 0;
7014 if (TREE_CODE (arg0) == INTEGER_CST)
7015 return 1;
7017 if (TREE_CODE (arg1) == REAL_CST)
7018 return 0;
7019 if (TREE_CODE (arg0) == REAL_CST)
7020 return 1;
7022 if (TREE_CODE (arg1) == FIXED_CST)
7023 return 0;
7024 if (TREE_CODE (arg0) == FIXED_CST)
7025 return 1;
7027 if (TREE_CODE (arg1) == COMPLEX_CST)
7028 return 0;
7029 if (TREE_CODE (arg0) == COMPLEX_CST)
7030 return 1;
7032 if (TREE_CONSTANT (arg1))
7033 return 0;
7034 if (TREE_CONSTANT (arg0))
7035 return 1;
7037 if (optimize_size)
7038 return 0;
7040 if (reorder && flag_evaluation_order
7041 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7042 return 0;
7044 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7045 for commutative and comparison operators. Ensuring a canonical
7046 form allows the optimizers to find additional redundancies without
7047 having to explicitly check for both orderings. */
7048 if (TREE_CODE (arg0) == SSA_NAME
7049 && TREE_CODE (arg1) == SSA_NAME
7050 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7051 return 1;
7053 /* Put SSA_NAMEs last. */
7054 if (TREE_CODE (arg1) == SSA_NAME)
7055 return 0;
7056 if (TREE_CODE (arg0) == SSA_NAME)
7057 return 1;
7059 /* Put variables last. */
7060 if (DECL_P (arg1))
7061 return 0;
7062 if (DECL_P (arg0))
7063 return 1;
7065 return 0;
7068 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7069 ARG0 is extended to a wider type. */
7071 static tree
7072 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7074 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7075 tree arg1_unw;
7076 tree shorter_type, outer_type;
7077 tree min, max;
7078 bool above, below;
7080 if (arg0_unw == arg0)
7081 return NULL_TREE;
7082 shorter_type = TREE_TYPE (arg0_unw);
7084 #ifdef HAVE_canonicalize_funcptr_for_compare
7085 /* Disable this optimization if we're casting a function pointer
7086 type on targets that require function pointer canonicalization. */
7087 if (HAVE_canonicalize_funcptr_for_compare
7088 && TREE_CODE (shorter_type) == POINTER_TYPE
7089 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7090 return NULL_TREE;
7091 #endif
7093 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7094 return NULL_TREE;
7096 arg1_unw = get_unwidened (arg1, shorter_type);
7098 /* If possible, express the comparison in the shorter mode. */
7099 if ((code == EQ_EXPR || code == NE_EXPR
7100 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7101 && (TREE_TYPE (arg1_unw) == shorter_type
7102 || (TREE_CODE (arg1_unw) == INTEGER_CST
7103 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7104 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7105 && int_fits_type_p (arg1_unw, shorter_type))))
7106 return fold_build2 (code, type, arg0_unw,
7107 fold_convert (shorter_type, arg1_unw));
7109 if (TREE_CODE (arg1_unw) != INTEGER_CST
7110 || TREE_CODE (shorter_type) != INTEGER_TYPE
7111 || !int_fits_type_p (arg1_unw, shorter_type))
7112 return NULL_TREE;
7114 /* If we are comparing with an integer that does not fit into the range
7115 of the shorter type, the result is known. */
7116 outer_type = TREE_TYPE (arg1_unw);
7117 min = lower_bound_in_type (outer_type, shorter_type);
7118 max = upper_bound_in_type (outer_type, shorter_type);
7120 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7121 max, arg1_unw));
7122 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7123 arg1_unw, min));
7125 switch (code)
7127 case EQ_EXPR:
7128 if (above || below)
7129 return omit_one_operand (type, integer_zero_node, arg0);
7130 break;
7132 case NE_EXPR:
7133 if (above || below)
7134 return omit_one_operand (type, integer_one_node, arg0);
7135 break;
7137 case LT_EXPR:
7138 case LE_EXPR:
7139 if (above)
7140 return omit_one_operand (type, integer_one_node, arg0);
7141 else if (below)
7142 return omit_one_operand (type, integer_zero_node, arg0);
7144 case GT_EXPR:
7145 case GE_EXPR:
7146 if (above)
7147 return omit_one_operand (type, integer_zero_node, arg0);
7148 else if (below)
7149 return omit_one_operand (type, integer_one_node, arg0);
7151 default:
7152 break;
7155 return NULL_TREE;
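/* Worked examples (illustrative sketch, not from the original source):
   for "signed char c;",

       (int) c == 300   ==>   0  (300 is above SCHAR_MAX)
       (int) c <= 200   ==>   1
       (int) c == 100   ==>   c == 100, done in the narrow type

   because the constant either fits the shorter type or lies wholly
   outside its range.  */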
7158 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7159 ARG0 just the signedness is changed. */
7161 static tree
7162 fold_sign_changed_comparison (enum tree_code code, tree type,
7163 tree arg0, tree arg1)
7165 tree arg0_inner;
7166 tree inner_type, outer_type;
7168 if (TREE_CODE (arg0) != NOP_EXPR
7169 && TREE_CODE (arg0) != CONVERT_EXPR)
7170 return NULL_TREE;
7172 outer_type = TREE_TYPE (arg0);
7173 arg0_inner = TREE_OPERAND (arg0, 0);
7174 inner_type = TREE_TYPE (arg0_inner);
7176 #ifdef HAVE_canonicalize_funcptr_for_compare
7177 /* Disable this optimization if we're casting a function pointer
7178 type on targets that require function pointer canonicalization. */
7179 if (HAVE_canonicalize_funcptr_for_compare
7180 && TREE_CODE (inner_type) == POINTER_TYPE
7181 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7182 return NULL_TREE;
7183 #endif
7185 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7186 return NULL_TREE;
7188 if (TREE_CODE (arg1) != INTEGER_CST
7189 && !((TREE_CODE (arg1) == NOP_EXPR
7190 || TREE_CODE (arg1) == CONVERT_EXPR)
7191 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7192 return NULL_TREE;
7194 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7195 && code != NE_EXPR
7196 && code != EQ_EXPR)
7197 return NULL_TREE;
7199 if (TREE_CODE (arg1) == INTEGER_CST)
7200 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7201 TREE_INT_CST_HIGH (arg1), 0,
7202 TREE_OVERFLOW (arg1));
7203 else
7204 arg1 = fold_convert (inner_type, arg1);
7206 return fold_build2 (code, type, arg0_inner, arg1);
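/* A worked example (illustrative sketch, not from the original source):
   for int i,

       (unsigned int) i == 5u   ==>   i == 5

   equality is insensitive to signedness when the precision is
   unchanged, and the constant is refitted to the inner type via
   force_fit_type_double.  */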
7209 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7210 the step of the array. Reconstructs s and delta in the case of s * delta
7211 being an integer constant (and thus already folded).
7212 ADDR is the address. OP1 is the multiplicative expression.
7213 If the function succeeds, the new address expression is returned. Otherwise
7214 NULL_TREE is returned. */
7216 static tree
7217 try_move_mult_to_index (tree addr, tree op1)
7219 tree s, delta, step;
7220 tree ref = TREE_OPERAND (addr, 0), pref;
7221 tree ret, pos;
7222 tree itype;
7223 bool mdim = false;
7225 /* Strip the nops that might be added when converting op1 to sizetype. */
7226 STRIP_NOPS (op1);
7228 /* Canonicalize op1 into a possibly non-constant delta
7229 and an INTEGER_CST s. */
7230 if (TREE_CODE (op1) == MULT_EXPR)
7232 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7234 STRIP_NOPS (arg0);
7235 STRIP_NOPS (arg1);
7237 if (TREE_CODE (arg0) == INTEGER_CST)
7239 s = arg0;
7240 delta = arg1;
7242 else if (TREE_CODE (arg1) == INTEGER_CST)
7244 s = arg1;
7245 delta = arg0;
7247 else
7248 return NULL_TREE;
7250 else if (TREE_CODE (op1) == INTEGER_CST)
7252 delta = op1;
7253 s = NULL_TREE;
7255 else
7257 /* Treat op1 as delta * 1. */
7258 delta = op1;
7259 s = integer_one_node;
7262 for (;; ref = TREE_OPERAND (ref, 0))
7264 if (TREE_CODE (ref) == ARRAY_REF)
7266 /* Remember if this was a multi-dimensional array. */
7267 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7268 mdim = true;
7270 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7271 if (! itype)
7272 continue;
7274 step = array_ref_element_size (ref);
7275 if (TREE_CODE (step) != INTEGER_CST)
7276 continue;
7278 if (s)
7280 if (! tree_int_cst_equal (step, s))
7281 continue;
7283 else
7285 /* See if delta is a multiple of step. */
7286 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7287 if (! tmp)
7288 continue;
7289 delta = tmp;
7292 /* Only fold here if we can verify we do not overflow one
7293 dimension of a multi-dimensional array. */
7294 if (mdim)
7296 tree tmp;
7298 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7299 || !INTEGRAL_TYPE_P (itype)
7300 || !TYPE_MAX_VALUE (itype)
7301 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7302 continue;
7304 tmp = fold_binary (PLUS_EXPR, itype,
7305 fold_convert (itype,
7306 TREE_OPERAND (ref, 1)),
7307 fold_convert (itype, delta));
7308 if (!tmp
7309 || TREE_CODE (tmp) != INTEGER_CST
7310 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7311 continue;
7314 break;
7316 else
7317 mdim = false;
7319 if (!handled_component_p (ref))
7320 return NULL_TREE;
7323 /* We found a suitable array reference. Copy everything up to it,
7324 and replace the index. */
7326 pref = TREE_OPERAND (addr, 0);
7327 ret = copy_node (pref);
7328 pos = ret;
7330 while (pref != ref)
7332 pref = TREE_OPERAND (pref, 0);
7333 TREE_OPERAND (pos, 0) = copy_node (pref);
7334 pos = TREE_OPERAND (pos, 0);
7337 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7338 fold_convert (itype,
7339 TREE_OPERAND (pos, 1)),
7340 fold_convert (itype, delta));
7342 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
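/* Worked examples (illustrative sketch, not from the original source):
   for "int a[10];" (element size 4),

       &a[1] p+ 4 * k   ==>   &a[1 + k]
       &a[1] p+ 8       ==>   &a[3]

   the second reconstructing delta == 2 from the already-folded
   constant, subject to the multi-dimensional overflow check above.  */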
7346 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7347 means A >= Y && A != MAX, but in this case we know that
7348 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7350 static tree
7351 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7353 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7355 if (TREE_CODE (bound) == LT_EXPR)
7356 a = TREE_OPERAND (bound, 0);
7357 else if (TREE_CODE (bound) == GT_EXPR)
7358 a = TREE_OPERAND (bound, 1);
7359 else
7360 return NULL_TREE;
7362 typea = TREE_TYPE (a);
7363 if (!INTEGRAL_TYPE_P (typea)
7364 && !POINTER_TYPE_P (typea))
7365 return NULL_TREE;
7367 if (TREE_CODE (ineq) == LT_EXPR)
7369 a1 = TREE_OPERAND (ineq, 1);
7370 y = TREE_OPERAND (ineq, 0);
7372 else if (TREE_CODE (ineq) == GT_EXPR)
7374 a1 = TREE_OPERAND (ineq, 0);
7375 y = TREE_OPERAND (ineq, 1);
7377 else
7378 return NULL_TREE;
7380 if (TREE_TYPE (a1) != typea)
7381 return NULL_TREE;
7383 if (POINTER_TYPE_P (typea))
7385 /* Convert the pointer types into integers before taking the difference. */
7386 tree ta = fold_convert (ssizetype, a);
7387 tree ta1 = fold_convert (ssizetype, a1);
7388 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7390 else
7391 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7393 if (!diff || !integer_onep (diff))
7394 return NULL_TREE;
7396 return fold_build2 (GE_EXPR, type, a, y);
7399 /* Fold a sum or difference of at least one multiplication.
7400 Returns the folded tree or NULL if no simplification could be made. */
7402 static tree
7403 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7405 tree arg00, arg01, arg10, arg11;
7406 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7408 /* (A * C) +- (B * C) -> (A+-B) * C.
7409 (A * C) +- A -> A * (C+-1).
7410 We are most concerned about the case where C is a constant,
7411 but other combinations show up during loop reduction. Since
7412 it is not difficult, try all four possibilities. */
7414 if (TREE_CODE (arg0) == MULT_EXPR)
7416 arg00 = TREE_OPERAND (arg0, 0);
7417 arg01 = TREE_OPERAND (arg0, 1);
7419 else if (TREE_CODE (arg0) == INTEGER_CST)
7421 arg00 = build_one_cst (type);
7422 arg01 = arg0;
7424 else
7426 /* We cannot generate constant 1 for fract types, whose range excludes 1. */
7427 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7428 return NULL_TREE;
7429 arg00 = arg0;
7430 arg01 = build_one_cst (type);
7432 if (TREE_CODE (arg1) == MULT_EXPR)
7434 arg10 = TREE_OPERAND (arg1, 0);
7435 arg11 = TREE_OPERAND (arg1, 1);
7437 else if (TREE_CODE (arg1) == INTEGER_CST)
7439 arg10 = build_one_cst (type);
7440 arg11 = arg1;
7442 else
7444 /* We cannot generate constant 1 for fract types, whose range excludes 1. */
7445 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7446 return NULL_TREE;
7447 arg10 = arg1;
7448 arg11 = build_one_cst (type);
7450 same = NULL_TREE;
7452 if (operand_equal_p (arg01, arg11, 0))
7453 same = arg01, alt0 = arg00, alt1 = arg10;
7454 else if (operand_equal_p (arg00, arg10, 0))
7455 same = arg00, alt0 = arg01, alt1 = arg11;
7456 else if (operand_equal_p (arg00, arg11, 0))
7457 same = arg00, alt0 = arg01, alt1 = arg10;
7458 else if (operand_equal_p (arg01, arg10, 0))
7459 same = arg01, alt0 = arg00, alt1 = arg11;
7461 /* No identical multiplicands; see if we can find a common
7462 power-of-two factor in non-power-of-two multiplies. This
7463 can help in multi-dimensional array access. */
7464 else if (host_integerp (arg01, 0)
7465 && host_integerp (arg11, 0))
7467 HOST_WIDE_INT int01, int11, tmp;
7468 bool swap = false;
7469 tree maybe_same;
7470 int01 = TREE_INT_CST_LOW (arg01);
7471 int11 = TREE_INT_CST_LOW (arg11);
7473 /* Move min of absolute values to int11. */
7474 if ((int01 >= 0 ? int01 : -int01)
7475 < (int11 >= 0 ? int11 : -int11))
7477 tmp = int01, int01 = int11, int11 = tmp;
7478 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7479 maybe_same = arg01;
7480 swap = true;
7482 else
7483 maybe_same = arg11;
7485 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7487 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7488 build_int_cst (TREE_TYPE (arg00),
7489 int01 / int11));
7490 alt1 = arg10;
7491 same = maybe_same;
7492 if (swap)
7493 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7497 if (same)
7498 return fold_build2 (MULT_EXPR, type,
7499 fold_build2 (code, type,
7500 fold_convert (type, alt0),
7501 fold_convert (type, alt1)),
7502 fold_convert (type, same));
7504 return NULL_TREE;
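/* Illustrative instances of the four cases tried above:
     x*8 + y*8   -> (x + y) * 8     (identical multiplicands)
     x*8 + x     -> x * (8 + 1)     (A treated as A * 1)
     i*12 + j*4  -> (i*3 + j) * 4   (common power-of-two factor,
                                     since 4 == 2^2 and 12 % 4 == 0)
   The last form is what multi-dimensional array indexing tends to
   produce.  */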
7507 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7508 specified by EXPR into the buffer PTR of length LEN bytes.
7509 Return the number of bytes placed in the buffer, or zero
7510 upon failure. */
7512 static int
7513 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7515 tree type = TREE_TYPE (expr);
7516 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7517 int byte, offset, word, words;
7518 unsigned char value;
7520 if (total_bytes > len)
7521 return 0;
7522 words = total_bytes / UNITS_PER_WORD;
7524 for (byte = 0; byte < total_bytes; byte++)
7526 int bitpos = byte * BITS_PER_UNIT;
7527 if (bitpos < HOST_BITS_PER_WIDE_INT)
7528 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7529 else
7530 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7531 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7533 if (total_bytes > UNITS_PER_WORD)
7535 word = byte / UNITS_PER_WORD;
7536 if (WORDS_BIG_ENDIAN)
7537 word = (words - 1) - word;
7538 offset = word * UNITS_PER_WORD;
7539 if (BYTES_BIG_ENDIAN)
7540 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7541 else
7542 offset += byte % UNITS_PER_WORD;
7544 else
7545 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7546 ptr[offset] = value;
7548 return total_bytes;
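/* Example layout (a sketch, assuming a 4-byte integer type and
   8-bit units): encoding the INTEGER_CST 0x11223344 stores
   { 0x44, 0x33, 0x22, 0x11 } on a little-endian target and
   { 0x11, 0x22, 0x33, 0x44 } on a big-endian one, returning 4 in
   either case.  */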
7552 /* Subroutine of native_encode_expr. Encode the REAL_CST
7553 specified by EXPR into the buffer PTR of length LEN bytes.
7554 Return the number of bytes placed in the buffer, or zero
7555 upon failure. */
7557 static int
7558 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7560 tree type = TREE_TYPE (expr);
7561 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7562 int byte, offset, word, words, bitpos;
7563 unsigned char value;
7565 /* There are always 32 bits in each long, no matter the size of
7566 the host's long. We handle floating point representations with
7567 up to 192 bits. */
7568 long tmp[6];
7570 if (total_bytes > len)
7571 return 0;
7572 words = 32 / UNITS_PER_WORD;
7574 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7576 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7577 bitpos += BITS_PER_UNIT)
7579 byte = (bitpos / BITS_PER_UNIT) & 3;
7580 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7582 if (UNITS_PER_WORD < 4)
7584 word = byte / UNITS_PER_WORD;
7585 if (WORDS_BIG_ENDIAN)
7586 word = (words - 1) - word;
7587 offset = word * UNITS_PER_WORD;
7588 if (BYTES_BIG_ENDIAN)
7589 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7590 else
7591 offset += byte % UNITS_PER_WORD;
7593 else
7594 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7595 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7597 return total_bytes;
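/* The image produced by real_to_target arrives in 32-bit chunks,
   so the shuffle above only permutes bytes within each group of
   four; e.g. for a 64-bit double on a little-endian target, output
   bytes 0-3 come from tmp[0] and bytes 4-7 from tmp[1], each group
   written least-significant byte first.  */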
7600 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7601 specified by EXPR into the buffer PTR of length LEN bytes.
7602 Return the number of bytes placed in the buffer, or zero
7603 upon failure. */
7605 static int
7606 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7608 int rsize, isize;
7609 tree part;
7611 part = TREE_REALPART (expr);
7612 rsize = native_encode_expr (part, ptr, len);
7613 if (rsize == 0)
7614 return 0;
7615 part = TREE_IMAGPART (expr);
7616 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7617 if (isize != rsize)
7618 return 0;
7619 return rsize + isize;
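/* For instance, a _Complex double constant occupies 16 buffer
   bytes: the real part's 8 bytes, produced by native_encode_real,
   followed immediately by the imaginary part's 8 bytes.  */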
7623 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7624 specified by EXPR into the buffer PTR of length LEN bytes.
7625 Return the number of bytes placed in the buffer, or zero
7626 upon failure. */
7628 static int
7629 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7631 int i, size, offset, count;
7632 tree itype, elem, elements;
7634 offset = 0;
7635 elements = TREE_VECTOR_CST_ELTS (expr);
7636 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7637 itype = TREE_TYPE (TREE_TYPE (expr));
7638 size = GET_MODE_SIZE (TYPE_MODE (itype));
7639 for (i = 0; i < count; i++)
7641 if (elements)
7643 elem = TREE_VALUE (elements);
7644 elements = TREE_CHAIN (elements);
7646 else
7647 elem = NULL_TREE;
7649 if (elem)
7651 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7652 return 0;
7654 else
7656 if (offset + size > len)
7657 return 0;
7658 memset (ptr+offset, 0, size);
7660 offset += size;
7662 return offset;
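/* Elements are emitted in order, and trailing elements missing from
   TREE_VECTOR_CST_ELTS are implicitly zero, hence the memset.  For
   example, a V4SI constant with only { 1, 2 } listed encodes two
   4-byte integers followed by 8 zero bytes and returns 16.  */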
7666 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7667 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7668 buffer PTR of length LEN bytes. Return the number of bytes
7669 placed in the buffer, or zero upon failure. */
7671 int
7672 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7674 switch (TREE_CODE (expr))
7676 case INTEGER_CST:
7677 return native_encode_int (expr, ptr, len);
7679 case REAL_CST:
7680 return native_encode_real (expr, ptr, len);
7682 case COMPLEX_CST:
7683 return native_encode_complex (expr, ptr, len);
7685 case VECTOR_CST:
7686 return native_encode_vector (expr, ptr, len);
7688 default:
7689 return 0;
7694 /* Subroutine of native_interpret_expr. Interpret the contents of
7695 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7696 If the buffer cannot be interpreted, return NULL_TREE. */
7698 static tree
7699 native_interpret_int (tree type, const unsigned char *ptr, int len)
7701 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7702 int byte, offset, word, words;
7703 unsigned char value;
7704 unsigned HOST_WIDE_INT lo = 0;
7705 HOST_WIDE_INT hi = 0;
7707 if (total_bytes > len)
7708 return NULL_TREE;
7709 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7710 return NULL_TREE;
7711 words = total_bytes / UNITS_PER_WORD;
7713 for (byte = 0; byte < total_bytes; byte++)
7715 int bitpos = byte * BITS_PER_UNIT;
7716 if (total_bytes > UNITS_PER_WORD)
7718 word = byte / UNITS_PER_WORD;
7719 if (WORDS_BIG_ENDIAN)
7720 word = (words - 1) - word;
7721 offset = word * UNITS_PER_WORD;
7722 if (BYTES_BIG_ENDIAN)
7723 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7724 else
7725 offset += byte % UNITS_PER_WORD;
7727 else
7728 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7729 value = ptr[offset];
7731 if (bitpos < HOST_BITS_PER_WIDE_INT)
7732 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7733 else
7734 hi |= (unsigned HOST_WIDE_INT) value
7735 << (bitpos - HOST_BITS_PER_WIDE_INT);
7738 return build_int_cst_wide_type (type, lo, hi);
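/* This is the inverse of native_encode_int: reading back the
   little-endian bytes { 0x44, 0x33, 0x22, 0x11 } as a 4-byte
   integer type reconstructs lo == 0x11223344; hi only comes into
   play for types wider than HOST_BITS_PER_WIDE_INT.  */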
7742 /* Subroutine of native_interpret_expr. Interpret the contents of
7743 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7744 If the buffer cannot be interpreted, return NULL_TREE. */
7746 static tree
7747 native_interpret_real (tree type, const unsigned char *ptr, int len)
7749 enum machine_mode mode = TYPE_MODE (type);
7750 int total_bytes = GET_MODE_SIZE (mode);
7751 int byte, offset, word, words, bitpos;
7752 unsigned char value;
7753 /* There are always 32 bits in each long, no matter the size of
7754 the host's long. We handle floating point representations with
7755 up to 192 bits. */
7756 REAL_VALUE_TYPE r;
7757 long tmp[6];
7759 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7760 if (total_bytes > len || total_bytes > 24)
7761 return NULL_TREE;
7762 words = 32 / UNITS_PER_WORD;
7764 memset (tmp, 0, sizeof (tmp));
7765 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7766 bitpos += BITS_PER_UNIT)
7768 byte = (bitpos / BITS_PER_UNIT) & 3;
7769 if (UNITS_PER_WORD < 4)
7771 word = byte / UNITS_PER_WORD;
7772 if (WORDS_BIG_ENDIAN)
7773 word = (words - 1) - word;
7774 offset = word * UNITS_PER_WORD;
7775 if (BYTES_BIG_ENDIAN)
7776 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7777 else
7778 offset += byte % UNITS_PER_WORD;
7780 else
7781 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7782 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7784 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7787 real_from_target (&r, tmp, mode);
7788 return build_real (type, r);
7792 /* Subroutine of native_interpret_expr. Interpret the contents of
7793 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7794 If the buffer cannot be interpreted, return NULL_TREE. */
7796 static tree
7797 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7799 tree etype, rpart, ipart;
7800 int size;
7802 etype = TREE_TYPE (type);
7803 size = GET_MODE_SIZE (TYPE_MODE (etype));
7804 if (size * 2 > len)
7805 return NULL_TREE;
7806 rpart = native_interpret_expr (etype, ptr, size);
7807 if (!rpart)
7808 return NULL_TREE;
7809 ipart = native_interpret_expr (etype, ptr+size, size);
7810 if (!ipart)
7811 return NULL_TREE;
7812 return build_complex (type, rpart, ipart);
7816 /* Subroutine of native_interpret_expr. Interpret the contents of
7817 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7818 If the buffer cannot be interpreted, return NULL_TREE. */
7820 static tree
7821 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7823 tree etype, elem, elements;
7824 int i, size, count;
7826 etype = TREE_TYPE (type);
7827 size = GET_MODE_SIZE (TYPE_MODE (etype));
7828 count = TYPE_VECTOR_SUBPARTS (type);
7829 if (size * count > len)
7830 return NULL_TREE;
7832 elements = NULL_TREE;
7833 for (i = count - 1; i >= 0; i--)
7835 elem = native_interpret_expr (etype, ptr+(i*size), size);
7836 if (!elem)
7837 return NULL_TREE;
7838 elements = tree_cons (NULL_TREE, elem, elements);
7840 return build_vector (type, elements);
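/* The loop runs from the last element down so that tree_cons
   accumulates the TREE_LIST in ascending element order, matching
   the layout native_encode_vector produces.  */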
7844 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7845 the buffer PTR of length LEN as a constant of type TYPE. For
7846 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7847 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7848 return NULL_TREE. */
7850 tree
7851 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7853 switch (TREE_CODE (type))
7855 case INTEGER_TYPE:
7856 case ENUMERAL_TYPE:
7857 case BOOLEAN_TYPE:
7858 return native_interpret_int (type, ptr, len);
7860 case REAL_TYPE:
7861 return native_interpret_real (type, ptr, len);
7863 case COMPLEX_TYPE:
7864 return native_interpret_complex (type, ptr, len);
7866 case VECTOR_TYPE:
7867 return native_interpret_vector (type, ptr, len);
7869 default:
7870 return NULL_TREE;
7875 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7876 TYPE at compile-time. If we're unable to perform the conversion
7877 return NULL_TREE. */
7879 static tree
7880 fold_view_convert_expr (tree type, tree expr)
7882 /* We support up to 512-bit values (for V8DFmode). */
7883 unsigned char buffer[64];
7884 int len;
7886 /* Check that the host and target are sane. */
7887 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7888 return NULL_TREE;
7890 len = native_encode_expr (expr, buffer, sizeof (buffer));
7891 if (len == 0)
7892 return NULL_TREE;
7894 return native_interpret_expr (type, buffer, len);
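/* A sketch of the overall effect (assuming IEEE single precision,
   a 4-byte int and a little-endian target):

     tree f = build_real (float_type_node, dconst1);
     tree i = fold_view_convert_expr (integer_type_node, f);

   encodes 1.0f as the bytes { 0x00, 0x00, 0x80, 0x3f } and
   reinterprets them, so i becomes the INTEGER_CST 0x3f800000.  */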
7897 /* Build an expression for the address of T. Folds away INDIRECT_REF
7898 to avoid confusing the gimplify process. When IN_FOLD is true
7899 avoid modifications of T. */
7901 static tree
7902 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7904 /* The size of the object is not relevant when talking about its address. */
7905 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7906 t = TREE_OPERAND (t, 0);
7908 /* Note: this doesn't apply to ALIGN_INDIRECT_REF. */
7909 if (TREE_CODE (t) == INDIRECT_REF
7910 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7912 t = TREE_OPERAND (t, 0);
7914 if (TREE_TYPE (t) != ptrtype)
7915 t = build1 (NOP_EXPR, ptrtype, t);
7917 else if (!in_fold)
7919 tree base = t;
7921 while (handled_component_p (base))
7922 base = TREE_OPERAND (base, 0);
7924 if (DECL_P (base))
7925 TREE_ADDRESSABLE (base) = 1;
7927 t = build1 (ADDR_EXPR, ptrtype, t);
7929 else
7930 t = build1 (ADDR_EXPR, ptrtype, t);
7932 return t;
7935 /* Build an expression for the address of T with type PTRTYPE. This
7936 function modifies the input parameter 'T' by sometimes setting the
7937 TREE_ADDRESSABLE flag. */
7939 tree
7940 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7942 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7945 /* Build an expression for the address of T. This function modifies
7946 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7947 flag. When called from fold functions, use fold_addr_expr instead. */
7949 tree
7950 build_fold_addr_expr (tree t)
7952 return build_fold_addr_expr_with_type_1 (t,
7953 build_pointer_type (TREE_TYPE (t)),
7954 false);
7957 /* Same as build_fold_addr_expr, builds an expression for the address
7958 of T, but avoids touching the input node 't'. Fold functions
7959 should use this version. */
7961 static tree
7962 fold_addr_expr (tree t)
7964 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7966 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
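/* Example: applied to the INDIRECT_REF "*p", fold_addr_expr yields
   "p" itself (wrapped in a NOP_EXPR if the pointer type needs
   adjusting) instead of building "&*p", and unlike
   build_fold_addr_expr it never sets TREE_ADDRESSABLE on the
   underlying decl.  */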
7969 /* Fold a unary expression of code CODE and type TYPE with operand
7970 OP0. Return the folded expression if folding is successful.
7971 Otherwise, return NULL_TREE. */
7973 tree
7974 fold_unary (enum tree_code code, tree type, tree op0)
7976 tree tem;
7977 tree arg0;
7978 enum tree_code_class kind = TREE_CODE_CLASS (code);
7980 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7981 && TREE_CODE_LENGTH (code) == 1);
7983 arg0 = op0;
7984 if (arg0)
7986 if (code == NOP_EXPR || code == CONVERT_EXPR
7987 || code == FLOAT_EXPR || code == ABS_EXPR)
7989 /* Don't use STRIP_NOPS, because signedness of argument type
7990 matters. */
7991 STRIP_SIGN_NOPS (arg0);
7993 else
7995 /* Strip any conversions that don't change the mode. This
7996 is safe for every expression, except for a comparison
7997 expression because its signedness is derived from its
7998 operands.
8000 Note that this is done as an internal manipulation within
8001 the constant folder, in order to find the simplest
8002 representation of the arguments so that their form can be
8003 studied. In any case, the appropriate type conversions
8004 should be put back in the tree that eventually leaves the
8005 constant folder. */
8006 STRIP_NOPS (arg0);
8010 if (TREE_CODE_CLASS (code) == tcc_unary)
8012 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8013 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8014 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
8015 else if (TREE_CODE (arg0) == COND_EXPR)
8017 tree arg01 = TREE_OPERAND (arg0, 1);
8018 tree arg02 = TREE_OPERAND (arg0, 2);
8019 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8020 arg01 = fold_build1 (code, type, arg01);
8021 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8022 arg02 = fold_build1 (code, type, arg02);
8023 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8024 arg01, arg02);
8026 /* If this was a conversion, and all we did was to move it
8027 inside the COND_EXPR, bring it back out. But leave it if
8028 it is a conversion from integer to integer and the
8029 result precision is no wider than a word since such a
8030 conversion is cheap and may be optimized away by combine,
8031 while it couldn't if it were outside the COND_EXPR. Then return
8032 so we don't get into an infinite recursion loop taking the
8033 conversion out and then back in. */
8035 if ((code == NOP_EXPR || code == CONVERT_EXPR
8036 || code == NON_LVALUE_EXPR)
8037 && TREE_CODE (tem) == COND_EXPR
8038 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8039 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8040 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8041 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8042 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8043 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8044 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8045 && (INTEGRAL_TYPE_P
8046 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8047 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8048 || flag_syntax_only))
8049 tem = build1 (code, type,
8050 build3 (COND_EXPR,
8051 TREE_TYPE (TREE_OPERAND
8052 (TREE_OPERAND (tem, 1), 0)),
8053 TREE_OPERAND (tem, 0),
8054 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8055 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8056 return tem;
8058 else if (COMPARISON_CLASS_P (arg0))
8060 if (TREE_CODE (type) == BOOLEAN_TYPE)
8062 arg0 = copy_node (arg0);
8063 TREE_TYPE (arg0) = type;
8064 return arg0;
8066 else if (TREE_CODE (type) != INTEGER_TYPE)
8067 return fold_build3 (COND_EXPR, type, arg0,
8068 fold_build1 (code, type,
8069 integer_one_node),
8070 fold_build1 (code, type,
8071 integer_zero_node));
8075 switch (code)
8077 case NOP_EXPR:
8078 case FLOAT_EXPR:
8079 case CONVERT_EXPR:
8080 case FIX_TRUNC_EXPR:
8081 if (TREE_TYPE (op0) == type)
8082 return op0;
8084 /* If we have (type) (a CMP b) and type is an integral type, return a
8085 new expression involving the new type. */
8086 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8087 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8088 TREE_OPERAND (op0, 1));
8090 /* Handle cases of two conversions in a row. */
8091 if (TREE_CODE (op0) == NOP_EXPR
8092 || TREE_CODE (op0) == CONVERT_EXPR)
8094 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8095 tree inter_type = TREE_TYPE (op0);
8096 int inside_int = INTEGRAL_TYPE_P (inside_type);
8097 int inside_ptr = POINTER_TYPE_P (inside_type);
8098 int inside_float = FLOAT_TYPE_P (inside_type);
8099 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8100 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8101 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8102 int inter_int = INTEGRAL_TYPE_P (inter_type);
8103 int inter_ptr = POINTER_TYPE_P (inter_type);
8104 int inter_float = FLOAT_TYPE_P (inter_type);
8105 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8106 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8107 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8108 int final_int = INTEGRAL_TYPE_P (type);
8109 int final_ptr = POINTER_TYPE_P (type);
8110 int final_float = FLOAT_TYPE_P (type);
8111 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8112 unsigned int final_prec = TYPE_PRECISION (type);
8113 int final_unsignedp = TYPE_UNSIGNED (type);
8115 /* In addition to the cases of two conversions in a row
8116 handled below, if we are converting something to its own
8117 type via an object of identical or wider precision, neither
8118 conversion is needed. */
8119 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8120 && (((inter_int || inter_ptr) && final_int)
8121 || (inter_float && final_float))
8122 && inter_prec >= final_prec)
8123 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8125 /* Likewise, if the intermediate and final types are either both
8126 float or both integer, we don't need the middle conversion if
8127 it is wider than the final type and doesn't change the signedness
8128 (for integers). Avoid this if the final type is a pointer
8129 since then we sometimes need the inner conversion. Likewise if
8130 the outer has a precision not equal to the size of its mode. */
8131 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
8132 || (inter_float && inside_float)
8133 || (inter_vec && inside_vec))
8134 && inter_prec >= inside_prec
8135 && (inter_float || inter_vec
8136 || inter_unsignedp == inside_unsignedp)
8137 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8138 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8139 && ! final_ptr
8140 && (! final_vec || inter_prec == inside_prec))
8141 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8143 /* If we have a sign-extension of a zero-extended value, we can
8144 replace that by a single zero-extension. */
8145 if (inside_int && inter_int && final_int
8146 && inside_prec < inter_prec && inter_prec < final_prec
8147 && inside_unsignedp && !inter_unsignedp)
8148 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8150 /* Two conversions in a row are not needed unless:
8151 - some conversion is floating-point (overstrict for now), or
8152 - some conversion is a vector (overstrict for now), or
8153 - the intermediate type is narrower than both initial and
8154 final, or
8155 - the intermediate type and innermost type differ in signedness,
8156 and the outermost type is wider than the intermediate, or
8157 - the initial type is a pointer type and the precisions of the
8158 intermediate and final types differ, or
8159 - the final type is a pointer type and the precisions of the
8160 initial and intermediate types differ, or
8161 - the final type is a pointer type and the initial type is not, or
8162 - the initial type is a pointer to an array and the final type
8163 is not. */
8164 if (! inside_float && ! inter_float && ! final_float
8165 && ! inside_vec && ! inter_vec && ! final_vec
8166 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8167 && ! (inside_int && inter_int
8168 && inter_unsignedp != inside_unsignedp
8169 && inter_prec < final_prec)
8170 && ((inter_unsignedp && inter_prec > inside_prec)
8171 == (final_unsignedp && final_prec > inter_prec))
8172 && ! (inside_ptr && inter_prec != final_prec)
8173 && ! (final_ptr && inside_prec != inter_prec)
8174 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8175 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8176 && final_ptr == inside_ptr
8177 && ! (inside_ptr
8178 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
8179 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8180 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
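/* Example: (int)(long)x with x an int collapses to plain x, since
   the value round-trips through a wider integer type, whereas
   (int)(char)x must keep both conversions because the intermediate
   type is narrower than both the initial and the final type.  */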
8183 /* Handle (T *)&A.B.C for A being of type T and B and C
8184 living at offset zero. This occurs frequently in
8185 C++ upcasting and then accessing the base. */
8186 if (TREE_CODE (op0) == ADDR_EXPR
8187 && POINTER_TYPE_P (type)
8188 && handled_component_p (TREE_OPERAND (op0, 0)))
8190 HOST_WIDE_INT bitsize, bitpos;
8191 tree offset;
8192 enum machine_mode mode;
8193 int unsignedp, volatilep;
8194 tree base = TREE_OPERAND (op0, 0);
8195 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8196 &mode, &unsignedp, &volatilep, false);
8197 /* If the reference was to a (constant) zero offset, we can use
8198 the address of the base if it has the same base type
8199 as the result type. */
8200 if (! offset && bitpos == 0
8201 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8202 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8203 return fold_convert (type, fold_addr_expr (base));
8206 if ((TREE_CODE (op0) == MODIFY_EXPR
8207 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
8208 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
8209 /* Detect assigning a bitfield. */
8210 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
8211 && DECL_BIT_FIELD
8212 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
8214 /* Don't leave an assignment inside a conversion
8215 unless assigning a bitfield. */
8216 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
8217 /* First do the assignment, then return converted constant. */
8218 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8219 TREE_NO_WARNING (tem) = 1;
8220 TREE_USED (tem) = 1;
8221 return tem;
8224 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8225 constant (if x has signed type, the sign bit cannot be set
8226 in c). This folds extension into the BIT_AND_EXPR. */
8227 if (INTEGRAL_TYPE_P (type)
8228 && TREE_CODE (type) != BOOLEAN_TYPE
8229 && TREE_CODE (op0) == BIT_AND_EXPR
8230 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8232 tree and = op0;
8233 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8234 int change = 0;
8236 if (TYPE_UNSIGNED (TREE_TYPE (and))
8237 || (TYPE_PRECISION (type)
8238 <= TYPE_PRECISION (TREE_TYPE (and))))
8239 change = 1;
8240 else if (TYPE_PRECISION (TREE_TYPE (and1))
8241 <= HOST_BITS_PER_WIDE_INT
8242 && host_integerp (and1, 1))
8244 unsigned HOST_WIDE_INT cst;
8246 cst = tree_low_cst (and1, 1);
8247 cst &= (HOST_WIDE_INT) -1
8248 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8249 change = (cst == 0);
8250 #ifdef LOAD_EXTEND_OP
8251 if (change
8252 && !flag_syntax_only
8253 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8254 == ZERO_EXTEND))
8256 tree uns = unsigned_type_for (TREE_TYPE (and0));
8257 and0 = fold_convert (uns, and0);
8258 and1 = fold_convert (uns, and1);
8260 #endif
8262 if (change)
8264 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8265 TREE_INT_CST_HIGH (and1), 0,
8266 TREE_OVERFLOW (and1));
8267 return fold_build2 (BIT_AND_EXPR, type,
8268 fold_convert (type, and0), tem);
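/* Example: for (unsigned int)(x & 0x7f) with x a signed char, the
   mask 0x7f leaves the narrow type's sign bit clear, so the
   extension folds into the mask and we build
   (unsigned int) x & 0x7f.  */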
8272 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8273 when one of the new casts will fold away. Conservatively we assume
8274 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8275 if (POINTER_TYPE_P (type)
8276 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8277 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8278 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8279 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8281 tree arg00 = TREE_OPERAND (arg0, 0);
8282 tree arg01 = TREE_OPERAND (arg0, 1);
8284 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8285 fold_convert (sizetype, arg01));
8288 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8289 of the same precision, and X is an integer type not narrower than
8290 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8291 if (INTEGRAL_TYPE_P (type)
8292 && TREE_CODE (op0) == BIT_NOT_EXPR
8293 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8294 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8295 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8296 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8298 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8299 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8300 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8301 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8304 tem = fold_convert_const (code, type, op0);
8305 return tem ? tem : NULL_TREE;
8307 case FIXED_CONVERT_EXPR:
8308 tem = fold_convert_const (code, type, arg0);
8309 return tem ? tem : NULL_TREE;
8311 case VIEW_CONVERT_EXPR:
8312 if (TREE_TYPE (op0) == type)
8313 return op0;
8314 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8315 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8316 return fold_view_convert_expr (type, op0);
8318 case NEGATE_EXPR:
8319 tem = fold_negate_expr (arg0);
8320 if (tem)
8321 return fold_convert (type, tem);
8322 return NULL_TREE;
8324 case ABS_EXPR:
8325 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8326 return fold_abs_const (arg0, type);
8327 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8328 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8329 /* Convert fabs((double)float) into (double)fabsf(float). */
8330 else if (TREE_CODE (arg0) == NOP_EXPR
8331 && TREE_CODE (type) == REAL_TYPE)
8333 tree targ0 = strip_float_extensions (arg0);
8334 if (targ0 != arg0)
8335 return fold_convert (type, fold_build1 (ABS_EXPR,
8336 TREE_TYPE (targ0),
8337 targ0));
8339 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8340 else if (TREE_CODE (arg0) == ABS_EXPR)
8341 return arg0;
8342 else if (tree_expr_nonnegative_p (arg0))
8343 return arg0;
8345 /* Strip sign ops from argument. */
8346 if (TREE_CODE (type) == REAL_TYPE)
8348 tem = fold_strip_sign_ops (arg0);
8349 if (tem)
8350 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8352 return NULL_TREE;
8354 case CONJ_EXPR:
8355 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8356 return fold_convert (type, arg0);
8357 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8359 tree itype = TREE_TYPE (type);
8360 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8361 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8362 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8364 if (TREE_CODE (arg0) == COMPLEX_CST)
8366 tree itype = TREE_TYPE (type);
8367 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8368 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8369 return build_complex (type, rpart, negate_expr (ipart));
8371 if (TREE_CODE (arg0) == CONJ_EXPR)
8372 return fold_convert (type, TREE_OPERAND (arg0, 0));
8373 return NULL_TREE;
8375 case BIT_NOT_EXPR:
8376 if (TREE_CODE (arg0) == INTEGER_CST)
8377 return fold_not_const (arg0, type);
8378 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8379 return TREE_OPERAND (arg0, 0);
8380 /* Convert ~ (-A) to A - 1. */
8381 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8382 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
8383 build_int_cst (type, 1));
8384 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8385 else if (INTEGRAL_TYPE_P (type)
8386 && ((TREE_CODE (arg0) == MINUS_EXPR
8387 && integer_onep (TREE_OPERAND (arg0, 1)))
8388 || (TREE_CODE (arg0) == PLUS_EXPR
8389 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8390 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8391 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8392 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8393 && (tem = fold_unary (BIT_NOT_EXPR, type,
8394 fold_convert (type,
8395 TREE_OPERAND (arg0, 0)))))
8396 return fold_build2 (BIT_XOR_EXPR, type, tem,
8397 fold_convert (type, TREE_OPERAND (arg0, 1)));
8398 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8399 && (tem = fold_unary (BIT_NOT_EXPR, type,
8400 fold_convert (type,
8401 TREE_OPERAND (arg0, 1)))))
8402 return fold_build2 (BIT_XOR_EXPR, type,
8403 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8404 /* Perform BIT_NOT_EXPR on each element individually. */
8405 else if (TREE_CODE (arg0) == VECTOR_CST)
8407 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8408 int count = TYPE_VECTOR_SUBPARTS (type), i;
8410 for (i = 0; i < count; i++)
8412 if (elements)
8414 elem = TREE_VALUE (elements);
8415 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8416 if (elem == NULL_TREE)
8417 break;
8418 elements = TREE_CHAIN (elements);
8420 else
8421 elem = build_int_cst (TREE_TYPE (type), -1);
8422 list = tree_cons (NULL_TREE, elem, list);
8424 if (i == count)
8425 return build_vector (type, nreverse (list));
8428 return NULL_TREE;
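/* Examples of the BIT_NOT_EXPR rules above: "~-x" folds to
   "x - 1", "~(x - 1)" folds to "-x", and "~(x ^ 3)" folds to
   "x ^ ~3" because complementing the constant operand succeeds.  */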
8430 case TRUTH_NOT_EXPR:
8431 /* The argument to invert_truthvalue must have Boolean type. */
8432 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8433 arg0 = fold_convert (boolean_type_node, arg0);
8435 /* Note that the operand of this must be an int
8436 and its values must be 0 or 1.
8437 ("true" is a fixed value perhaps depending on the language,
8438 but we don't handle values other than 1 correctly yet.) */
8439 tem = fold_truth_not_expr (arg0);
8440 if (!tem)
8441 return NULL_TREE;
8442 return fold_convert (type, tem);
8444 case REALPART_EXPR:
8445 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8446 return fold_convert (type, arg0);
8447 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8448 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8449 TREE_OPERAND (arg0, 1));
8450 if (TREE_CODE (arg0) == COMPLEX_CST)
8451 return fold_convert (type, TREE_REALPART (arg0));
8452 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8454 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8455 tem = fold_build2 (TREE_CODE (arg0), itype,
8456 fold_build1 (REALPART_EXPR, itype,
8457 TREE_OPERAND (arg0, 0)),
8458 fold_build1 (REALPART_EXPR, itype,
8459 TREE_OPERAND (arg0, 1)));
8460 return fold_convert (type, tem);
8462 if (TREE_CODE (arg0) == CONJ_EXPR)
8464 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8465 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8466 return fold_convert (type, tem);
8468 if (TREE_CODE (arg0) == CALL_EXPR)
8470 tree fn = get_callee_fndecl (arg0);
8471 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8472 switch (DECL_FUNCTION_CODE (fn))
8474 CASE_FLT_FN (BUILT_IN_CEXPI):
8475 fn = mathfn_built_in (type, BUILT_IN_COS);
8476 if (fn)
8477 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8478 break;
8480 default:
8481 break;
8484 return NULL_TREE;
8486 case IMAGPART_EXPR:
8487 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8488 return fold_convert (type, integer_zero_node);
8489 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8490 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8491 TREE_OPERAND (arg0, 0));
8492 if (TREE_CODE (arg0) == COMPLEX_CST)
8493 return fold_convert (type, TREE_IMAGPART (arg0));
8494 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8496 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8497 tem = fold_build2 (TREE_CODE (arg0), itype,
8498 fold_build1 (IMAGPART_EXPR, itype,
8499 TREE_OPERAND (arg0, 0)),
8500 fold_build1 (IMAGPART_EXPR, itype,
8501 TREE_OPERAND (arg0, 1)));
8502 return fold_convert (type, tem);
8504 if (TREE_CODE (arg0) == CONJ_EXPR)
8506 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8507 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8508 return fold_convert (type, negate_expr (tem));
8510 if (TREE_CODE (arg0) == CALL_EXPR)
8512 tree fn = get_callee_fndecl (arg0);
8513 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8514 switch (DECL_FUNCTION_CODE (fn))
8516 CASE_FLT_FN (BUILT_IN_CEXPI):
8517 fn = mathfn_built_in (type, BUILT_IN_SIN);
8518 if (fn)
8519 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8520 break;
8522 default:
8523 break;
8526 return NULL_TREE;
8528 default:
8529 return NULL_TREE;
8530 } /* switch (code) */
8533 /* Fold a binary expression of code CODE and type TYPE with operands
8534 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8535 Return the folded expression if folding is successful. Otherwise,
8536 return NULL_TREE. */
8538 static tree
8539 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8541 enum tree_code compl_code;
8543 if (code == MIN_EXPR)
8544 compl_code = MAX_EXPR;
8545 else if (code == MAX_EXPR)
8546 compl_code = MIN_EXPR;
8547 else
8548 gcc_unreachable ();
8550 /* MIN (MAX (a, b), b) == b. */
8551 if (TREE_CODE (op0) == compl_code
8552 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8553 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8555 /* MIN (MAX (b, a), b) == b. */
8556 if (TREE_CODE (op0) == compl_code
8557 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8558 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8559 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8561 /* MIN (a, MAX (a, b)) == a. */
8562 if (TREE_CODE (op1) == compl_code
8563 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8564 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8565 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8567 /* MIN (a, MAX (b, a)) == a. */
8568 if (TREE_CODE (op1) == compl_code
8569 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8570 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8571 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8573 return NULL_TREE;
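/* Concrete instances, for code == MIN_EXPR (compl_code ==
   MAX_EXPR):
     MIN (MAX (a, b), b) -> b
     MIN (a, MAX (a, b)) -> a
   and symmetrically for MAX over MIN; omit_one_operand preserves
   any side effects of the operand that is dropped.  */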
8576 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8577 by changing CODE to reduce the magnitude of constants involved in
8578 ARG0 of the comparison.
8579 Returns a canonicalized comparison tree if a simplification was
8580 possible, otherwise returns NULL_TREE.
8581 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8582 valid if signed overflow is undefined. */
8584 static tree
8585 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8586 tree arg0, tree arg1,
8587 bool *strict_overflow_p)
8589 enum tree_code code0 = TREE_CODE (arg0);
8590 tree t, cst0 = NULL_TREE;
8591 int sgn0;
8592 bool swap = false;
8594 /* Match A +- CST code arg1 and CST code arg1. */
8595 if (!(((code0 == MINUS_EXPR
8596 || code0 == PLUS_EXPR)
8597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8598 || code0 == INTEGER_CST))
8599 return NULL_TREE;
8601 /* Identify the constant in arg0 and its sign. */
8602 if (code0 == INTEGER_CST)
8603 cst0 = arg0;
8604 else
8605 cst0 = TREE_OPERAND (arg0, 1);
8606 sgn0 = tree_int_cst_sgn (cst0);
8608 /* Overflowed constants and zero will cause problems. */
8609 if (integer_zerop (cst0)
8610 || TREE_OVERFLOW (cst0))
8611 return NULL_TREE;
8613 /* See if we can reduce the magnitude of the constant in
8614 arg0 by changing the comparison code. */
8615 if (code0 == INTEGER_CST)
8617 /* CST <= arg1 -> CST-1 < arg1. */
8618 if (code == LE_EXPR && sgn0 == 1)
8619 code = LT_EXPR;
8620 /* -CST < arg1 -> -CST-1 <= arg1. */
8621 else if (code == LT_EXPR && sgn0 == -1)
8622 code = LE_EXPR;
8623 /* CST > arg1 -> CST-1 >= arg1. */
8624 else if (code == GT_EXPR && sgn0 == 1)
8625 code = GE_EXPR;
8626 /* -CST >= arg1 -> -CST-1 > arg1. */
8627 else if (code == GE_EXPR && sgn0 == -1)
8628 code = GT_EXPR;
8629 else
8630 return NULL_TREE;
8631 /* arg1 code' CST' might be more canonical. */
8632 swap = true;
8634 else
8636 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8637 if (code == LT_EXPR
8638 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8639 code = LE_EXPR;
8640 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8641 else if (code == GT_EXPR
8642 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8643 code = GE_EXPR;
8644 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8645 else if (code == LE_EXPR
8646 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8647 code = LT_EXPR;
8648 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8649 else if (code == GE_EXPR
8650 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8651 code = GT_EXPR;
8652 else
8653 return NULL_TREE;
8654 *strict_overflow_p = true;
8657 /* Now build the constant reduced in magnitude. */
8658 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8659 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8660 if (code0 != INTEGER_CST)
8661 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8663 /* If swapping might yield a more canonical form, do so. */
8664 if (swap)
8665 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8666 else
8667 return fold_build2 (code, type, t, arg1);
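/* For example, "X + 2 > Y" is canonicalized to "X + 1 >= Y" (the
   constant's magnitude shrinks by one), and a sole constant on the
   left is reduced and swapped to the right: "3 <= Y" becomes
   "Y > 2".  The A +- CST forms are only valid when signed overflow
   is undefined, which is why they set *STRICT_OVERFLOW_P.  */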
8670 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8671 overflow further. Try to decrease the magnitude of constants involved
8672 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8673 and put sole constants at the second argument position.
8674 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8676 static tree
8677 maybe_canonicalize_comparison (enum tree_code code, tree type,
8678 tree arg0, tree arg1)
8680 tree t;
8681 bool strict_overflow_p;
8682 const char * const warnmsg = G_("assuming signed overflow does not occur "
8683 "when reducing constant in comparison");
8685 /* In principle pointers also have undefined overflow behavior,
8686 but that causes problems elsewhere. */
8687 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8688 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8689 return NULL_TREE;
8691 /* Try canonicalization by simplifying arg0. */
8692 strict_overflow_p = false;
8693 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8694 &strict_overflow_p);
8695 if (t)
8697 if (strict_overflow_p)
8698 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8699 return t;
8702 /* Try canonicalization by simplifying arg1 using the swapped
8703 comparison. */
8704 code = swap_tree_comparison (code);
8705 strict_overflow_p = false;
8706 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8707 &strict_overflow_p);
8708 if (t && strict_overflow_p)
8709 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8710 return t;
8713 /* Subroutine of fold_binary. This routine performs all of the
8714 transformations that are common to the equality/inequality
8715 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8716 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8717 fold_binary should call fold_binary. Fold a comparison with
8718 tree code CODE and type TYPE with operands OP0 and OP1. Return
8719 the folded comparison or NULL_TREE. */
8721 static tree
8722 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8724 tree arg0, arg1, tem;
8726 arg0 = op0;
8727 arg1 = op1;
8729 STRIP_SIGN_NOPS (arg0);
8730 STRIP_SIGN_NOPS (arg1);
8732 tem = fold_relational_const (code, type, arg0, arg1);
8733 if (tem != NULL_TREE)
8734 return tem;
8736 /* If one arg is a real or integer constant, put it last. */
8737 if (tree_swap_operands_p (arg0, arg1, true))
8738 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8740 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8741 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8742 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8743 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8744 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8745 && (TREE_CODE (arg1) == INTEGER_CST
8746 && !TREE_OVERFLOW (arg1)))
8748 tree const1 = TREE_OPERAND (arg0, 1);
8749 tree const2 = arg1;
8750 tree variable = TREE_OPERAND (arg0, 0);
8751 tree lhs;
8752 int lhs_add;
8753 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8755 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8756 TREE_TYPE (arg1), const2, const1);
8758 /* If the constant operation overflowed this can be
8759 simplified as a comparison against INT_MAX/INT_MIN. */
8760 if (TREE_CODE (lhs) == INTEGER_CST
8761 && TREE_OVERFLOW (lhs))
8763 int const1_sgn = tree_int_cst_sgn (const1);
8764 enum tree_code code2 = code;
8766 /* Get the sign of the constant on the lhs if the
8767 operation were VARIABLE + CONST1. */
8768 if (TREE_CODE (arg0) == MINUS_EXPR)
8769 const1_sgn = -const1_sgn;
8771 /* The sign of the constant determines if we overflowed
8772 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8773 Canonicalize to the INT_MIN overflow by swapping the comparison
8774 if necessary. */
8775 if (const1_sgn == -1)
8776 code2 = swap_tree_comparison (code);
8778 /* We now can look at the canonicalized case
8779 VARIABLE + 1 CODE2 INT_MIN
8780 and decide on the result. */
8781 if (code2 == LT_EXPR
8782 || code2 == LE_EXPR
8783 || code2 == EQ_EXPR)
8784 return omit_one_operand (type, boolean_false_node, variable);
8785 else if (code2 == NE_EXPR
8786 || code2 == GE_EXPR
8787 || code2 == GT_EXPR)
8788 return omit_one_operand (type, boolean_true_node, variable);
8791 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8792 && (TREE_CODE (lhs) != INTEGER_CST
8793 || !TREE_OVERFLOW (lhs)))
8795 fold_overflow_warning (("assuming signed overflow does not occur "
8796 "when changing X +- C1 cmp C2 to "
8797 "X cmp C1 +- C2"),
8798 WARN_STRICT_OVERFLOW_COMPARISON);
8799 return fold_build2 (code, type, variable, lhs);
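/* Example of the overflow branch above: with undefined signed
   overflow, "x + 1 < INT_MIN" can never be true because x + 1
   cannot wrap, so it folds to constant false (keeping x for its
   side effects), while "x + 1 >= INT_MIN" folds to true.  */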
8803 /* For comparisons of pointers we can decompose it to a compile time
8804 comparison of the base objects and the offsets into the object.
8805 This requires at least one operand being an ADDR_EXPR to do more
8806 than the operand_equal_p test below. */
8807 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8808 && (TREE_CODE (arg0) == ADDR_EXPR
8809 || TREE_CODE (arg1) == ADDR_EXPR))
8811 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8812 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8813 enum machine_mode mode;
8814 int volatilep, unsignedp;
8815 bool indirect_base0 = false;
8817 /* Get base and offset for the access. Strip ADDR_EXPR for
8818 get_inner_reference, but put it back by stripping INDIRECT_REF
8819 off the base object if possible. */
8820 base0 = arg0;
8821 if (TREE_CODE (arg0) == ADDR_EXPR)
8823 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8824 &bitsize, &bitpos0, &offset0, &mode,
8825 &unsignedp, &volatilep, false);
8826 if (TREE_CODE (base0) == INDIRECT_REF)
8827 base0 = TREE_OPERAND (base0, 0);
8828 else
8829 indirect_base0 = true;
8832 base1 = arg1;
8833 if (TREE_CODE (arg1) == ADDR_EXPR)
8835 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8836 &bitsize, &bitpos1, &offset1, &mode,
8837 &unsignedp, &volatilep, false);
8838 /* We have to make sure to have an indirect/non-indirect base1
8839 just the same as we did for base0. */
8840 if (TREE_CODE (base1) == INDIRECT_REF
8841 && !indirect_base0)
8842 base1 = TREE_OPERAND (base1, 0);
8843 else if (!indirect_base0)
8844 base1 = NULL_TREE;
8846 else if (indirect_base0)
8847 base1 = NULL_TREE;
8849 /* If we have equivalent bases we might be able to simplify. */
8850 if (base0 && base1
8851 && operand_equal_p (base0, base1, 0))
8853 /* We can fold this expression to a constant if the non-constant
8854 offset parts are equal. */
8855 if (offset0 == offset1
8856 || (offset0 && offset1
8857 && operand_equal_p (offset0, offset1, 0)))
8859 switch (code)
8861 case EQ_EXPR:
8862 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8863 case NE_EXPR:
8864 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8865 case LT_EXPR:
8866 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8867 case LE_EXPR:
8868 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8869 case GE_EXPR:
8870 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8871 case GT_EXPR:
8872 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8873 default:;
8876 /* We can simplify the comparison to a comparison of the variable
8877 offset parts if the constant offset parts are equal.
8878 Be careful to use signed size type here because otherwise we
8879 mess with array offsets in the wrong way. This is possible
8880 because pointer arithmetic is restricted to remain within an
8881 object and overflow on pointer differences is undefined as of
8882 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8883 else if (bitpos0 == bitpos1)
8885 tree signed_size_type_node;
8886 signed_size_type_node = signed_type_for (size_type_node);
8888 /* By converting to signed size type we cover middle-end pointer
8889 arithmetic which operates on unsigned pointer types of size
8890 type size and ARRAY_REF offsets which are properly sign or
8891 zero extended from their type in case it is narrower than
8892 size type. */
8893 if (offset0 == NULL_TREE)
8894 offset0 = build_int_cst (signed_size_type_node, 0);
8895 else
8896 offset0 = fold_convert (signed_size_type_node, offset0);
8897 if (offset1 == NULL_TREE)
8898 offset1 = build_int_cst (signed_size_type_node, 0);
8899 else
8900 offset1 = fold_convert (signed_size_type_node, offset1);
8902 return fold_build2 (code, type, offset0, offset1);
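/* E.g. "&a[i] == &a[j]" has equal bases and equal constant bit
   positions, so it reduces to a comparison of the two variable
   byte offsets in the signed variant of size_type_node; when the
   offset trees are themselves equal, the bitpos comparison above
   already folds the whole expression to a constant.  */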
8907 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8908 same object, then we can fold this to a comparison of the two offsets in
8909 signed size type. This is possible because pointer arithmetic is
8910 restricted to remain within an object and overflow on pointer differences
8911 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8913 We check flag_wrapv directly because pointer types are unsigned,
8914 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8915 normally what we want, in order to avoid certain odd overflow
8916 cases, but not here. */
8917 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8918 && !flag_wrapv
8919 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8921 tree base0, offset0, base1, offset1;
8923 if (extract_array_ref (arg0, &base0, &offset0)
8924 && extract_array_ref (arg1, &base1, &offset1)
8925 && operand_equal_p (base0, base1, 0))
8927 tree signed_size_type_node;
8928 signed_size_type_node = signed_type_for (size_type_node);
8930 /* By converting to signed size type we cover middle-end pointer
8931 arithmetic which operates on unsigned pointer types of size
8932 type size and ARRAY_REF offsets which are properly sign or
8933 zero extended from their type in case it is narrower than
8934 size type. */
8935 if (offset0 == NULL_TREE)
8936 offset0 = build_int_cst (signed_size_type_node, 0);
8937 else
8938 offset0 = fold_convert (signed_size_type_node, offset0);
8939 if (offset1 == NULL_TREE)
8940 offset1 = build_int_cst (signed_size_type_node, 0);
8941 else
8942 offset1 = fold_convert (signed_size_type_node, offset1);
8944 return fold_build2 (code, type, offset0, offset1);
8948 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8949 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8950 the resulting offset is smaller in absolute value than the
8951 original one. */
8952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8953 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8954 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8955 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8956 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8957 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8958 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8960 tree const1 = TREE_OPERAND (arg0, 1);
8961 tree const2 = TREE_OPERAND (arg1, 1);
8962 tree variable1 = TREE_OPERAND (arg0, 0);
8963 tree variable2 = TREE_OPERAND (arg1, 0);
8964 tree cst;
8965 const char * const warnmsg = G_("assuming signed overflow does not "
8966 "occur when combining constants around "
8967 "a comparison");
8969 /* Put the constant on the side where it doesn't overflow and is
8970 of lower absolute value than before. */
8971 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8972 ? MINUS_EXPR : PLUS_EXPR,
8973 const2, const1, 0);
8974 if (!TREE_OVERFLOW (cst)
8975 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8977 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8978 return fold_build2 (code, type,
8979 variable1,
8980 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8981 variable2, cst));
8984 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8985 ? MINUS_EXPR : PLUS_EXPR,
8986 const1, const2, 0);
8987 if (!TREE_OVERFLOW (cst)
8988 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8990 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8991 return fold_build2 (code, type,
8992 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8993 variable1, cst),
8994 variable2);
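/* For instance "x + 2 < y + 1" is rewritten with the constants
   combined on one side, here into the equivalent of "x < y - 1";
   the combined constant -1 is no larger in magnitude than either
   original constant, so no new overflow can be introduced.  */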
8998 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8999 signed arithmetic case. That form is created by the compiler
9000 often enough for folding it to be of value. One example is in
9001 computing loop trip counts after Operator Strength Reduction. */
9002 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9003 && TREE_CODE (arg0) == MULT_EXPR
9004 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9005 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9006 && integer_zerop (arg1))
9008 tree const1 = TREE_OPERAND (arg0, 1);
9009 tree const2 = arg1; /* zero */
9010 tree variable1 = TREE_OPERAND (arg0, 0);
9011 enum tree_code cmp_code = code;
9013 gcc_assert (!integer_zerop (const1));
9015 fold_overflow_warning (("assuming signed overflow does not occur when "
9016 "eliminating multiplication in comparison "
9017 "with zero"),
9018 WARN_STRICT_OVERFLOW_COMPARISON);
9020 /* If const1 is negative we swap the sense of the comparison. */
9021 if (tree_int_cst_sgn (const1) < 0)
9022 cmp_code = swap_tree_comparison (cmp_code);
9024 return fold_build2 (cmp_code, type, variable1, const2);
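/* E.g. "x * 4 > 0" becomes "x > 0", and "x * -4 > 0" becomes
   "x < 0", the comparison sense being swapped for a negative
   multiplier.  This relies on signed overflow being undefined, as
   the warning just emitted records.  */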
9027 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9028 if (tem)
9029 return tem;
9031 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9033 tree targ0 = strip_float_extensions (arg0);
9034 tree targ1 = strip_float_extensions (arg1);
9035 tree newtype = TREE_TYPE (targ0);
9037 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9038 newtype = TREE_TYPE (targ1);
9040 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9041 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9042 return fold_build2 (code, type, fold_convert (newtype, targ0),
9043 fold_convert (newtype, targ1));
9045 /* (-a) CMP (-b) -> b CMP a */
9046 if (TREE_CODE (arg0) == NEGATE_EXPR
9047 && TREE_CODE (arg1) == NEGATE_EXPR)
9048 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9049 TREE_OPERAND (arg0, 0));
9051 if (TREE_CODE (arg1) == REAL_CST)
9053 REAL_VALUE_TYPE cst;
9054 cst = TREE_REAL_CST (arg1);
9056 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9057 if (TREE_CODE (arg0) == NEGATE_EXPR)
9058 return fold_build2 (swap_tree_comparison (code), type,
9059 TREE_OPERAND (arg0, 0),
9060 build_real (TREE_TYPE (arg1),
9061 REAL_VALUE_NEGATE (cst)));
9063 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9064 /* a CMP (-0) -> a CMP 0 */
9065 if (REAL_VALUE_MINUS_ZERO (cst))
9066 return fold_build2 (code, type, arg0,
9067 build_real (TREE_TYPE (arg1), dconst0));
9069 /* x != NaN is always true, other ops are always false. */
9070 if (REAL_VALUE_ISNAN (cst)
9071 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9073 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9074 return omit_one_operand (type, tem, arg0);
9077 /* Fold comparisons against infinity. */
9078 if (REAL_VALUE_ISINF (cst))
9080 tem = fold_inf_compare (code, type, arg0, arg1);
9081 if (tem != NULL_TREE)
9082 return tem;
9086 /* If this is a comparison of a real constant with a PLUS_EXPR
9087 or a MINUS_EXPR of a real constant, we can convert it into a
9088 comparison with a revised real constant as long as no overflow
9089 occurs when unsafe_math_optimizations are enabled. */
9090 if (flag_unsafe_math_optimizations
9091 && TREE_CODE (arg1) == REAL_CST
9092 && (TREE_CODE (arg0) == PLUS_EXPR
9093 || TREE_CODE (arg0) == MINUS_EXPR)
9094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9095 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9096 ? MINUS_EXPR : PLUS_EXPR,
9097 arg1, TREE_OPERAND (arg0, 1), 0))
9098 && !TREE_OVERFLOW (tem))
9099 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9101 /* Likewise, we can simplify a comparison of a real constant with
9102 a MINUS_EXPR whose first operand is also a real constant, i.e.
9103 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9104 floating-point types only if -fassociative-math is set. */
9105 if (flag_associative_math
9106 && TREE_CODE (arg1) == REAL_CST
9107 && TREE_CODE (arg0) == MINUS_EXPR
9108 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9109 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9110 arg1, 0))
9111 && !TREE_OVERFLOW (tem))
9112 return fold_build2 (swap_tree_comparison (code), type,
9113 TREE_OPERAND (arg0, 1), tem);
9115 /* Fold comparisons against built-in math functions. */
9116 if (TREE_CODE (arg1) == REAL_CST
9117 && flag_unsafe_math_optimizations
9118 && ! flag_errno_math)
9120 enum built_in_function fcode = builtin_mathfn_code (arg0);
9122 if (fcode != END_BUILTINS)
9124 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9125 if (tem != NULL_TREE)
9126 return tem;
9131 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9132 && (TREE_CODE (arg0) == NOP_EXPR
9133 || TREE_CODE (arg0) == CONVERT_EXPR))
9135 /* If we are widening one operand of an integer comparison,
9136 see if the other operand is similarly being widened. Perhaps we
9137 can do the comparison in the narrower type. */
9138 tem = fold_widened_comparison (code, type, arg0, arg1);
9139 if (tem)
9140 return tem;
9142 /* Or if we are changing signedness. */
9143 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9144 if (tem)
9145 return tem;
9148 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9149 constant, we can simplify it. */
9150 if (TREE_CODE (arg1) == INTEGER_CST
9151 && (TREE_CODE (arg0) == MIN_EXPR
9152 || TREE_CODE (arg0) == MAX_EXPR)
9153 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9155 tem = optimize_minmax_comparison (code, type, op0, op1);
9156 if (tem)
9157 return tem;
9160 /* Simplify comparison of something with itself. (For IEEE
9161 floating-point, we can only do some of these simplifications.) */
9162 if (operand_equal_p (arg0, arg1, 0))
9164 switch (code)
9166 case EQ_EXPR:
9167 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9168 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9169 return constant_boolean_node (1, type);
9170 break;
9172 case GE_EXPR:
9173 case LE_EXPR:
9174 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9175 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9176 return constant_boolean_node (1, type);
9177 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9179 case NE_EXPR:
9180 /* For NE, we can only do this simplification if the type is
9181 integral or we don't honor IEEE floating-point NaNs. */
9182 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9183 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9184 break;
9185 /* ... fall through ... */
9186 case GT_EXPR:
9187 case LT_EXPR:
9188 return constant_boolean_node (0, type);
9189 default:
9190 gcc_unreachable ();
9194 /* If we are comparing an expression that just has comparisons
9195 of two integer values, arithmetic expressions of those comparisons,
9196 and constants, we can simplify it. There are only three cases
9197 to check: the two values can either be equal, the first can be
9198 greater, or the second can be greater. Fold the expression for
9199 those three values. Since each value must be 0 or 1, we have
9200 eight possibilities, each of which corresponds to the constant 0
9201 or 1 or one of the six possible comparisons.
9203 This handles common cases like (a > b) == 0 but also handles
9204 expressions like ((x > y) - (y > x)) > 0, which supposedly
9205 occur in macroized code. */
9207 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9209 tree cval1 = 0, cval2 = 0;
9210 int save_p = 0;
9212 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9213 /* Don't handle degenerate cases here; they should already
9214 have been handled anyway. */
9215 && cval1 != 0 && cval2 != 0
9216 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9217 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9218 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9219 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9220 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9221 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9222 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9224 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9225 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9227 /* We can't just pass T to eval_subst in case cval1 or cval2
9228 was the same as ARG1. */
9230 tree high_result
9231 = fold_build2 (code, type,
9232 eval_subst (arg0, cval1, maxval,
9233 cval2, minval),
9234 arg1);
9235 tree equal_result
9236 = fold_build2 (code, type,
9237 eval_subst (arg0, cval1, maxval,
9238 cval2, maxval),
9239 arg1);
9240 tree low_result
9241 = fold_build2 (code, type,
9242 eval_subst (arg0, cval1, minval,
9243 cval2, maxval),
9244 arg1);
9246 /* All three of these results should be 0 or 1. Confirm they are.
9247 Then use those values to select the proper code to use. */
9249 if (TREE_CODE (high_result) == INTEGER_CST
9250 && TREE_CODE (equal_result) == INTEGER_CST
9251 && TREE_CODE (low_result) == INTEGER_CST)
9253 /* Make a 3-bit mask with the high-order bit being the
9254 value for `>', the next for `=', and the low for `<'. */
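/* Illustrative example (not in the original source): for
   (a > b) <= 0, the three substitutions give high_result = 0,
   equal_result = 1 and low_result = 1, i.e. the mask 3, which
   selects LE_EXPR below, so the whole expression folds to
   a <= b.  */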
9255 switch ((integer_onep (high_result) * 4)
9256 + (integer_onep (equal_result) * 2)
9257 + integer_onep (low_result))
9259 case 0:
9260 /* Always false. */
9261 return omit_one_operand (type, integer_zero_node, arg0);
9262 case 1:
9263 code = LT_EXPR;
9264 break;
9265 case 2:
9266 code = EQ_EXPR;
9267 break;
9268 case 3:
9269 code = LE_EXPR;
9270 break;
9271 case 4:
9272 code = GT_EXPR;
9273 break;
9274 case 5:
9275 code = NE_EXPR;
9276 break;
9277 case 6:
9278 code = GE_EXPR;
9279 break;
9280 case 7:
9281 /* Always true. */
9282 return omit_one_operand (type, integer_one_node, arg0);
9285 if (save_p)
9286 return save_expr (build2 (code, type, cval1, cval2));
9287 return fold_build2 (code, type, cval1, cval2);
9292 /* Fold a comparison of the address of COMPONENT_REFs with the same
9293 type and component to a comparison of the address of the base
9294 object. In short, &x->a OP &y->a to x OP y and
9295 &x->a OP &y.a to x OP &y */
9296 if (TREE_CODE (arg0) == ADDR_EXPR
9297 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9298 && TREE_CODE (arg1) == ADDR_EXPR
9299 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9301 tree cref0 = TREE_OPERAND (arg0, 0);
9302 tree cref1 = TREE_OPERAND (arg1, 0);
9303 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9305 tree op0 = TREE_OPERAND (cref0, 0);
9306 tree op1 = TREE_OPERAND (cref1, 0);
9307 return fold_build2 (code, type,
9308 fold_addr_expr (op0),
9309 fold_addr_expr (op1));
9313 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9314 into a single range test. */
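/* Illustrative example: with signed X, X/4 == 2 holds exactly
   for 8 <= X && X <= 11, so the division can be replaced by a
   single range test on X.  */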
9315 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9316 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9317 && TREE_CODE (arg1) == INTEGER_CST
9318 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9319 && !integer_zerop (TREE_OPERAND (arg0, 1))
9320 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9321 && !TREE_OVERFLOW (arg1))
9323 tem = fold_div_compare (code, type, arg0, arg1);
9324 if (tem != NULL_TREE)
9325 return tem;
9328 /* Fold ~X op ~Y as Y op X. */
9329 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9330 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9332 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9333 return fold_build2 (code, type,
9334 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9335 TREE_OPERAND (arg0, 0));
9338 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9339 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9340 && TREE_CODE (arg1) == INTEGER_CST)
9342 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9343 return fold_build2 (swap_tree_comparison (code), type,
9344 TREE_OPERAND (arg0, 0),
9345 fold_build1 (BIT_NOT_EXPR, cmp_type,
9346 fold_convert (cmp_type, arg1)));
9349 return NULL_TREE;
9353 /* Subroutine of fold_binary. Optimize complex multiplications of the
9354 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9355 argument EXPR represents the expression "z" of type TYPE. */
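/* Illustrative example: for z = 3 + 4i the folded form computes
   3*3 + 4*4 = 25, yielding the complex result 25 + 0i.  */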
9357 static tree
9358 fold_mult_zconjz (tree type, tree expr)
9360 tree itype = TREE_TYPE (type);
9361 tree rpart, ipart, tem;
9363 if (TREE_CODE (expr) == COMPLEX_EXPR)
9365 rpart = TREE_OPERAND (expr, 0);
9366 ipart = TREE_OPERAND (expr, 1);
9368 else if (TREE_CODE (expr) == COMPLEX_CST)
9370 rpart = TREE_REALPART (expr);
9371 ipart = TREE_IMAGPART (expr);
9373 else
9375 expr = save_expr (expr);
9376 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9377 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9380 rpart = save_expr (rpart);
9381 ipart = save_expr (ipart);
9382 tem = fold_build2 (PLUS_EXPR, itype,
9383 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9384 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9385 return fold_build2 (COMPLEX_EXPR, type, tem,
9386 fold_convert (itype, integer_zero_node));
9390 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9391 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9392 guarantees that P and N have the same least significant log2(M) bits.
9393 N is not otherwise constrained. In particular, N is not normalized to
9394 0 <= N < M as is common. In general, the precise value of P is unknown.
9395 M is chosen as large as possible such that constant N can be determined.
9397 Returns M and sets *RESIDUE to N. */
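/* Illustrative example: for EXPR = &v p+ 3, where the declaration
   v has DECL_ALIGN_UNIT (v) == 8, the function returns M = 8 and
   sets *RESIDUE to 3; the pointer value is congruent to 3
   modulo 8.  */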
9399 static unsigned HOST_WIDE_INT
9400 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9402 enum tree_code code;
9404 *residue = 0;
9406 code = TREE_CODE (expr);
9407 if (code == ADDR_EXPR)
9409 expr = TREE_OPERAND (expr, 0);
9410 if (handled_component_p (expr))
9412 HOST_WIDE_INT bitsize, bitpos;
9413 tree offset;
9414 enum machine_mode mode;
9415 int unsignedp, volatilep;
9417 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9418 &mode, &unsignedp, &volatilep, false);
9419 *residue = bitpos / BITS_PER_UNIT;
9420 if (offset)
9422 if (TREE_CODE (offset) == INTEGER_CST)
9423 *residue += TREE_INT_CST_LOW (offset);
9424 else
9425 /* We don't handle more complicated offset expressions. */
9426 return 1;
9430 if (DECL_P (expr))
9431 return DECL_ALIGN_UNIT (expr);
9433 else if (code == POINTER_PLUS_EXPR)
9435 tree op0, op1;
9436 unsigned HOST_WIDE_INT modulus;
9437 enum tree_code inner_code;
9439 op0 = TREE_OPERAND (expr, 0);
9440 STRIP_NOPS (op0);
9441 modulus = get_pointer_modulus_and_residue (op0, residue);
9443 op1 = TREE_OPERAND (expr, 1);
9444 STRIP_NOPS (op1);
9445 inner_code = TREE_CODE (op1);
9446 if (inner_code == INTEGER_CST)
9448 *residue += TREE_INT_CST_LOW (op1);
9449 return modulus;
9451 else if (inner_code == MULT_EXPR)
9453 op1 = TREE_OPERAND (op1, 1);
9454 if (TREE_CODE (op1) == INTEGER_CST)
9456 unsigned HOST_WIDE_INT align;
9458 /* Compute the greatest power-of-2 divisor of op1. */
9459 align = TREE_INT_CST_LOW (op1);
9460 align &= -align;
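/* Illustrative example: for op1 == 24 (binary 11000),
   24 & -24 == 8, the greatest power-of-2 divisor of 24.  */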
9462 /* If align is non-zero and less than *modulus, replace
9463 *modulus with align.  If align is 0, then either op1 is 0
9464 or the greatest power-of-2 divisor of op1 doesn't fit in an
9465 unsigned HOST_WIDE_INT. In either case, no additional
9466 constraint is imposed. */
9467 if (align)
9468 modulus = MIN (modulus, align);
9470 return modulus;
9475 /* If we get here, we were unable to determine anything useful about the
9476 expression. */
9477 return 1;
9481 /* Fold a binary expression of code CODE and type TYPE with operands
9482 OP0 and OP1. Return the folded expression if folding is
9483 successful. Otherwise, return NULL_TREE. */
9485 tree
9486 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9488 enum tree_code_class kind = TREE_CODE_CLASS (code);
9489 tree arg0, arg1, tem;
9490 tree t1 = NULL_TREE;
9491 bool strict_overflow_p;
9493 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9494 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9495 && TREE_CODE_LENGTH (code) == 2
9496 && op0 != NULL_TREE
9497 && op1 != NULL_TREE);
9499 arg0 = op0;
9500 arg1 = op1;
9502 /* Strip any conversions that don't change the mode. This is
9503 safe for every expression, except for a comparison expression
9504 because its signedness is derived from its operands. So, in
9505 the latter case, only strip conversions that don't change the
9506 signedness.
9508 Note that this is done as an internal manipulation within the
9509 constant folder, in order to find the simplest representation
9510 of the arguments so that their form can be studied. In any
9511 cases, the appropriate type conversions should be put back in
9512 the tree that will get out of the constant folder. */
9514 if (kind == tcc_comparison)
9516 STRIP_SIGN_NOPS (arg0);
9517 STRIP_SIGN_NOPS (arg1);
9519 else
9521 STRIP_NOPS (arg0);
9522 STRIP_NOPS (arg1);
9525 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9526 constant but we can't do arithmetic on them. */
9527 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9528 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9529 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9530 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9531 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9532 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9534 if (kind == tcc_binary)
9536 /* Make sure type and arg0 have the same saturating flag. */
9537 gcc_assert (TYPE_SATURATING (type)
9538 == TYPE_SATURATING (TREE_TYPE (arg0)));
9539 tem = const_binop (code, arg0, arg1, 0);
9541 else if (kind == tcc_comparison)
9542 tem = fold_relational_const (code, type, arg0, arg1);
9543 else
9544 tem = NULL_TREE;
9546 if (tem != NULL_TREE)
9548 if (TREE_TYPE (tem) != type)
9549 tem = fold_convert (type, tem);
9550 return tem;
9554 /* If this is a commutative operation, and ARG0 is a constant, move it
9555 to ARG1 to reduce the number of tests below. */
9556 if (commutative_tree_code (code)
9557 && tree_swap_operands_p (arg0, arg1, true))
9558 return fold_build2 (code, type, op1, op0);
9560 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9562 First check for cases where an arithmetic operation is applied to a
9563 compound, conditional, or comparison operation. Push the arithmetic
9564 operation inside the compound or conditional to see if any folding
9565 can then be done. Convert comparison to conditional for this purpose.
9566 This also optimizes non-constant cases that used to be done in
9567 expand_expr.
9569 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9570 where one of the operands is a comparison and the other is a comparison, a
9571 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9572 code below would make the expression more complex. Change it to a
9573 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9574 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
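/* Illustrative example: (a < b) & (c < d) becomes a
   TRUTH_AND_EXPR of the two comparisons, while
   (a < b) == (c < d) becomes the inversion of the corresponding
   TRUTH_XOR_EXPR.  */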
9576 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9577 || code == EQ_EXPR || code == NE_EXPR)
9578 && ((truth_value_p (TREE_CODE (arg0))
9579 && (truth_value_p (TREE_CODE (arg1))
9580 || (TREE_CODE (arg1) == BIT_AND_EXPR
9581 && integer_onep (TREE_OPERAND (arg1, 1)))))
9582 || (truth_value_p (TREE_CODE (arg1))
9583 && (truth_value_p (TREE_CODE (arg0))
9584 || (TREE_CODE (arg0) == BIT_AND_EXPR
9585 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9587 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9588 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9589 : TRUTH_XOR_EXPR,
9590 boolean_type_node,
9591 fold_convert (boolean_type_node, arg0),
9592 fold_convert (boolean_type_node, arg1));
9594 if (code == EQ_EXPR)
9595 tem = invert_truthvalue (tem);
9597 return fold_convert (type, tem);
9600 if (TREE_CODE_CLASS (code) == tcc_binary
9601 || TREE_CODE_CLASS (code) == tcc_comparison)
9603 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9604 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9605 fold_build2 (code, type,
9606 TREE_OPERAND (arg0, 1), op1));
9607 if (TREE_CODE (arg1) == COMPOUND_EXPR
9608 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9609 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9610 fold_build2 (code, type,
9611 op0, TREE_OPERAND (arg1, 1)));
9613 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9615 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9616 arg0, arg1,
9617 /*cond_first_p=*/1);
9618 if (tem != NULL_TREE)
9619 return tem;
9622 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9624 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9625 arg1, arg0,
9626 /*cond_first_p=*/0);
9627 if (tem != NULL_TREE)
9628 return tem;
9632 switch (code)
9634 case POINTER_PLUS_EXPR:
9635 /* 0 +p index -> (type)index */
9636 if (integer_zerop (arg0))
9637 return non_lvalue (fold_convert (type, arg1));
9639 /* PTR +p 0 -> PTR */
9640 if (integer_zerop (arg1))
9641 return non_lvalue (fold_convert (type, arg0));
9643 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9644 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9645 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9646 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9647 fold_convert (sizetype, arg1),
9648 fold_convert (sizetype, arg0)));
9650 /* index +p PTR -> PTR +p index */
9651 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9652 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9653 return fold_build2 (POINTER_PLUS_EXPR, type,
9654 fold_convert (type, arg1),
9655 fold_convert (sizetype, arg0));
9657 /* (PTR +p B) +p A -> PTR +p (B + A) */
9658 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9660 tree inner;
9661 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9662 tree arg00 = TREE_OPERAND (arg0, 0);
9663 inner = fold_build2 (PLUS_EXPR, sizetype,
9664 arg01, fold_convert (sizetype, arg1));
9665 return fold_convert (type,
9666 fold_build2 (POINTER_PLUS_EXPR,
9667 TREE_TYPE (arg00), arg00, inner));
9670 /* PTR_CST +p CST -> CST1 */
9671 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9672 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9674 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9675 of the array.  The loop optimizer sometimes produces this type of
9676 expression. */
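/* Illustrative example (assuming 4-byte int elements): for
   int a[N], &a[i] p+ 4 * j becomes &a[i + j], 4 being the step
   of the array.  */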
9677 if (TREE_CODE (arg0) == ADDR_EXPR)
9679 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9680 if (tem)
9681 return fold_convert (type, tem);
9684 return NULL_TREE;
9686 case PLUS_EXPR:
9687 /* PTR + INT -> (INT)(PTR p+ INT) */
9688 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9689 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9690 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9691 TREE_TYPE (arg0),
9692 arg0,
9693 fold_convert (sizetype, arg1)));
9694 /* INT + PTR -> (INT)(PTR p+ INT) */
9695 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9696 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9697 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9698 TREE_TYPE (arg1),
9699 arg1,
9700 fold_convert (sizetype, arg0)));
9701 /* A + (-B) -> A - B */
9702 if (TREE_CODE (arg1) == NEGATE_EXPR)
9703 return fold_build2 (MINUS_EXPR, type,
9704 fold_convert (type, arg0),
9705 fold_convert (type, TREE_OPERAND (arg1, 0)));
9706 /* (-A) + B -> B - A */
9707 if (TREE_CODE (arg0) == NEGATE_EXPR
9708 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9709 return fold_build2 (MINUS_EXPR, type,
9710 fold_convert (type, arg1),
9711 fold_convert (type, TREE_OPERAND (arg0, 0)));
9713 if (INTEGRAL_TYPE_P (type))
9715 /* Convert ~A + 1 to -A. */
9716 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9717 && integer_onep (arg1))
9718 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9720 /* ~X + X is -1. */
9721 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9722 && !TYPE_OVERFLOW_TRAPS (type))
9724 tree tem = TREE_OPERAND (arg0, 0);
9726 STRIP_NOPS (tem);
9727 if (operand_equal_p (tem, arg1, 0))
9729 t1 = build_int_cst_type (type, -1);
9730 return omit_one_operand (type, t1, arg1);
9734 /* X + ~X is -1. */
9735 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9736 && !TYPE_OVERFLOW_TRAPS (type))
9738 tree tem = TREE_OPERAND (arg1, 0);
9740 STRIP_NOPS (tem);
9741 if (operand_equal_p (arg0, tem, 0))
9743 t1 = build_int_cst_type (type, -1);
9744 return omit_one_operand (type, t1, arg0);
9749 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9750 same or one. Make sure type is not saturating.
9751 fold_plusminus_mult_expr will re-associate. */
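/* Illustrative example: X*3 + X*5 re-associates to X*8, and
   X*3 + X (the "or one" case) to X*4.  */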
9752 if ((TREE_CODE (arg0) == MULT_EXPR
9753 || TREE_CODE (arg1) == MULT_EXPR)
9754 && !TYPE_SATURATING (type)
9755 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9757 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9758 if (tem)
9759 return tem;
9762 if (! FLOAT_TYPE_P (type))
9764 if (integer_zerop (arg1))
9765 return non_lvalue (fold_convert (type, arg0));
9767 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9768 with a constant, and the two constants have no bits in common,
9769 we should treat this as a BIT_IOR_EXPR since this may produce more
9770 simplifications. */
9771 if (TREE_CODE (arg0) == BIT_AND_EXPR
9772 && TREE_CODE (arg1) == BIT_AND_EXPR
9773 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9774 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9775 && integer_zerop (const_binop (BIT_AND_EXPR,
9776 TREE_OPERAND (arg0, 1),
9777 TREE_OPERAND (arg1, 1), 0)))
9779 code = BIT_IOR_EXPR;
9780 goto bit_ior;
9783 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9784 (plus (plus (mult) (mult)) (foo)) so that we can
9785 take advantage of the factoring cases below. */
9786 if (((TREE_CODE (arg0) == PLUS_EXPR
9787 || TREE_CODE (arg0) == MINUS_EXPR)
9788 && TREE_CODE (arg1) == MULT_EXPR)
9789 || ((TREE_CODE (arg1) == PLUS_EXPR
9790 || TREE_CODE (arg1) == MINUS_EXPR)
9791 && TREE_CODE (arg0) == MULT_EXPR))
9793 tree parg0, parg1, parg, marg;
9794 enum tree_code pcode;
9796 if (TREE_CODE (arg1) == MULT_EXPR)
9797 parg = arg0, marg = arg1;
9798 else
9799 parg = arg1, marg = arg0;
9800 pcode = TREE_CODE (parg);
9801 parg0 = TREE_OPERAND (parg, 0);
9802 parg1 = TREE_OPERAND (parg, 1);
9803 STRIP_NOPS (parg0);
9804 STRIP_NOPS (parg1);
9806 if (TREE_CODE (parg0) == MULT_EXPR
9807 && TREE_CODE (parg1) != MULT_EXPR)
9808 return fold_build2 (pcode, type,
9809 fold_build2 (PLUS_EXPR, type,
9810 fold_convert (type, parg0),
9811 fold_convert (type, marg)),
9812 fold_convert (type, parg1));
9813 if (TREE_CODE (parg0) != MULT_EXPR
9814 && TREE_CODE (parg1) == MULT_EXPR)
9815 return fold_build2 (PLUS_EXPR, type,
9816 fold_convert (type, parg0),
9817 fold_build2 (pcode, type,
9818 fold_convert (type, marg),
9819 fold_convert (type,
9820 parg1)));
9823 else
9825 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9826 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9827 return non_lvalue (fold_convert (type, arg0));
9829 /* Likewise if the operands are reversed. */
9830 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9831 return non_lvalue (fold_convert (type, arg1));
9833 /* Convert X + -C into X - C. */
9834 if (TREE_CODE (arg1) == REAL_CST
9835 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9837 tem = fold_negate_const (arg1, type);
9838 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9839 return fold_build2 (MINUS_EXPR, type,
9840 fold_convert (type, arg0),
9841 fold_convert (type, tem));
9844 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9845 to __complex__ ( x, y ). This is not the same for SNaNs or
9846 if signed zeros are involved. */
9847 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9848 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9849 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9851 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9852 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9853 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9854 bool arg0rz = false, arg0iz = false;
9855 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9856 || (arg0i && (arg0iz = real_zerop (arg0i))))
9858 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9859 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9860 if (arg0rz && arg1i && real_zerop (arg1i))
9862 tree rp = arg1r ? arg1r
9863 : build1 (REALPART_EXPR, rtype, arg1);
9864 tree ip = arg0i ? arg0i
9865 : build1 (IMAGPART_EXPR, rtype, arg0);
9866 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9868 else if (arg0iz && arg1r && real_zerop (arg1r))
9870 tree rp = arg0r ? arg0r
9871 : build1 (REALPART_EXPR, rtype, arg0);
9872 tree ip = arg1i ? arg1i
9873 : build1 (IMAGPART_EXPR, rtype, arg1);
9874 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9879 if (flag_unsafe_math_optimizations
9880 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9881 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9882 && (tem = distribute_real_division (code, type, arg0, arg1)))
9883 return tem;
9885 /* Convert x+x into x*2.0. */
9886 if (operand_equal_p (arg0, arg1, 0)
9887 && SCALAR_FLOAT_TYPE_P (type))
9888 return fold_build2 (MULT_EXPR, type, arg0,
9889 build_real (type, dconst2));
9891 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9892 We associate floats only if the user has specified
9893 -fassociative-math. */
9894 if (flag_associative_math
9895 && TREE_CODE (arg1) == PLUS_EXPR
9896 && TREE_CODE (arg0) != MULT_EXPR)
9898 tree tree10 = TREE_OPERAND (arg1, 0);
9899 tree tree11 = TREE_OPERAND (arg1, 1);
9900 if (TREE_CODE (tree11) == MULT_EXPR
9901 && TREE_CODE (tree10) == MULT_EXPR)
9903 tree tree0;
9904 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9905 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9908 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9909 We associate floats only if the user has specified
9910 -fassociative-math. */
9911 if (flag_associative_math
9912 && TREE_CODE (arg0) == PLUS_EXPR
9913 && TREE_CODE (arg1) != MULT_EXPR)
9915 tree tree00 = TREE_OPERAND (arg0, 0);
9916 tree tree01 = TREE_OPERAND (arg0, 1);
9917 if (TREE_CODE (tree01) == MULT_EXPR
9918 && TREE_CODE (tree00) == MULT_EXPR)
9920 tree tree0;
9921 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9922 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9927 bit_rotate:
9928 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9929 is a rotate of A by C1 bits. */
9930 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9931 is a rotate of A by B bits. */
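/* Illustrative example: for a 32-bit unsigned A,
   (A << 3) + (A >> 29) is recognized as A rotated left by 3,
   and (A << B) + (A >> (32 - B)) as A rotated left by B.  */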
9933 enum tree_code code0, code1;
9934 code0 = TREE_CODE (arg0);
9935 code1 = TREE_CODE (arg1);
9936 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9937 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9938 && operand_equal_p (TREE_OPERAND (arg0, 0),
9939 TREE_OPERAND (arg1, 0), 0)
9940 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9942 tree tree01, tree11;
9943 enum tree_code code01, code11;
9945 tree01 = TREE_OPERAND (arg0, 1);
9946 tree11 = TREE_OPERAND (arg1, 1);
9947 STRIP_NOPS (tree01);
9948 STRIP_NOPS (tree11);
9949 code01 = TREE_CODE (tree01);
9950 code11 = TREE_CODE (tree11);
9951 if (code01 == INTEGER_CST
9952 && code11 == INTEGER_CST
9953 && TREE_INT_CST_HIGH (tree01) == 0
9954 && TREE_INT_CST_HIGH (tree11) == 0
9955 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9956 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9957 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9958 code0 == LSHIFT_EXPR ? tree01 : tree11);
9959 else if (code11 == MINUS_EXPR)
9961 tree tree110, tree111;
9962 tree110 = TREE_OPERAND (tree11, 0);
9963 tree111 = TREE_OPERAND (tree11, 1);
9964 STRIP_NOPS (tree110);
9965 STRIP_NOPS (tree111);
9966 if (TREE_CODE (tree110) == INTEGER_CST
9967 && 0 == compare_tree_int (tree110,
9968 TYPE_PRECISION
9969 (TREE_TYPE (TREE_OPERAND
9970 (arg0, 0))))
9971 && operand_equal_p (tree01, tree111, 0))
9972 return build2 ((code0 == LSHIFT_EXPR
9973 ? LROTATE_EXPR
9974 : RROTATE_EXPR),
9975 type, TREE_OPERAND (arg0, 0), tree01);
9977 else if (code01 == MINUS_EXPR)
9979 tree tree010, tree011;
9980 tree010 = TREE_OPERAND (tree01, 0);
9981 tree011 = TREE_OPERAND (tree01, 1);
9982 STRIP_NOPS (tree010);
9983 STRIP_NOPS (tree011);
9984 if (TREE_CODE (tree010) == INTEGER_CST
9985 && 0 == compare_tree_int (tree010,
9986 TYPE_PRECISION
9987 (TREE_TYPE (TREE_OPERAND
9988 (arg0, 0))))
9989 && operand_equal_p (tree11, tree011, 0))
9990 return build2 ((code0 != LSHIFT_EXPR
9991 ? LROTATE_EXPR
9992 : RROTATE_EXPR),
9993 type, TREE_OPERAND (arg0, 0), tree11);
9998 associate:
9999 /* In most languages, we can't associate operations on floats through
10000 parentheses. Rather than remember where the parentheses were, we
10001 don't associate floats at all, unless the user has specified
10002 -fassociative-math.
10003 And, we need to make sure type is not saturating. */
10005 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10006 && !TYPE_SATURATING (type))
10008 tree var0, con0, lit0, minus_lit0;
10009 tree var1, con1, lit1, minus_lit1;
10010 bool ok = true;
10012 /* Split both trees into variables, constants, and literals. Then
10013 associate each group together, the constants with literals,
10014 then the result with variables. This increases the chances of
10015 literals being recombined later and of generating relocatable
10016 expressions for the sum of a constant and literal. */
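/* Illustrative example: for (X + 4) + 8 this splits out the
   variable X and the literals 4 and 8, then recombines them
   as X + 12.  */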
10017 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10018 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10019 code == MINUS_EXPR);
10021 /* With undefined overflow we can only associate constants
10022 with one variable. */
10023 if ((POINTER_TYPE_P (type)
10024 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10025 && var0 && var1)
10027 tree tmp0 = var0;
10028 tree tmp1 = var1;
10030 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10031 tmp0 = TREE_OPERAND (tmp0, 0);
10032 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10033 tmp1 = TREE_OPERAND (tmp1, 0);
10034 /* The only case we can still associate with two variables
10035 is if they are the same, modulo negation. */
10036 if (!operand_equal_p (tmp0, tmp1, 0))
10037 ok = false;
10040 /* Only do something if we found more than two objects. Otherwise,
10041 nothing has changed and we risk infinite recursion. */
10042 if (ok
10043 && (2 < ((var0 != 0) + (var1 != 0)
10044 + (con0 != 0) + (con1 != 0)
10045 + (lit0 != 0) + (lit1 != 0)
10046 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10048 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10049 if (code == MINUS_EXPR)
10050 code = PLUS_EXPR;
10052 var0 = associate_trees (var0, var1, code, type);
10053 con0 = associate_trees (con0, con1, code, type);
10054 lit0 = associate_trees (lit0, lit1, code, type);
10055 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10057 /* Preserve the MINUS_EXPR if the negative part of the literal is
10058 greater than the positive part. Otherwise, the multiplicative
10059 folding code (i.e. extract_muldiv) may be fooled in case
10060 unsigned constants are subtracted, like in the following
10061 example: ((X*2 + 4) - 8U)/2. */
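/* In that example lit0 == 4 and minus_lit0 == 8, so the literals
   combine as 8 - 4 on the negative side and the sum folds to
   X*2 - 4, which extract_muldiv can then divide by 2 safely.  */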
10062 if (minus_lit0 && lit0)
10064 if (TREE_CODE (lit0) == INTEGER_CST
10065 && TREE_CODE (minus_lit0) == INTEGER_CST
10066 && tree_int_cst_lt (lit0, minus_lit0))
10068 minus_lit0 = associate_trees (minus_lit0, lit0,
10069 MINUS_EXPR, type);
10070 lit0 = 0;
10072 else
10074 lit0 = associate_trees (lit0, minus_lit0,
10075 MINUS_EXPR, type);
10076 minus_lit0 = 0;
10079 if (minus_lit0)
10081 if (con0 == 0)
10082 return fold_convert (type,
10083 associate_trees (var0, minus_lit0,
10084 MINUS_EXPR, type));
10085 else
10087 con0 = associate_trees (con0, minus_lit0,
10088 MINUS_EXPR, type);
10089 return fold_convert (type,
10090 associate_trees (var0, con0,
10091 PLUS_EXPR, type));
10095 con0 = associate_trees (con0, lit0, code, type);
10096 return fold_convert (type, associate_trees (var0, con0,
10097 code, type));
10101 return NULL_TREE;
10103 case MINUS_EXPR:
10104 /* Pointer simplifications for subtraction, simple reassociations. */
10105 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10107 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10108 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10109 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10111 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10112 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10113 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10114 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10115 return fold_build2 (PLUS_EXPR, type,
10116 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10117 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10119 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10120 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10122 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10123 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10124 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10125 if (tmp)
10126 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10129 /* A - (-B) -> A + B */
10130 if (TREE_CODE (arg1) == NEGATE_EXPR)
10131 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
10132 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10133 if (TREE_CODE (arg0) == NEGATE_EXPR
10134 && (FLOAT_TYPE_P (type)
10135 || INTEGRAL_TYPE_P (type))
10136 && negate_expr_p (arg1)
10137 && reorder_operands_p (arg0, arg1))
10138 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
10139 TREE_OPERAND (arg0, 0));
10140 /* Convert -A - 1 to ~A. */
10141 if (INTEGRAL_TYPE_P (type)
10142 && TREE_CODE (arg0) == NEGATE_EXPR
10143 && integer_onep (arg1)
10144 && !TYPE_OVERFLOW_TRAPS (type))
10145 return fold_build1 (BIT_NOT_EXPR, type,
10146 fold_convert (type, TREE_OPERAND (arg0, 0)));
10148 /* Convert -1 - A to ~A. */
10149 if (INTEGRAL_TYPE_P (type)
10150 && integer_all_onesp (arg0))
10151 return fold_build1 (BIT_NOT_EXPR, type, op1);
10153 if (! FLOAT_TYPE_P (type))
10155 if (integer_zerop (arg0))
10156 return negate_expr (fold_convert (type, arg1));
10157 if (integer_zerop (arg1))
10158 return non_lvalue (fold_convert (type, arg0));
10160 /* Fold A - (A & B) into ~B & A. */
10161 if (!TREE_SIDE_EFFECTS (arg0)
10162 && TREE_CODE (arg1) == BIT_AND_EXPR)
10164 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10166 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10167 return fold_build2 (BIT_AND_EXPR, type,
10168 fold_build1 (BIT_NOT_EXPR, type, arg10),
10169 fold_convert (type, arg0));
10171 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10173 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10174 return fold_build2 (BIT_AND_EXPR, type,
10175 fold_build1 (BIT_NOT_EXPR, type, arg11),
10176 fold_convert (type, arg0));
10180 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10181 any power of 2 minus 1. */
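/* Illustrative check with B == 7: writing A as 8*hi + lo with
   0 <= lo < 8, (A & ~7) - (A & 7) == 8*hi - lo, and
   (A ^ 7) - 7 == (8*hi + (7 - lo)) - 7 is the same value.  */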
10182 if (TREE_CODE (arg0) == BIT_AND_EXPR
10183 && TREE_CODE (arg1) == BIT_AND_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 0),
10185 TREE_OPERAND (arg1, 0), 0))
10187 tree mask0 = TREE_OPERAND (arg0, 1);
10188 tree mask1 = TREE_OPERAND (arg1, 1);
10189 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10191 if (operand_equal_p (tem, mask1, 0))
10193 tem = fold_build2 (BIT_XOR_EXPR, type,
10194 TREE_OPERAND (arg0, 0), mask1);
10195 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10200 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10201 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10202 return non_lvalue (fold_convert (type, arg0));
10204 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10205 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10206 (-ARG1 + ARG0) reduces to -ARG1. */
10207 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10208 return negate_expr (fold_convert (type, arg1));
10210 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10211 __complex__ ( x, -y ). This is not the same for SNaNs or if
10212 signed zeros are involved. */
10213 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10214 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10215 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10217 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10218 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10219 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10220 bool arg0rz = false, arg0iz = false;
10221 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10222 || (arg0i && (arg0iz = real_zerop (arg0i))))
10224 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10225 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10226 if (arg0rz && arg1i && real_zerop (arg1i))
10228 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10229 arg1r ? arg1r
10230 : build1 (REALPART_EXPR, rtype, arg1));
10231 tree ip = arg0i ? arg0i
10232 : build1 (IMAGPART_EXPR, rtype, arg0);
10233 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10235 else if (arg0iz && arg1r && real_zerop (arg1r))
10237 tree rp = arg0r ? arg0r
10238 : build1 (REALPART_EXPR, rtype, arg0);
10239 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10240 arg1i ? arg1i
10241 : build1 (IMAGPART_EXPR, rtype, arg1));
10242 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10247 /* Fold &x - &x. This can happen from &x.foo - &x.
10248 This is unsafe for certain floats even in non-IEEE formats.
10249 In IEEE, it is unsafe because it does wrong for NaNs.
10250 Also note that operand_equal_p is always false if an operand
10251 is volatile. */
10253 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10254 && operand_equal_p (arg0, arg1, 0))
10255 return fold_convert (type, integer_zero_node);
10257 /* A - B -> A + (-B) if B is easily negatable. */
10258 if (negate_expr_p (arg1)
10259 && ((FLOAT_TYPE_P (type)
10260 /* Avoid this transformation if B is a positive REAL_CST. */
10261 && (TREE_CODE (arg1) != REAL_CST
10262 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10263 || INTEGRAL_TYPE_P (type)))
10264 return fold_build2 (PLUS_EXPR, type,
10265 fold_convert (type, arg0),
10266 fold_convert (type, negate_expr (arg1)));
10268 /* Try folding difference of addresses. */
10270 HOST_WIDE_INT diff;
10272 if ((TREE_CODE (arg0) == ADDR_EXPR
10273 || TREE_CODE (arg1) == ADDR_EXPR)
10274 && ptr_difference_const (arg0, arg1, &diff))
10275 return build_int_cst_type (type, diff);
10278 /* Fold &a[i] - &a[j] to i-j. */
10279 if (TREE_CODE (arg0) == ADDR_EXPR
10280 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10281 && TREE_CODE (arg1) == ADDR_EXPR
10282 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10284 tree aref0 = TREE_OPERAND (arg0, 0);
10285 tree aref1 = TREE_OPERAND (arg1, 0);
10286 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10287 TREE_OPERAND (aref1, 0), 0))
10289 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10290 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10291 tree esz = array_ref_element_size (aref0);
10292 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10293 return fold_build2 (MULT_EXPR, type, diff,
10294 fold_convert (type, esz));
10299 if (flag_unsafe_math_optimizations
10300 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10301 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10302 && (tem = distribute_real_division (code, type, arg0, arg1)))
10303 return tem;
10305 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10306 same or one. Make sure type is not saturating.
10307 fold_plusminus_mult_expr will re-associate. */
10308 if ((TREE_CODE (arg0) == MULT_EXPR
10309 || TREE_CODE (arg1) == MULT_EXPR)
10310 && !TYPE_SATURATING (type)
10311 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10313 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10314 if (tem)
10315 return tem;
10318 goto associate;
10320 case MULT_EXPR:
10321 /* (-A) * (-B) -> A * B */
10322 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10323 return fold_build2 (MULT_EXPR, type,
10324 fold_convert (type, TREE_OPERAND (arg0, 0)),
10325 fold_convert (type, negate_expr (arg1)));
10326 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10327 return fold_build2 (MULT_EXPR, type,
10328 fold_convert (type, negate_expr (arg0)),
10329 fold_convert (type, TREE_OPERAND (arg1, 0)));
10331 if (! FLOAT_TYPE_P (type))
10333 if (integer_zerop (arg1))
10334 return omit_one_operand (type, arg1, arg0);
10335 if (integer_onep (arg1))
10336 return non_lvalue (fold_convert (type, arg0));
10337 /* Transform x * -1 into -x. Make sure to do the negation
10338 on the original operand with conversions not stripped
10339 because we can only strip non-sign-changing conversions. */
10340 if (integer_all_onesp (arg1))
10341 return fold_convert (type, negate_expr (op0));
10342 /* Transform x * -C into -x * C if x is easily negatable. */
10343 if (TREE_CODE (arg1) == INTEGER_CST
10344 && tree_int_cst_sgn (arg1) == -1
10345 && negate_expr_p (arg0)
10346 && (tem = negate_expr (arg1)) != arg1
10347 && !TREE_OVERFLOW (tem))
10348 return fold_build2 (MULT_EXPR, type,
10349 negate_expr (arg0), tem);
10351 /* (a * (1 << b)) is (a << b) */
10352 if (TREE_CODE (arg1) == LSHIFT_EXPR
10353 && integer_onep (TREE_OPERAND (arg1, 0)))
10354 return fold_build2 (LSHIFT_EXPR, type, arg0,
10355 TREE_OPERAND (arg1, 1));
10356 if (TREE_CODE (arg0) == LSHIFT_EXPR
10357 && integer_onep (TREE_OPERAND (arg0, 0)))
10358 return fold_build2 (LSHIFT_EXPR, type, arg1,
10359 TREE_OPERAND (arg0, 1));
10361 strict_overflow_p = false;
10362 if (TREE_CODE (arg1) == INTEGER_CST
10363 && 0 != (tem = extract_muldiv (op0,
10364 fold_convert (type, arg1),
10365 code, NULL_TREE,
10366 &strict_overflow_p)))
10368 if (strict_overflow_p)
10369 fold_overflow_warning (("assuming signed overflow does not "
10370 "occur when simplifying "
10371 "multiplication"),
10372 WARN_STRICT_OVERFLOW_MISC);
10373 return fold_convert (type, tem);
10376 /* Optimize z * conj(z) for integer complex numbers. */
10377 if (TREE_CODE (arg0) == CONJ_EXPR
10378 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10379 return fold_mult_zconjz (type, arg1);
10380 if (TREE_CODE (arg1) == CONJ_EXPR
10381 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10382 return fold_mult_zconjz (type, arg0);
10384 else
10386 /* Maybe fold x * 0 to 0. The expressions aren't the same
10387 when x is NaN, since x * 0 is also NaN. Nor are they the
10388 same in modes with signed zeros, since multiplying a
10389 negative value by 0 gives -0, not +0. */
10390 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10391 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10392 && real_zerop (arg1))
10393 return omit_one_operand (type, arg1, arg0);
10394 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10395 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10396 && real_onep (arg1))
10397 return non_lvalue (fold_convert (type, arg0));
10399 /* Transform x * -1.0 into -x. */
10400 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10401 && real_minus_onep (arg1))
10402 return fold_convert (type, negate_expr (arg0));
10404 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10405 the result for floating point types due to rounding, so it is applied
10406 only if -fassociative-math was specified. */
10407 if (flag_associative_math
10408 && TREE_CODE (arg0) == RDIV_EXPR
10409 && TREE_CODE (arg1) == REAL_CST
10410 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10412 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10413 arg1, 0);
10414 if (tem)
10415 return fold_build2 (RDIV_EXPR, type, tem,
10416 TREE_OPERAND (arg0, 1));
10419 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10420 if (operand_equal_p (arg0, arg1, 0))
10422 tree tem = fold_strip_sign_ops (arg0);
10423 if (tem != NULL_TREE)
10425 tem = fold_convert (type, tem);
10426 return fold_build2 (MULT_EXPR, type, tem, tem);
10430 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10431 This is not the same for NaNs or if signed zeros are
10432 involved. */
10433 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10434 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10435 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10436 && TREE_CODE (arg1) == COMPLEX_CST
10437 && real_zerop (TREE_REALPART (arg1)))
10439 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10440 if (real_onep (TREE_IMAGPART (arg1)))
10441 return fold_build2 (COMPLEX_EXPR, type,
10442 negate_expr (fold_build1 (IMAGPART_EXPR,
10443 rtype, arg0)),
10444 fold_build1 (REALPART_EXPR, rtype, arg0));
10445 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10446 return fold_build2 (COMPLEX_EXPR, type,
10447 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10448 negate_expr (fold_build1 (REALPART_EXPR,
10449 rtype, arg0)));
10452 /* Optimize z * conj(z) for floating point complex numbers.
10453 Guarded by flag_unsafe_math_optimizations as non-finite
10454 imaginary components don't produce scalar results. */
10455 if (flag_unsafe_math_optimizations
10456 && TREE_CODE (arg0) == CONJ_EXPR
10457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10458 return fold_mult_zconjz (type, arg1);
10459 if (flag_unsafe_math_optimizations
10460 && TREE_CODE (arg1) == CONJ_EXPR
10461 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10462 return fold_mult_zconjz (type, arg0);
10464 if (flag_unsafe_math_optimizations)
10466 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10467 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10469 /* Optimizations of root(...)*root(...). */
10470 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10472 tree rootfn, arg;
10473 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10474 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10476 /* Optimize sqrt(x)*sqrt(x) as x. */
10477 if (BUILTIN_SQRT_P (fcode0)
10478 && operand_equal_p (arg00, arg10, 0)
10479 && ! HONOR_SNANS (TYPE_MODE (type)))
10480 return arg00;
10482 /* Optimize root(x)*root(y) as root(x*y). */
10483 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10484 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10485 return build_call_expr (rootfn, 1, arg);
10488 /* Optimize expN(x)*expN(y) as expN(x+y). */
10489 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10491 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10492 tree arg = fold_build2 (PLUS_EXPR, type,
10493 CALL_EXPR_ARG (arg0, 0),
10494 CALL_EXPR_ARG (arg1, 0));
10495 return build_call_expr (expfn, 1, arg);
10498 /* Optimizations of pow(...)*pow(...). */
10499 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10500 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10501 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10503 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10504 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10505 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10506 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10508 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10509 if (operand_equal_p (arg01, arg11, 0))
10511 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10512 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10513 return build_call_expr (powfn, 2, arg, arg01);
10516 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10517 if (operand_equal_p (arg00, arg10, 0))
10519 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10520 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10521 return build_call_expr (powfn, 2, arg00, arg);
10525 /* Optimize tan(x)*cos(x) as sin(x). */
10526 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10527 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10528 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10529 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10530 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10531 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10532 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10533 CALL_EXPR_ARG (arg1, 0), 0))
10535 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10537 if (sinfn != NULL_TREE)
10538 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10541 /* Optimize x*pow(x,c) as pow(x,c+1). */
10542 if (fcode1 == BUILT_IN_POW
10543 || fcode1 == BUILT_IN_POWF
10544 || fcode1 == BUILT_IN_POWL)
10546 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10547 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10548 if (TREE_CODE (arg11) == REAL_CST
10549 && !TREE_OVERFLOW (arg11)
10550 && operand_equal_p (arg0, arg10, 0))
10552 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10553 REAL_VALUE_TYPE c;
10554 tree arg;
10556 c = TREE_REAL_CST (arg11);
10557 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10558 arg = build_real (type, c);
10559 return build_call_expr (powfn, 2, arg0, arg);
10563 /* Optimize pow(x,c)*x as pow(x,c+1). */
10564 if (fcode0 == BUILT_IN_POW
10565 || fcode0 == BUILT_IN_POWF
10566 || fcode0 == BUILT_IN_POWL)
10568 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10569 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10570 if (TREE_CODE (arg01) == REAL_CST
10571 && !TREE_OVERFLOW (arg01)
10572 && operand_equal_p (arg1, arg00, 0))
10574 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10575 REAL_VALUE_TYPE c;
10576 tree arg;
10578 c = TREE_REAL_CST (arg01);
10579 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10580 arg = build_real (type, c);
10581 return build_call_expr (powfn, 2, arg1, arg);
10585 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10586 if (! optimize_size
10587 && operand_equal_p (arg0, arg1, 0))
10589 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10591 if (powfn)
10593 tree arg = build_real (type, dconst2);
10594 return build_call_expr (powfn, 2, arg0, arg);
10599 goto associate;
10601 case BIT_IOR_EXPR:
10602 bit_ior:
10603 if (integer_all_onesp (arg1))
10604 return omit_one_operand (type, arg1, arg0);
10605 if (integer_zerop (arg1))
10606 return non_lvalue (fold_convert (type, arg0));
10607 if (operand_equal_p (arg0, arg1, 0))
10608 return non_lvalue (fold_convert (type, arg0));
10610 /* ~X | X is -1. */
10611 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10612 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10614 t1 = fold_convert (type, integer_zero_node);
10615 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10616 return omit_one_operand (type, t1, arg1);
10619 /* X | ~X is -1. */
10620 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10621 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10623 t1 = fold_convert (type, integer_zero_node);
10624 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10625 return omit_one_operand (type, t1, arg0);
10628 /* Canonicalize (X & C1) | C2. */
10629 if (TREE_CODE (arg0) == BIT_AND_EXPR
10630 && TREE_CODE (arg1) == INTEGER_CST
10631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10633 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10634 int width = TYPE_PRECISION (type);
10635 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10636 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10637 hi2 = TREE_INT_CST_HIGH (arg1);
10638 lo2 = TREE_INT_CST_LOW (arg1);
10640 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10641 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10642 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10644 if (width > HOST_BITS_PER_WIDE_INT)
10646 mhi = (unsigned HOST_WIDE_INT) -1
10647 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10648 mlo = -1;
10650 else
10652 mhi = 0;
10653 mlo = (unsigned HOST_WIDE_INT) -1
10654 >> (HOST_BITS_PER_WIDE_INT - width);
10657 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10658 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10659 return fold_build2 (BIT_IOR_EXPR, type,
10660 TREE_OPERAND (arg0, 0), arg1);
10662 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
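/* Illustrative example: (X & 0xF0) | 0x30 becomes
   (X & 0xC0) | 0x30; the bits of C1 already set in C2
   contribute nothing to the result.  */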
10663 hi1 &= mhi;
10664 lo1 &= mlo;
10665 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10666 return fold_build2 (BIT_IOR_EXPR, type,
10667 fold_build2 (BIT_AND_EXPR, type,
10668 TREE_OPERAND (arg0, 0),
10669 build_int_cst_wide (type,
10670 lo1 & ~lo2,
10671 hi1 & ~hi2)),
10672 arg1);
10675 /* (X & Y) | Y is (X, Y). */
10676 if (TREE_CODE (arg0) == BIT_AND_EXPR
10677 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10678 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10679 /* (X & Y) | X is (Y, X). */
10680 if (TREE_CODE (arg0) == BIT_AND_EXPR
10681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10682 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10683 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10684 /* X | (X & Y) is (Y, X). */
10685 if (TREE_CODE (arg1) == BIT_AND_EXPR
10686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10688 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10689 /* X | (Y & X) is (Y, X). */
10690 if (TREE_CODE (arg1) == BIT_AND_EXPR
10691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10692 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10693 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10695 t1 = distribute_bit_expr (code, type, arg0, arg1);
10696 if (t1 != NULL_TREE)
10697 return t1;
10699 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10701 This results in more efficient code for machines without a NAND
10702 instruction. Combine will canonicalize to the first form
10703 which will allow use of NAND instructions provided by the
10704 backend if they exist. */
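/* By De Morgan's law, ~a | ~b == ~(a & b); e.g. for 4-bit values
   a == 1100 and b == 1010 (binary), both sides equal 0111.  */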
10705 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10706 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10708 return fold_build1 (BIT_NOT_EXPR, type,
10709 build2 (BIT_AND_EXPR, type,
10710 TREE_OPERAND (arg0, 0),
10711 TREE_OPERAND (arg1, 0)));
10714 /* See if this can be simplified into a rotate first. If that
10715 is unsuccessful continue in the association code. */
10716 goto bit_rotate;
10718 case BIT_XOR_EXPR:
10719 if (integer_zerop (arg1))
10720 return non_lvalue (fold_convert (type, arg0));
10721 if (integer_all_onesp (arg1))
10722 return fold_build1 (BIT_NOT_EXPR, type, op0);
10723 if (operand_equal_p (arg0, arg1, 0))
10724 return omit_one_operand (type, integer_zero_node, arg0);
10726 /* ~X ^ X is -1. */
10727 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10728 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10730 t1 = fold_convert (type, integer_zero_node);
10731 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10732 return omit_one_operand (type, t1, arg1);
10735 /* X ^ ~X is -1. */
10736 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10737 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10739 t1 = fold_convert (type, integer_zero_node);
10740 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10741 return omit_one_operand (type, t1, arg0);
10744 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10745 with a constant, and the two constants have no bits in common,
10746 we should treat this as a BIT_IOR_EXPR since this may produce more
10747 simplifications. */
10748 if (TREE_CODE (arg0) == BIT_AND_EXPR
10749 && TREE_CODE (arg1) == BIT_AND_EXPR
10750 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10751 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10752 && integer_zerop (const_binop (BIT_AND_EXPR,
10753 TREE_OPERAND (arg0, 1),
10754 TREE_OPERAND (arg1, 1), 0)))
10756 code = BIT_IOR_EXPR;
10757 goto bit_ior;
10760 /* (X | Y) ^ X -> Y & ~X */
10761 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10762 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10764 tree t2 = TREE_OPERAND (arg0, 1);
10765 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10766 arg1);
10767 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10768 fold_convert (type, t1));
10769 return t1;
10772 /* (Y | X) ^ X -> Y & ~X */
10773 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10774 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10776 tree t2 = TREE_OPERAND (arg0, 0);
10777 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10778 arg1);
10779 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10780 fold_convert (type, t1));
10781 return t1;
10784 /* X ^ (X | Y) -> Y & ~X */
10785 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10786 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10788 tree t2 = TREE_OPERAND (arg1, 1);
10789 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10790 arg0);
10791 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10792 fold_convert (type, t1));
10793 return t1;
10796 /* X ^ (Y | X) -> Y & ~X */
10797 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10798 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10800 tree t2 = TREE_OPERAND (arg1, 0);
10801 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10802 arg0);
10803 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10804 fold_convert (type, t1));
10805 return t1;
10808 /* Convert ~X ^ ~Y to X ^ Y. */
10809 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10810 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10811 return fold_build2 (code, type,
10812 fold_convert (type, TREE_OPERAND (arg0, 0)),
10813 fold_convert (type, TREE_OPERAND (arg1, 0)));
10815 /* Convert ~X ^ C to X ^ ~C. */
10816 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10817 && TREE_CODE (arg1) == INTEGER_CST)
10818 return fold_build2 (code, type,
10819 fold_convert (type, TREE_OPERAND (arg0, 0)),
10820 fold_build1 (BIT_NOT_EXPR, type, arg1));
10822 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10823 if (TREE_CODE (arg0) == BIT_AND_EXPR
10824 && integer_onep (TREE_OPERAND (arg0, 1))
10825 && integer_onep (arg1))
10826 return fold_build2 (EQ_EXPR, type, arg0,
10827 build_int_cst (TREE_TYPE (arg0), 0));
10829 /* Fold (X & Y) ^ Y as ~X & Y. */
10830 if (TREE_CODE (arg0) == BIT_AND_EXPR
10831 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10833 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10834 return fold_build2 (BIT_AND_EXPR, type,
10835 fold_build1 (BIT_NOT_EXPR, type, tem),
10836 fold_convert (type, arg1));
10838 /* Fold (X & Y) ^ X as ~Y & X. */
10839 if (TREE_CODE (arg0) == BIT_AND_EXPR
10840 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10841 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10843 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10844 return fold_build2 (BIT_AND_EXPR, type,
10845 fold_build1 (BIT_NOT_EXPR, type, tem),
10846 fold_convert (type, arg1));
10848 /* Fold X ^ (X & Y) as X & ~Y. */
10849 if (TREE_CODE (arg1) == BIT_AND_EXPR
10850 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10852 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10853 return fold_build2 (BIT_AND_EXPR, type,
10854 fold_convert (type, arg0),
10855 fold_build1 (BIT_NOT_EXPR, type, tem));
10857 /* Fold X ^ (Y & X) as ~Y & X. */
10858 if (TREE_CODE (arg1) == BIT_AND_EXPR
10859 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10860 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10862 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10863 return fold_build2 (BIT_AND_EXPR, type,
10864 fold_build1 (BIT_NOT_EXPR, type, tem),
10865 fold_convert (type, arg0));
10868 /* See if this can be simplified into a rotate first.  If that
10869 is unsuccessful, continue with the association code.  */
10870 goto bit_rotate;
10872 case BIT_AND_EXPR:
10873 if (integer_all_onesp (arg1))
10874 return non_lvalue (fold_convert (type, arg0));
10875 if (integer_zerop (arg1))
10876 return omit_one_operand (type, arg1, arg0);
10877 if (operand_equal_p (arg0, arg1, 0))
10878 return non_lvalue (fold_convert (type, arg0));
10880 /* ~X & X is always zero. */
10881 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10882 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10883 return omit_one_operand (type, integer_zero_node, arg1);
10885 /* X & ~X is always zero. */
10886 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10887 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10888 return omit_one_operand (type, integer_zero_node, arg0);
10890 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10891 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10892 && TREE_CODE (arg1) == INTEGER_CST
10893 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10894 return fold_build2 (BIT_IOR_EXPR, type,
10895 fold_build2 (BIT_AND_EXPR, type,
10896 TREE_OPERAND (arg0, 0), arg1),
10897 fold_build2 (BIT_AND_EXPR, type,
10898 TREE_OPERAND (arg0, 1), arg1));
10900 /* (X | Y) & Y is (X, Y). */
10901 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10902 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10903 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10904 /* (X | Y) & X is (Y, X). */
10905 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10906 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10907 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10908 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10909 /* X & (X | Y) is (Y, X). */
10910 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10911 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10912 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10913 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10914 /* X & (Y | X) is (Y, X). */
10915 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10916 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10917 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10918 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10920 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10921 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10922 && integer_onep (TREE_OPERAND (arg0, 1))
10923 && integer_onep (arg1))
10925 tem = TREE_OPERAND (arg0, 0);
10926 return fold_build2 (EQ_EXPR, type,
10927 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10928 build_int_cst (TREE_TYPE (tem), 1)),
10929 build_int_cst (TREE_TYPE (tem), 0));
10931 /* Fold ~X & 1 as (X & 1) == 0. */
10932 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10933 && integer_onep (arg1))
10935 tem = TREE_OPERAND (arg0, 0);
10936 return fold_build2 (EQ_EXPR, type,
10937 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10938 build_int_cst (TREE_TYPE (tem), 1)),
10939 build_int_cst (TREE_TYPE (tem), 0));
10942 /* Fold (X ^ Y) & Y as ~X & Y. */
10943 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10944 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10946 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10947 return fold_build2 (BIT_AND_EXPR, type,
10948 fold_build1 (BIT_NOT_EXPR, type, tem),
10949 fold_convert (type, arg1));
10951 /* Fold (X ^ Y) & X as ~Y & X. */
10952 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10953 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10954 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10956 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10957 return fold_build2 (BIT_AND_EXPR, type,
10958 fold_build1 (BIT_NOT_EXPR, type, tem),
10959 fold_convert (type, arg1));
10961 /* Fold X & (X ^ Y) as X & ~Y. */
10962 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10963 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10965 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10966 return fold_build2 (BIT_AND_EXPR, type,
10967 fold_convert (type, arg0),
10968 fold_build1 (BIT_NOT_EXPR, type, tem));
10970 /* Fold X & (Y ^ X) as ~Y & X. */
10971 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10972 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10973 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10975 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10976 return fold_build2 (BIT_AND_EXPR, type,
10977 fold_build1 (BIT_NOT_EXPR, type, tem),
10978 fold_convert (type, arg0));
10981 t1 = distribute_bit_expr (code, type, arg0, arg1);
10982 if (t1 != NULL_TREE)
10983 return t1;
10984 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10985 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10986 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10988 unsigned int prec
10989 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10991 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10992 && (~TREE_INT_CST_LOW (arg1)
10993 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10994 return fold_convert (type, TREE_OPERAND (arg0, 0));
10997 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10999 This results in more efficient code for machines without a NOR
11000 instruction.  Combine will canonicalize to the first form,
11001 which will allow use of NOR instructions provided by the
11002 backend if they exist. */
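      /* In C terms: ~a & ~b is rewritten as ~(a | b).  */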
11003 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11004 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11006 return fold_build1 (BIT_NOT_EXPR, type,
11007 build2 (BIT_IOR_EXPR, type,
11008 TREE_OPERAND (arg0, 0),
11009 TREE_OPERAND (arg1, 0)));
11012 /* If arg0 is derived from the address of an object or function, we may
11013 be able to fold this expression using the object or function's
11014 alignment. */
11015 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11017 unsigned HOST_WIDE_INT modulus, residue;
11018 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11020 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11022 /* This works because modulus is a power of 2. If this weren't the
11023 case, we'd have to replace it by its greatest power-of-2
11024 divisor: modulus & -modulus. */
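      /* For example, if ARG0 is known to equal 4 modulo 16
         (modulus 16, residue 4), then ARG0 & 7 folds to the
         constant 4, since 7 < 16.  */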
11025 if (low < modulus)
11026 return build_int_cst (type, residue & low);
11029 goto associate;
11031 case RDIV_EXPR:
11032 /* Don't touch a floating-point divide by zero unless the mode
11033 of the constant can represent infinity. */
11034 if (TREE_CODE (arg1) == REAL_CST
11035 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11036 && real_zerop (arg1))
11037 return NULL_TREE;
11039 /* Optimize A / A to 1.0 if we don't care about
11040 NaNs or Infinities. Skip the transformation
11041 for non-real operands. */
11042 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11043 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11044 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11045 && operand_equal_p (arg0, arg1, 0))
11047 tree r = build_real (TREE_TYPE (arg0), dconst1);
11049 return omit_two_operands (type, r, arg0, arg1);
11052 /* The complex version of the above A / A optimization. */
11053 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11054 && operand_equal_p (arg0, arg1, 0))
11056 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11057 if (! HONOR_NANS (TYPE_MODE (elem_type))
11058 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11060 tree r = build_real (elem_type, dconst1);
11061 /* omit_two_operands will call fold_convert for us. */
11062 return omit_two_operands (type, r, arg0, arg1);
11066 /* (-A) / (-B) -> A / B */
11067 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11068 return fold_build2 (RDIV_EXPR, type,
11069 TREE_OPERAND (arg0, 0),
11070 negate_expr (arg1));
11071 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11072 return fold_build2 (RDIV_EXPR, type,
11073 negate_expr (arg0),
11074 TREE_OPERAND (arg1, 0));
11076 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11077 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11078 && real_onep (arg1))
11079 return non_lvalue (fold_convert (type, arg0));
11081 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11082 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11083 && real_minus_onep (arg1))
11084 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11086 /* If ARG1 is a constant, we can convert this to a multiply by the
11087 reciprocal. This does not have the same rounding properties,
11088 so only do this if -freciprocal-math. We can actually
11089 always safely do it if ARG1 is a power of two, but it's hard to
11090 tell if it is or not in a portable manner. */
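      /* For example, X / 2.0 folds to X * 0.5 whenever we optimize,
         because 0.5 is an exact reciprocal; X / 3.0 becomes
         X * (1.0/3.0) only under -freciprocal-math, since that
         reciprocal rounds.  */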
11091 if (TREE_CODE (arg1) == REAL_CST)
11093 if (flag_reciprocal_math
11094 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11095 arg1, 0)))
11096 return fold_build2 (MULT_EXPR, type, arg0, tem);
11097 /* Find the reciprocal if optimizing and the result is exact. */
11098 if (optimize)
11100 REAL_VALUE_TYPE r;
11101 r = TREE_REAL_CST (arg1);
11102 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11104 tem = build_real (type, r);
11105 return fold_build2 (MULT_EXPR, type,
11106 fold_convert (type, arg0), tem);
11110 /* Convert A/B/C to A/(B*C). */
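      /* For example, x / y / 3.0 becomes x / (y * 3.0), trading a
         division for a multiplication.  */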
11111 if (flag_reciprocal_math
11112 && TREE_CODE (arg0) == RDIV_EXPR)
11113 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11114 fold_build2 (MULT_EXPR, type,
11115 TREE_OPERAND (arg0, 1), arg1));
11117 /* Convert A/(B/C) to (A/B)*C. */
11118 if (flag_reciprocal_math
11119 && TREE_CODE (arg1) == RDIV_EXPR)
11120 return fold_build2 (MULT_EXPR, type,
11121 fold_build2 (RDIV_EXPR, type, arg0,
11122 TREE_OPERAND (arg1, 0)),
11123 TREE_OPERAND (arg1, 1));
11125 /* Convert C1/(X*C2) into (C1/C2)/X. */
11126 if (flag_reciprocal_math
11127 && TREE_CODE (arg1) == MULT_EXPR
11128 && TREE_CODE (arg0) == REAL_CST
11129 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11131 tree tem = const_binop (RDIV_EXPR, arg0,
11132 TREE_OPERAND (arg1, 1), 0);
11133 if (tem)
11134 return fold_build2 (RDIV_EXPR, type, tem,
11135 TREE_OPERAND (arg1, 0));
11138 if (flag_unsafe_math_optimizations)
11140 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11141 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11143 /* Optimize sin(x)/cos(x) as tan(x). */
11144 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11145 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11146 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11147 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11148 CALL_EXPR_ARG (arg1, 0), 0))
11150 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11152 if (tanfn != NULL_TREE)
11153 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11156 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11157 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11158 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11159 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11160 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11161 CALL_EXPR_ARG (arg1, 0), 0))
11163 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11165 if (tanfn != NULL_TREE)
11167 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11168 return fold_build2 (RDIV_EXPR, type,
11169 build_real (type, dconst1), tmp);
11173 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11174 NaNs or Infinities. */
11175 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11176 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11177 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11179 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11180 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11182 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11183 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11184 && operand_equal_p (arg00, arg01, 0))
11186 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11188 if (cosfn != NULL_TREE)
11189 return build_call_expr (cosfn, 1, arg00);
11193 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11194 NaNs or Infinities. */
11195 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11196 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11197 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11199 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11200 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11202 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11203 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11204 && operand_equal_p (arg00, arg01, 0))
11206 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11208 if (cosfn != NULL_TREE)
11210 tree tmp = build_call_expr (cosfn, 1, arg00);
11211 return fold_build2 (RDIV_EXPR, type,
11212 build_real (type, dconst1),
11213 tmp);
11218 /* Optimize pow(x,c)/x as pow(x,c-1). */
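      /* For example, pow (x, 3.0) / x becomes pow (x, 2.0).  */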
11219 if (fcode0 == BUILT_IN_POW
11220 || fcode0 == BUILT_IN_POWF
11221 || fcode0 == BUILT_IN_POWL)
11223 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11224 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11225 if (TREE_CODE (arg01) == REAL_CST
11226 && !TREE_OVERFLOW (arg01)
11227 && operand_equal_p (arg1, arg00, 0))
11229 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11230 REAL_VALUE_TYPE c;
11231 tree arg;
11233 c = TREE_REAL_CST (arg01);
11234 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11235 arg = build_real (type, c);
11236 return build_call_expr (powfn, 2, arg1, arg);
11240 /* Optimize a/root(b/c) into a*root(c/b). */
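      /* For example, a / sqrt (b / c) becomes a * sqrt (c / b).  */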
11241 if (BUILTIN_ROOT_P (fcode1))
11243 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11245 if (TREE_CODE (rootarg) == RDIV_EXPR)
11247 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11248 tree b = TREE_OPERAND (rootarg, 0);
11249 tree c = TREE_OPERAND (rootarg, 1);
11251 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11253 tmp = build_call_expr (rootfn, 1, tmp);
11254 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11258 /* Optimize x/expN(y) into x*expN(-y). */
11259 if (BUILTIN_EXPONENT_P (fcode1))
11261 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11262 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11263 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11264 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11267 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11268 if (fcode1 == BUILT_IN_POW
11269 || fcode1 == BUILT_IN_POWF
11270 || fcode1 == BUILT_IN_POWL)
11272 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11273 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11274 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11275 tree neg11 = fold_convert (type, negate_expr (arg11));
11276 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11277 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11280 return NULL_TREE;
11282 case TRUNC_DIV_EXPR:
11283 case FLOOR_DIV_EXPR:
11284 /* Simplify A / (B << N) where A and B are positive and B is
11285 a power of 2, to A >> (N + log2(B)). */
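      /* For example, for unsigned A, A / (4 << N) becomes
         A >> (N + 2), since log2(4) == 2.  */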
11286 strict_overflow_p = false;
11287 if (TREE_CODE (arg1) == LSHIFT_EXPR
11288 && (TYPE_UNSIGNED (type)
11289 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
11291 tree sval = TREE_OPERAND (arg1, 0);
11292 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11294 tree sh_cnt = TREE_OPERAND (arg1, 1);
11295 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11297 if (strict_overflow_p)
11298 fold_overflow_warning (("assuming signed overflow does not "
11299 "occur when simplifying A / (B << N)"),
11300 WARN_STRICT_OVERFLOW_MISC);
11302 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11303 sh_cnt, build_int_cst (NULL_TREE, pow2));
11304 return fold_build2 (RSHIFT_EXPR, type,
11305 fold_convert (type, arg0), sh_cnt);
11308 /* Fall through.  */
11310 case ROUND_DIV_EXPR:
11311 case CEIL_DIV_EXPR:
11312 case EXACT_DIV_EXPR:
11313 if (integer_onep (arg1))
11314 return non_lvalue (fold_convert (type, arg0));
11315 if (integer_zerop (arg1))
11316 return NULL_TREE;
11317 /* X / -1 is -X. */
11318 if (!TYPE_UNSIGNED (type)
11319 && TREE_CODE (arg1) == INTEGER_CST
11320 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11321 && TREE_INT_CST_HIGH (arg1) == -1)
11322 return fold_convert (type, negate_expr (arg0));
11324 /* Convert -A / -B to A / B when the type is signed and overflow is
11325 undefined. */
11326 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11327 && TREE_CODE (arg0) == NEGATE_EXPR
11328 && negate_expr_p (arg1))
11330 if (INTEGRAL_TYPE_P (type))
11331 fold_overflow_warning (("assuming signed overflow does not occur "
11332 "when distributing negation across "
11333 "division"),
11334 WARN_STRICT_OVERFLOW_MISC);
11335 return fold_build2 (code, type,
11336 fold_convert (type, TREE_OPERAND (arg0, 0)),
11337 negate_expr (arg1));
11339 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11340 && TREE_CODE (arg1) == NEGATE_EXPR
11341 && negate_expr_p (arg0))
11343 if (INTEGRAL_TYPE_P (type))
11344 fold_overflow_warning (("assuming signed overflow does not occur "
11345 "when distributing negation across "
11346 "division"),
11347 WARN_STRICT_OVERFLOW_MISC);
11348 return fold_build2 (code, type, negate_expr (arg0),
11349 TREE_OPERAND (arg1, 0));
11352 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11353 operation, EXACT_DIV_EXPR.
11355 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11356 At one time the others generated faster code; it's not clear if they
11357 still do after the last round of changes to the DIV code in expmed.c. */
11358 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11359 && multiple_of_p (type, arg0, arg1))
11360 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11362 strict_overflow_p = false;
11363 if (TREE_CODE (arg1) == INTEGER_CST
11364 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11365 &strict_overflow_p)))
11367 if (strict_overflow_p)
11368 fold_overflow_warning (("assuming signed overflow does not occur "
11369 "when simplifying division"),
11370 WARN_STRICT_OVERFLOW_MISC);
11371 return fold_convert (type, tem);
11374 return NULL_TREE;
11376 case CEIL_MOD_EXPR:
11377 case FLOOR_MOD_EXPR:
11378 case ROUND_MOD_EXPR:
11379 case TRUNC_MOD_EXPR:
11380 /* X % 1 is always zero, but be sure to preserve any side
11381 effects in X. */
11382 if (integer_onep (arg1))
11383 return omit_one_operand (type, integer_zero_node, arg0);
11385 /* For X % 0, return the expression unchanged so that we get the
11386 proper warnings and errors. */
11387 if (integer_zerop (arg1))
11388 return NULL_TREE;
11390 /* 0 % X is always zero, but be sure to preserve any side
11391 effects in X. Place this after checking for X == 0. */
11392 if (integer_zerop (arg0))
11393 return omit_one_operand (type, integer_zero_node, arg1);
11395 /* X % -1 is zero. */
11396 if (!TYPE_UNSIGNED (type)
11397 && TREE_CODE (arg1) == INTEGER_CST
11398 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11399 && TREE_INT_CST_HIGH (arg1) == -1)
11400 return omit_one_operand (type, integer_zero_node, arg0);
11402 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11403 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11404 strict_overflow_p = false;
11405 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11406 && (TYPE_UNSIGNED (type)
11407 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
11409 tree c = arg1;
11410 /* Also optimize A % (C << N) where C is a power of 2,
11411 to A & ((C << N) - 1). */
11412 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11413 c = TREE_OPERAND (arg1, 0);
11415 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11417 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11418 build_int_cst (TREE_TYPE (arg1), 1));
11419 if (strict_overflow_p)
11420 fold_overflow_warning (("assuming signed overflow does not "
11421 "occur when simplifying "
11422 "X % (power of two)"),
11423 WARN_STRICT_OVERFLOW_MISC);
11424 return fold_build2 (BIT_AND_EXPR, type,
11425 fold_convert (type, arg0),
11426 fold_convert (type, mask));
11430 /* X % -C is the same as X % C. */
11431 if (code == TRUNC_MOD_EXPR
11432 && !TYPE_UNSIGNED (type)
11433 && TREE_CODE (arg1) == INTEGER_CST
11434 && !TREE_OVERFLOW (arg1)
11435 && TREE_INT_CST_HIGH (arg1) < 0
11436 && !TYPE_OVERFLOW_TRAPS (type)
11437 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11438 && !sign_bit_p (arg1, arg1))
11439 return fold_build2 (code, type, fold_convert (type, arg0),
11440 fold_convert (type, negate_expr (arg1)));
11442 /* X % -Y is the same as X % Y. */
11443 if (code == TRUNC_MOD_EXPR
11444 && !TYPE_UNSIGNED (type)
11445 && TREE_CODE (arg1) == NEGATE_EXPR
11446 && !TYPE_OVERFLOW_TRAPS (type))
11447 return fold_build2 (code, type, fold_convert (type, arg0),
11448 fold_convert (type, TREE_OPERAND (arg1, 0)));
11450 if (TREE_CODE (arg1) == INTEGER_CST
11451 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11452 &strict_overflow_p)))
11454 if (strict_overflow_p)
11455 fold_overflow_warning (("assuming signed overflow does not occur "
11456 "when simplifying modulos"),
11457 WARN_STRICT_OVERFLOW_MISC);
11458 return fold_convert (type, tem);
11461 return NULL_TREE;
11463 case LROTATE_EXPR:
11464 case RROTATE_EXPR:
11465 if (integer_all_onesp (arg0))
11466 return omit_one_operand (type, arg0, arg1);
11467 goto shift;
11469 case RSHIFT_EXPR:
11470 /* Optimize -1 >> x for arithmetic right shifts. */
11471 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11472 return omit_one_operand (type, arg0, arg1);
11473 /* ... fall through ... */
11475 case LSHIFT_EXPR:
11476 shift:
11477 if (integer_zerop (arg1))
11478 return non_lvalue (fold_convert (type, arg0));
11479 if (integer_zerop (arg0))
11480 return omit_one_operand (type, arg0, arg1);
11482 /* Since a negative shift count is not well-defined,
11483 don't try to compute it in the compiler. */
11484 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11485 return NULL_TREE;
11487 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
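      /* For example, (X << 3) << 5 becomes X << 8.  If the combined
         count reaches the type's precision, a shift folds to 0 (or to
         a precision-1 arithmetic right shift), while a rotate count
         wraps around.  */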
11488 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11489 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11490 && host_integerp (TREE_OPERAND (arg0, 1), false)
11491 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11493 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11494 + TREE_INT_CST_LOW (arg1));
11496 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11497 being well defined. */
11498 if (low >= TYPE_PRECISION (type))
11500 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11501 low = low % TYPE_PRECISION (type);
11502 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11503 return build_int_cst (type, 0);
11504 else
11505 low = TYPE_PRECISION (type) - 1;
11508 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11509 build_int_cst (type, low));
11512 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11513 into x & ((unsigned)-1 >> c) for unsigned types. */
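      /* For example, for 32-bit unsigned X, (X >> 4) << 4 becomes
         X & 0xfffffff0, and (X << 4) >> 4 becomes X & 0x0fffffff.  */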
11514 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11515 || (TYPE_UNSIGNED (type)
11516 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11517 && host_integerp (arg1, false)
11518 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11519 && host_integerp (TREE_OPERAND (arg0, 1), false)
11520 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11522 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11523 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11524 tree lshift;
11525 tree arg00;
11527 if (low0 == low1)
11529 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11531 lshift = build_int_cst (type, -1);
11532 lshift = int_const_binop (code, lshift, arg1, 0);
11534 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11538 /* Rewrite an LROTATE_EXPR by a constant into an
11539 RROTATE_EXPR by a new constant. */
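      /* For example, in a 32-bit mode a rotate left by 8 becomes a
         rotate right by 24.  */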
11540 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11542 tree tem = build_int_cst (TREE_TYPE (arg1),
11543 GET_MODE_BITSIZE (TYPE_MODE (type)));
11544 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11545 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
11548 /* If we have a rotate of a bit operation with the rotate count and
11549 the second operand of the bit operation both constant,
11550 permute the two operations. */
11551 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11552 && (TREE_CODE (arg0) == BIT_AND_EXPR
11553 || TREE_CODE (arg0) == BIT_IOR_EXPR
11554 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11555 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11556 return fold_build2 (TREE_CODE (arg0), type,
11557 fold_build2 (code, type,
11558 TREE_OPERAND (arg0, 0), arg1),
11559 fold_build2 (code, type,
11560 TREE_OPERAND (arg0, 1), arg1));
11562 /* Two consecutive rotates adding up to the width of the mode can
11563 be ignored. */
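      /* For example, rotating a 32-bit X right by 8 and then by 24
         leaves X unchanged.  */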
11564 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11565 && TREE_CODE (arg0) == RROTATE_EXPR
11566 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11567 && TREE_INT_CST_HIGH (arg1) == 0
11568 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11569 && ((TREE_INT_CST_LOW (arg1)
11570 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11571 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11572 return TREE_OPERAND (arg0, 0);
11574 return NULL_TREE;
11576 case MIN_EXPR:
11577 if (operand_equal_p (arg0, arg1, 0))
11578 return omit_one_operand (type, arg0, arg1);
11579 if (INTEGRAL_TYPE_P (type)
11580 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11581 return omit_one_operand (type, arg1, arg0);
11582 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11583 if (tem)
11584 return tem;
11585 goto associate;
11587 case MAX_EXPR:
11588 if (operand_equal_p (arg0, arg1, 0))
11589 return omit_one_operand (type, arg0, arg1);
11590 if (INTEGRAL_TYPE_P (type)
11591 && TYPE_MAX_VALUE (type)
11592 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11593 return omit_one_operand (type, arg1, arg0);
11594 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11595 if (tem)
11596 return tem;
11597 goto associate;
11599 case TRUTH_ANDIF_EXPR:
11600 /* Note that the operands of this must be ints
11601 and their values must be 0 or 1.
11602 ("true" is a fixed value perhaps depending on the language.) */
11603 /* If first arg is constant zero, return it. */
11604 if (integer_zerop (arg0))
11605 return fold_convert (type, arg0);
11606 case TRUTH_AND_EXPR:
11607 /* If either arg is constant true, drop it. */
11608 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11609 return non_lvalue (fold_convert (type, arg1));
11610 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11611 /* Preserve sequence points. */
11612 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11613 return non_lvalue (fold_convert (type, arg0));
11614 /* If second arg is constant zero, result is zero, but first arg
11615 must be evaluated. */
11616 if (integer_zerop (arg1))
11617 return omit_one_operand (type, arg1, arg0);
11618 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11619 case will be handled here. */
11620 if (integer_zerop (arg0))
11621 return omit_one_operand (type, arg0, arg1);
11623 /* !X && X is always false. */
11624 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11625 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11626 return omit_one_operand (type, integer_zero_node, arg1);
11627 /* X && !X is always false. */
11628 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11629 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11630 return omit_one_operand (type, integer_zero_node, arg0);
11632 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11633 means A >= Y && A != MAX, but in this case we know that
11634 A < X <= MAX. */
11636 if (!TREE_SIDE_EFFECTS (arg0)
11637 && !TREE_SIDE_EFFECTS (arg1))
11639 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11640 if (tem && !operand_equal_p (tem, arg0, 0))
11641 return fold_build2 (code, type, tem, arg1);
11643 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11644 if (tem && !operand_equal_p (tem, arg1, 0))
11645 return fold_build2 (code, type, arg0, tem);
11648 truth_andor:
11649 /* We only do these simplifications if we are optimizing. */
11650 if (!optimize)
11651 return NULL_TREE;
11653 /* Check for things like (A || B) && (A || C). We can convert this
11654 to A || (B && C). Note that either operator can be any of the four
11655 truth and/or operations and the transformation will still be
11656 valid. Also note that we only care about order for the
11657 ANDIF and ORIF operators. If B contains side effects, this
11658 might change the truth-value of A. */
11659 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11660 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11661 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11662 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11663 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11666 tree a00 = TREE_OPERAND (arg0, 0);
11667 tree a01 = TREE_OPERAND (arg0, 1);
11668 tree a10 = TREE_OPERAND (arg1, 0);
11669 tree a11 = TREE_OPERAND (arg1, 1);
11670 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11671 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11672 && (code == TRUTH_AND_EXPR
11673 || code == TRUTH_OR_EXPR));
11675 if (operand_equal_p (a00, a10, 0))
11676 return fold_build2 (TREE_CODE (arg0), type, a00,
11677 fold_build2 (code, type, a01, a11));
11678 else if (commutative && operand_equal_p (a00, a11, 0))
11679 return fold_build2 (TREE_CODE (arg0), type, a00,
11680 fold_build2 (code, type, a01, a10));
11681 else if (commutative && operand_equal_p (a01, a10, 0))
11682 return fold_build2 (TREE_CODE (arg0), type, a01,
11683 fold_build2 (code, type, a00, a11));
11685 /* This case is tricky because we must either have commutative
11686 operators or else A10 must not have side-effects. */
11688 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11689 && operand_equal_p (a01, a11, 0))
11690 return fold_build2 (TREE_CODE (arg0), type,
11691 fold_build2 (code, type, a00, a10),
11692 a01);
11695 /* See if we can build a range comparison. */
11696 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11697 return tem;
11699 /* Check for the possibility of merging component references. If our
11700 lhs is another similar operation, try to merge its rhs with our
11701 rhs. Then try to merge our lhs and rhs. */
11702 if (TREE_CODE (arg0) == code
11703 && 0 != (tem = fold_truthop (code, type,
11704 TREE_OPERAND (arg0, 1), arg1)))
11705 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11707 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11708 return tem;
11710 return NULL_TREE;
11712 case TRUTH_ORIF_EXPR:
11713 /* Note that the operands of this must be ints
11714 and their values must be 0 or true.
11715 ("true" is a fixed value perhaps depending on the language.) */
11716 /* If first arg is constant true, return it. */
11717 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11718 return fold_convert (type, arg0);
11719 case TRUTH_OR_EXPR:
11720 /* If either arg is constant zero, drop it. */
11721 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11722 return non_lvalue (fold_convert (type, arg1));
11723 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11724 /* Preserve sequence points. */
11725 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11726 return non_lvalue (fold_convert (type, arg0));
11727 /* If second arg is constant true, result is true, but we must
11728 evaluate first arg. */
11729 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11730 return omit_one_operand (type, arg1, arg0);
11731 /* Likewise for first arg, but note this only occurs here for
11732 TRUTH_OR_EXPR. */
11733 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11734 return omit_one_operand (type, arg0, arg1);
11736 /* !X || X is always true. */
11737 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11739 return omit_one_operand (type, integer_one_node, arg1);
11740 /* X || !X is always true. */
11741 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11742 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11743 return omit_one_operand (type, integer_one_node, arg0);
11745 goto truth_andor;
11747 case TRUTH_XOR_EXPR:
11748 /* If the second arg is constant zero, drop it. */
11749 if (integer_zerop (arg1))
11750 return non_lvalue (fold_convert (type, arg0));
11751 /* If the second arg is constant true, this is a logical inversion. */
11752 if (integer_onep (arg1))
11754 /* Only call invert_truthvalue if operand is a truth value. */
11755 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11756 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11757 else
11758 tem = invert_truthvalue (arg0);
11759 return non_lvalue (fold_convert (type, tem));
11761 /* Identical arguments cancel to zero. */
11762 if (operand_equal_p (arg0, arg1, 0))
11763 return omit_one_operand (type, integer_zero_node, arg0);
11765 /* !X ^ X is always true. */
11766 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11767 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11768 return omit_one_operand (type, integer_one_node, arg1);
11770 /* X ^ !X is always true. */
11771 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11772 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11773 return omit_one_operand (type, integer_one_node, arg0);
11775 return NULL_TREE;
11777 case EQ_EXPR:
11778 case NE_EXPR:
11779 tem = fold_comparison (code, type, op0, op1);
11780 if (tem != NULL_TREE)
11781 return tem;
11783 /* bool_var != 0 becomes bool_var. */
11784 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11785 && code == NE_EXPR)
11786 return non_lvalue (fold_convert (type, arg0));
11788 /* bool_var == 1 becomes bool_var. */
11789 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11790 && code == EQ_EXPR)
11791 return non_lvalue (fold_convert (type, arg0));
11793 /* bool_var != 1 becomes !bool_var. */
11794 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11795 && code == NE_EXPR)
11796 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11798 /* bool_var == 0 becomes !bool_var. */
11799 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11800 && code == EQ_EXPR)
11801 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11803 /* If this is an equality comparison of the address of two non-weak,
11804 unaliased symbols neither of which is extern (since we do not
11805 have access to attributes for externs), then we know the result. */
11806 if (TREE_CODE (arg0) == ADDR_EXPR
11807 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11808 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11809 && ! lookup_attribute ("alias",
11810 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11811 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11812 && TREE_CODE (arg1) == ADDR_EXPR
11813 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11814 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11815 && ! lookup_attribute ("alias",
11816 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11817 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11819 /* We know that we're looking at the address of two
11820 non-weak, unaliased, static _DECL nodes.
11822 It is both wasteful and incorrect to call operand_equal_p
11823 to compare the two ADDR_EXPR nodes. It is wasteful in that
11824 all we need to do is test pointer equality for the arguments
11825 to the two ADDR_EXPR nodes. It is incorrect to use
11826 operand_equal_p as that function is NOT equivalent to a
11827 C equality test. It can in fact return false for two
11828 objects which would test as equal using the C equality
11829 operator. */
11830 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11831 return constant_boolean_node (equal
11832 ? code == EQ_EXPR : code != EQ_EXPR,
11833 type);
11836 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11837 a MINUS_EXPR of a constant, we can convert it into a comparison with
11838 a revised constant as long as no overflow occurs. */
11839 if (TREE_CODE (arg1) == INTEGER_CST
11840 && (TREE_CODE (arg0) == PLUS_EXPR
11841 || TREE_CODE (arg0) == MINUS_EXPR)
11842 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11843 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11844 ? MINUS_EXPR : PLUS_EXPR,
11845 fold_convert (TREE_TYPE (arg0), arg1),
11846 TREE_OPERAND (arg0, 1), 0))
11847 && !TREE_OVERFLOW (tem))
11848 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11850 /* Similarly for a NEGATE_EXPR. */
11851 if (TREE_CODE (arg0) == NEGATE_EXPR
11852 && TREE_CODE (arg1) == INTEGER_CST
11853 && 0 != (tem = negate_expr (arg1))
11854 && TREE_CODE (tem) == INTEGER_CST
11855 && !TREE_OVERFLOW (tem))
11856 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11858 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11859 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11860 && TREE_CODE (arg1) == INTEGER_CST
11861 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11862 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11863 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11864 fold_convert (TREE_TYPE (arg0), arg1),
11865 TREE_OPERAND (arg0, 1)));
11867 /* Transform comparisons of the form X +- C CMP X. */
11868 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11869 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11871 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11872 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11874 tree cst = TREE_OPERAND (arg0, 1);
11876 if (code == EQ_EXPR
11877 && !integer_zerop (cst))
11878 return omit_two_operands (type, boolean_false_node,
11879 TREE_OPERAND (arg0, 0), arg1);
11880 else
11881 return omit_two_operands (type, boolean_true_node,
11882 TREE_OPERAND (arg0, 0), arg1);
11885 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11886 for !=. Don't do this for ordered comparisons due to overflow. */
11887 if (TREE_CODE (arg0) == MINUS_EXPR
11888 && integer_zerop (arg1))
11889 return fold_build2 (code, type,
11890 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11892 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11893 if (TREE_CODE (arg0) == ABS_EXPR
11894 && (integer_zerop (arg1) || real_zerop (arg1)))
11895 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11897 /* If this is an EQ or NE comparison with zero and ARG0 is
11898 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11899 two operations, but the latter can be done in one less insn
11900 on machines that have only two-operand insns or on which a
11901 constant cannot be the first operand. */
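      /* For example, ((1 << n) & flags) != 0 becomes
         ((flags >> n) & 1) != 0.  */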
11902 if (TREE_CODE (arg0) == BIT_AND_EXPR
11903 && integer_zerop (arg1))
11905 tree arg00 = TREE_OPERAND (arg0, 0);
11906 tree arg01 = TREE_OPERAND (arg0, 1);
11907 if (TREE_CODE (arg00) == LSHIFT_EXPR
11908 && integer_onep (TREE_OPERAND (arg00, 0)))
11909 return
11910 fold_build2 (code, type,
11911 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11912 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11913 arg01, TREE_OPERAND (arg00, 1)),
11914 fold_convert (TREE_TYPE (arg0),
11915 integer_one_node)),
11916 arg1);
11917 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11918 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11919 return
11920 fold_build2 (code, type,
11921 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11922 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11923 arg00, TREE_OPERAND (arg01, 1)),
11924 fold_convert (TREE_TYPE (arg0),
11925 integer_one_node)),
11926 arg1);
11929 /* If this is an NE or EQ comparison of zero against the result of a
11930 signed MOD operation whose second operand is a power of 2, make
11931 the MOD operation unsigned since it is simpler and equivalent. */
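      /* For example, for signed X, X % 4 == 0 becomes
         (unsigned) X % 4 == 0, which can then fold further to
         (X & 3) == 0.  */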
11932 if (integer_zerop (arg1)
11933 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11934 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11935 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11936 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11937 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11938 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11940 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11941 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11942 fold_convert (newtype,
11943 TREE_OPERAND (arg0, 0)),
11944 fold_convert (newtype,
11945 TREE_OPERAND (arg0, 1)));
11947 return fold_build2 (code, type, newmod,
11948 fold_convert (newtype, arg1));
11951 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11952 C1 is a valid shift constant, and C2 is a power of two, i.e.
11953 a single bit. */
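      /* For example, for 32-bit X, ((X >> 3) & 4) != 0 becomes
         (X & 32) != 0, and for signed X, ((X >> 31) & 2) != 0
         becomes X < 0.  */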
11954 if (TREE_CODE (arg0) == BIT_AND_EXPR
11955 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11956 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11957 == INTEGER_CST
11958 && integer_pow2p (TREE_OPERAND (arg0, 1))
11959 && integer_zerop (arg1))
11961 tree itype = TREE_TYPE (arg0);
11962 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11963 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11965 /* Check for a valid shift count. */
11966 if (TREE_INT_CST_HIGH (arg001) == 0
11967 && TREE_INT_CST_LOW (arg001) < prec)
11969 tree arg01 = TREE_OPERAND (arg0, 1);
11970 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11971 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11972 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11973 can be rewritten as (X & (C2 << C1)) != 0. */
11974 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11976 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11977 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11978 return fold_build2 (code, type, tem, arg1);
11980 /* Otherwise, for signed (arithmetic) shifts,
11981 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11982 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11983 else if (!TYPE_UNSIGNED (itype))
11984 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11985 arg000, build_int_cst (itype, 0));
11986 /* Otherwise, for unsigned (logical) shifts,
11987 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11988 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11989 else
11990 return omit_one_operand (type,
11991 code == EQ_EXPR ? integer_one_node
11992 : integer_zero_node,
11993 arg000);
11997 /* If this is an NE comparison of zero with an AND of one, remove the
11998 comparison since the AND will give the correct value. */
11999 if (code == NE_EXPR
12000 && integer_zerop (arg1)
12001 && TREE_CODE (arg0) == BIT_AND_EXPR
12002 && integer_onep (TREE_OPERAND (arg0, 1)))
12003 return fold_convert (type, arg0);
12005 /* If we have (A & C) == C where C is a power of 2, convert this into
12006 (A & C) != 0. Similarly for NE_EXPR. */
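      /* For example, (flags & 8) == 8 becomes (flags & 8) != 0.  */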
12007 if (TREE_CODE (arg0) == BIT_AND_EXPR
12008 && integer_pow2p (TREE_OPERAND (arg0, 1))
12009 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12010 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12011 arg0, fold_convert (TREE_TYPE (arg0),
12012 integer_zero_node));
12014 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12015 bit, then fold the expression into A < 0 or A >= 0. */
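      /* For example, a test of the sign bit of a 32-bit signed X,
         such as (X & 0x80000000) != 0, becomes X < 0.  */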
12016 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12017 if (tem)
12018 return tem;
12020 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12021 Similarly for NE_EXPR. */
12022 if (TREE_CODE (arg0) == BIT_AND_EXPR
12023 && TREE_CODE (arg1) == INTEGER_CST
12024 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12026 tree notc = fold_build1 (BIT_NOT_EXPR,
12027 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12028 TREE_OPERAND (arg0, 1));
12029 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12030 arg1, notc);
12031 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12032 if (integer_nonzerop (dandnotc))
12033 return omit_one_operand (type, rslt, arg0);
12036 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12037 Similarly for NE_EXPR. */
12038 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12039 && TREE_CODE (arg1) == INTEGER_CST
12040 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12042 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12043 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12044 TREE_OPERAND (arg0, 1), notd);
12045 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12046 if (integer_nonzerop (candnotd))
12047 return omit_one_operand (type, rslt, arg0);
12050 /* If this is a comparison of a field, we may be able to simplify it. */
12051 if ((TREE_CODE (arg0) == COMPONENT_REF
12052 || TREE_CODE (arg0) == BIT_FIELD_REF)
12053 /* Handle the constant case even without -O
12054 to make sure the warnings are given. */
12055 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12057 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12058 if (t1)
12059 return t1;
12062 /* Optimize comparisons of strlen vs zero to a compare of the
12063 first character of the string vs zero. To wit,
12064 strlen(ptr) == 0 => *ptr == 0
12065 strlen(ptr) != 0 => *ptr != 0
12066 Other cases should reduce to one of these two (or a constant)
12067 due to the return value of strlen being unsigned. */
12068 if (TREE_CODE (arg0) == CALL_EXPR
12069 && integer_zerop (arg1))
12071 tree fndecl = get_callee_fndecl (arg0);
12073 if (fndecl
12074 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12075 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12076 && call_expr_nargs (arg0) == 1
12077 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12079 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12080 return fold_build2 (code, type, iref,
12081 build_int_cst (TREE_TYPE (iref), 0));
12085 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12086 of X. Similarly fold (X >> C) == 0 into X >= 0. */
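      /* For example, for 32-bit X, (X >> 31) != 0 becomes X < 0
         (converting an unsigned X to the signed type first).  */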
12087 if (TREE_CODE (arg0) == RSHIFT_EXPR
12088 && integer_zerop (arg1)
12089 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12091 tree arg00 = TREE_OPERAND (arg0, 0);
12092 tree arg01 = TREE_OPERAND (arg0, 1);
12093 tree itype = TREE_TYPE (arg00);
12094 if (TREE_INT_CST_HIGH (arg01) == 0
12095 && TREE_INT_CST_LOW (arg01)
12096 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12098 if (TYPE_UNSIGNED (itype))
12100 itype = signed_type_for (itype);
12101 arg00 = fold_convert (itype, arg00);
12103 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12104 type, arg00, build_int_cst (itype, 0));
12108 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12109 if (integer_zerop (arg1)
12110 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12111 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12112 TREE_OPERAND (arg0, 1));
12114 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12115 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12116 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12117 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12118 build_int_cst (TREE_TYPE (arg1), 0));
12119 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12120 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12121 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12122 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12123 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12124 build_int_cst (TREE_TYPE (arg1), 0));
12126 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12127 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12128 && TREE_CODE (arg1) == INTEGER_CST
12129 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12130 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12131 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12132 TREE_OPERAND (arg0, 1), arg1));
12134 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12135 (X & C) == 0 when C is a single bit. */
12136 if (TREE_CODE (arg0) == BIT_AND_EXPR
12137 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12138 && integer_zerop (arg1)
12139 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12141 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12142 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12143 TREE_OPERAND (arg0, 1));
12144 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12145 type, tem, arg1);
12148 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12149 constant C is a power of two, i.e. a single bit. */
12150 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12151 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12152 && integer_zerop (arg1)
12153 && integer_pow2p (TREE_OPERAND (arg0, 1))
12154 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12155 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12157 tree arg00 = TREE_OPERAND (arg0, 0);
12158 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12159 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12162 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12163 when C is a power of two, i.e. a single bit. */
12164 if (TREE_CODE (arg0) == BIT_AND_EXPR
12165 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12166 && integer_zerop (arg1)
12167 && integer_pow2p (TREE_OPERAND (arg0, 1))
12168 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12169 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12171 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12172 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12173 arg000, TREE_OPERAND (arg0, 1));
12174 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12175 tem, build_int_cst (TREE_TYPE (tem), 0));
12178 if (integer_zerop (arg1)
12179 && tree_expr_nonzero_p (arg0))
12181 tree res = constant_boolean_node (code == NE_EXPR, type);
12182 return omit_one_operand (type, res, arg0);
12185 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12186 if (TREE_CODE (arg0) == NEGATE_EXPR
12187 && TREE_CODE (arg1) == NEGATE_EXPR)
12188 return fold_build2 (code, type,
12189 TREE_OPERAND (arg0, 0),
12190 TREE_OPERAND (arg1, 0));
12192 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12193 if (TREE_CODE (arg0) == BIT_AND_EXPR
12194 && TREE_CODE (arg1) == BIT_AND_EXPR)
12196 tree arg00 = TREE_OPERAND (arg0, 0);
12197 tree arg01 = TREE_OPERAND (arg0, 1);
12198 tree arg10 = TREE_OPERAND (arg1, 0);
12199 tree arg11 = TREE_OPERAND (arg1, 1);
12200 tree itype = TREE_TYPE (arg0);
12202 if (operand_equal_p (arg01, arg11, 0))
12203 return fold_build2 (code, type,
12204 fold_build2 (BIT_AND_EXPR, itype,
12205 fold_build2 (BIT_XOR_EXPR, itype,
12206 arg00, arg10),
12207 arg01),
12208 build_int_cst (itype, 0));
12210 if (operand_equal_p (arg01, arg10, 0))
12211 return fold_build2 (code, type,
12212 fold_build2 (BIT_AND_EXPR, itype,
12213 fold_build2 (BIT_XOR_EXPR, itype,
12214 arg00, arg11),
12215 arg01),
12216 build_int_cst (itype, 0));
12218 if (operand_equal_p (arg00, arg11, 0))
12219 return fold_build2 (code, type,
12220 fold_build2 (BIT_AND_EXPR, itype,
12221 fold_build2 (BIT_XOR_EXPR, itype,
12222 arg01, arg10),
12223 arg00),
12224 build_int_cst (itype, 0));
12226 if (operand_equal_p (arg00, arg10, 0))
12227 return fold_build2 (code, type,
12228 fold_build2 (BIT_AND_EXPR, itype,
12229 fold_build2 (BIT_XOR_EXPR, itype,
12230 arg01, arg11),
12231 arg00),
12232 build_int_cst (itype, 0));
12235 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12236 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12238 tree arg00 = TREE_OPERAND (arg0, 0);
12239 tree arg01 = TREE_OPERAND (arg0, 1);
12240 tree arg10 = TREE_OPERAND (arg1, 0);
12241 tree arg11 = TREE_OPERAND (arg1, 1);
12242 tree itype = TREE_TYPE (arg0);
12244 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12245 operand_equal_p guarantees no side-effects so we don't need
12246 to use omit_one_operand on Z. */
12247 if (operand_equal_p (arg01, arg11, 0))
12248 return fold_build2 (code, type, arg00, arg10);
12249 if (operand_equal_p (arg01, arg10, 0))
12250 return fold_build2 (code, type, arg00, arg11);
12251 if (operand_equal_p (arg00, arg11, 0))
12252 return fold_build2 (code, type, arg01, arg10);
12253 if (operand_equal_p (arg00, arg10, 0))
12254 return fold_build2 (code, type, arg01, arg11);
12256 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12257 if (TREE_CODE (arg01) == INTEGER_CST
12258 && TREE_CODE (arg11) == INTEGER_CST)
12259 return fold_build2 (code, type,
12260 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12261 fold_build2 (BIT_XOR_EXPR, itype,
12262 arg01, arg11)),
12263 arg10);
12266 /* Attempt to simplify equality/inequality comparisons of complex
12267 values. Only lower the comparison if the result is known or
12268 can be simplified to a single scalar comparison. */
12269 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12270 || TREE_CODE (arg0) == COMPLEX_CST)
12271 && (TREE_CODE (arg1) == COMPLEX_EXPR
12272 || TREE_CODE (arg1) == COMPLEX_CST))
12274 tree real0, imag0, real1, imag1;
12275 tree rcond, icond;
12277 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12279 real0 = TREE_OPERAND (arg0, 0);
12280 imag0 = TREE_OPERAND (arg0, 1);
12282 else
12284 real0 = TREE_REALPART (arg0);
12285 imag0 = TREE_IMAGPART (arg0);
12288 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12290 real1 = TREE_OPERAND (arg1, 0);
12291 imag1 = TREE_OPERAND (arg1, 1);
12293 else
12295 real1 = TREE_REALPART (arg1);
12296 imag1 = TREE_IMAGPART (arg1);
12299 rcond = fold_binary (code, type, real0, real1);
12300 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12302 if (integer_zerop (rcond))
12304 if (code == EQ_EXPR)
12305 return omit_two_operands (type, boolean_false_node,
12306 imag0, imag1);
12307 return fold_build2 (NE_EXPR, type, imag0, imag1);
12309 else
12311 if (code == NE_EXPR)
12312 return omit_two_operands (type, boolean_true_node,
12313 imag0, imag1);
12314 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12318 icond = fold_binary (code, type, imag0, imag1);
12319 if (icond && TREE_CODE (icond) == INTEGER_CST)
12321 if (integer_zerop (icond))
12323 if (code == EQ_EXPR)
12324 return omit_two_operands (type, boolean_false_node,
12325 real0, real1);
12326 return fold_build2 (NE_EXPR, type, real0, real1);
12328 else
12330 if (code == NE_EXPR)
12331 return omit_two_operands (type, boolean_true_node,
12332 real0, real1);
12333 return fold_build2 (EQ_EXPR, type, real0, real1);
12338 return NULL_TREE;
12340 case LT_EXPR:
12341 case GT_EXPR:
12342 case LE_EXPR:
12343 case GE_EXPR:
12344 tem = fold_comparison (code, type, op0, op1);
12345 if (tem != NULL_TREE)
12346 return tem;
12348 /* Transform comparisons of the form X +- C CMP X. */
12349 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12351 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12352 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12353 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12354 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12356 tree arg01 = TREE_OPERAND (arg0, 1);
12357 enum tree_code code0 = TREE_CODE (arg0);
12358 int is_positive;
12360 if (TREE_CODE (arg01) == REAL_CST)
12361 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12362 else
12363 is_positive = tree_int_cst_sgn (arg01);
12365 /* (X - c) > X becomes false. */
12366 if (code == GT_EXPR
12367 && ((code0 == MINUS_EXPR && is_positive >= 0)
12368 || (code0 == PLUS_EXPR && is_positive <= 0)))
12370 if (TREE_CODE (arg01) == INTEGER_CST
12371 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12372 fold_overflow_warning (("assuming signed overflow does not "
12373 "occur when assuming that (X - c) > X "
12374 "is always false"),
12375 WARN_STRICT_OVERFLOW_ALL);
12376 return constant_boolean_node (0, type);
12379 /* Likewise (X + c) < X becomes false. */
12380 if (code == LT_EXPR
12381 && ((code0 == PLUS_EXPR && is_positive >= 0)
12382 || (code0 == MINUS_EXPR && is_positive <= 0)))
12384 if (TREE_CODE (arg01) == INTEGER_CST
12385 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12386 fold_overflow_warning (("assuming signed overflow does not "
12387 "occur when assuming that "
12388 "(X + c) < X is always false"),
12389 WARN_STRICT_OVERFLOW_ALL);
12390 return constant_boolean_node (0, type);
12393 /* Convert (X - c) <= X to true. */
12394 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12395 && code == LE_EXPR
12396 && ((code0 == MINUS_EXPR && is_positive >= 0)
12397 || (code0 == PLUS_EXPR && is_positive <= 0)))
12399 if (TREE_CODE (arg01) == INTEGER_CST
12400 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12401 fold_overflow_warning (("assuming signed overflow does not "
12402 "occur when assuming that "
12403 "(X - c) <= X is always true"),
12404 WARN_STRICT_OVERFLOW_ALL);
12405 return constant_boolean_node (1, type);
12408 /* Convert (X + c) >= X to true. */
12409 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12410 && code == GE_EXPR
12411 && ((code0 == PLUS_EXPR && is_positive >= 0)
12412 || (code0 == MINUS_EXPR && is_positive <= 0)))
12414 if (TREE_CODE (arg01) == INTEGER_CST
12415 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12416 fold_overflow_warning (("assuming signed overflow does not "
12417 "occur when assuming that "
12418 "(X + c) >= X is always true"),
12419 WARN_STRICT_OVERFLOW_ALL);
12420 return constant_boolean_node (1, type);
12423 if (TREE_CODE (arg01) == INTEGER_CST)
12425 /* Convert X + c > X and X - c < X to true for integers. */
12426 if (code == GT_EXPR
12427 && ((code0 == PLUS_EXPR && is_positive > 0)
12428 || (code0 == MINUS_EXPR && is_positive < 0)))
12430 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12431 fold_overflow_warning (("assuming signed overflow does "
12432 "not occur when assuming that "
12433 "(X + c) > X is always true"),
12434 WARN_STRICT_OVERFLOW_ALL);
12435 return constant_boolean_node (1, type);
12438 if (code == LT_EXPR
12439 && ((code0 == MINUS_EXPR && is_positive > 0)
12440 || (code0 == PLUS_EXPR && is_positive < 0)))
12442 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12443 fold_overflow_warning (("assuming signed overflow does "
12444 "not occur when assuming that "
12445 "(X - c) < X is always true"),
12446 WARN_STRICT_OVERFLOW_ALL);
12447 return constant_boolean_node (1, type);
12450 /* Convert X + c <= X and X - c >= X to false for integers. */
12451 if (code == LE_EXPR
12452 && ((code0 == PLUS_EXPR && is_positive > 0)
12453 || (code0 == MINUS_EXPR && is_positive < 0)))
12455 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12456 fold_overflow_warning (("assuming signed overflow does "
12457 "not occur when assuming that "
12458 "(X + c) <= X is always false"),
12459 WARN_STRICT_OVERFLOW_ALL);
12460 return constant_boolean_node (0, type);
12463 if (code == GE_EXPR
12464 && ((code0 == MINUS_EXPR && is_positive > 0)
12465 || (code0 == PLUS_EXPR && is_positive < 0)))
12467 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12468 fold_overflow_warning (("assuming signed overflow does "
12469 "not occur when assuming that "
12470 "(X - c) >= X is always false"),
12471 WARN_STRICT_OVERFLOW_ALL);
12472 return constant_boolean_node (0, type);
12477 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12478 This transformation affects the cases which are handled in later
12479 optimizations involving comparisons with non-negative constants. */
12480 if (TREE_CODE (arg1) == INTEGER_CST
12481 && TREE_CODE (arg0) != INTEGER_CST
12482 && tree_int_cst_sgn (arg1) > 0)
12484 if (code == GE_EXPR)
12486 arg1 = const_binop (MINUS_EXPR, arg1,
12487 build_int_cst (TREE_TYPE (arg1), 1), 0);
12488 return fold_build2 (GT_EXPR, type, arg0,
12489 fold_convert (TREE_TYPE (arg0), arg1));
12491 if (code == LT_EXPR)
12493 arg1 = const_binop (MINUS_EXPR, arg1,
12494 build_int_cst (TREE_TYPE (arg1), 1), 0);
12495 return fold_build2 (LE_EXPR, type, arg0,
12496 fold_convert (TREE_TYPE (arg0), arg1));
12500 /* Comparisons with the highest or lowest possible integer of
12501 the specified precision will have known values. */
12503 tree arg1_type = TREE_TYPE (arg1);
12504 unsigned int width = TYPE_PRECISION (arg1_type);
12506 if (TREE_CODE (arg1) == INTEGER_CST
12507 && !TREE_OVERFLOW (arg1)
12508 && width <= 2 * HOST_BITS_PER_WIDE_INT
12509 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12511 HOST_WIDE_INT signed_max_hi;
12512 unsigned HOST_WIDE_INT signed_max_lo;
12513 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12515 if (width <= HOST_BITS_PER_WIDE_INT)
12517 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12518 - 1;
12519 signed_max_hi = 0;
12520 max_hi = 0;
12522 if (TYPE_UNSIGNED (arg1_type))
12524 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12525 min_lo = 0;
12526 min_hi = 0;
12528 else
12530 max_lo = signed_max_lo;
12531 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12532 min_hi = -1;
12535 else
12537 width -= HOST_BITS_PER_WIDE_INT;
12538 signed_max_lo = -1;
12539 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12540 - 1;
12541 max_lo = -1;
12542 min_lo = 0;
12544 if (TYPE_UNSIGNED (arg1_type))
12546 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12547 min_hi = 0;
12549 else
12551 max_hi = signed_max_hi;
12552 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12556 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12557 && TREE_INT_CST_LOW (arg1) == max_lo)
12558 switch (code)
12560 case GT_EXPR:
12561 return omit_one_operand (type, integer_zero_node, arg0);
12563 case GE_EXPR:
12564 return fold_build2 (EQ_EXPR, type, op0, op1);
12566 case LE_EXPR:
12567 return omit_one_operand (type, integer_one_node, arg0);
12569 case LT_EXPR:
12570 return fold_build2 (NE_EXPR, type, op0, op1);
12572 /* The GE_EXPR and LT_EXPR cases above are not normally
12573 reached because of previous transformations. */
12575 default:
12576 break;
12578 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12579 == max_hi
12580 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12581 switch (code)
12583 case GT_EXPR:
12584 arg1 = const_binop (PLUS_EXPR, arg1,
12585 build_int_cst (TREE_TYPE (arg1), 1), 0);
12586 return fold_build2 (EQ_EXPR, type,
12587 fold_convert (TREE_TYPE (arg1), arg0),
12588 arg1);
12589 case LE_EXPR:
12590 arg1 = const_binop (PLUS_EXPR, arg1,
12591 build_int_cst (TREE_TYPE (arg1), 1), 0);
12592 return fold_build2 (NE_EXPR, type,
12593 fold_convert (TREE_TYPE (arg1), arg0),
12594 arg1);
12595 default:
12596 break;
12598 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12599 == min_hi
12600 && TREE_INT_CST_LOW (arg1) == min_lo)
12601 switch (code)
12603 case LT_EXPR:
12604 return omit_one_operand (type, integer_zero_node, arg0);
12606 case LE_EXPR:
12607 return fold_build2 (EQ_EXPR, type, op0, op1);
12609 case GE_EXPR:
12610 return omit_one_operand (type, integer_one_node, arg0);
12612 case GT_EXPR:
12613 return fold_build2 (NE_EXPR, type, op0, op1);
12615 default:
12616 break;
12618 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12619 == min_hi
12620 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12621 switch (code)
12623 case GE_EXPR:
12624 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12625 return fold_build2 (NE_EXPR, type,
12626 fold_convert (TREE_TYPE (arg1), arg0),
12627 arg1);
12628 case LT_EXPR:
12629 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12630 return fold_build2 (EQ_EXPR, type,
12631 fold_convert (TREE_TYPE (arg1), arg0),
12632 arg1);
12633 default:
12634 break;
12637 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12638 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12639 && TYPE_UNSIGNED (arg1_type)
12640 /* We will flip the signedness of the comparison operator
12641 associated with the mode of arg1, so the sign bit is
12642 specified by this mode. Check that arg1 is the signed
12643 max associated with this sign bit. */
12644 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12645 /* signed_type does not work on pointer types. */
12646 && INTEGRAL_TYPE_P (arg1_type))
12648 /* The following case also applies to X < signed_max+1
12649 and X >= signed_max+1 because of previous transformations. */
12650 if (code == LE_EXPR || code == GT_EXPR)
12652 tree st;
12653 st = signed_type_for (TREE_TYPE (arg1));
12654 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12655 type, fold_convert (st, arg0),
12656 build_int_cst (st, 0));
12662 /* If we are comparing an ABS_EXPR with a constant, we can
12663 convert all the cases into explicit comparisons, but they may
12664 well not be faster than doing the ABS and one comparison.
12665 But ABS (X) <= C is a range comparison, which becomes a subtraction
12666 and a comparison, and is probably faster. */
12667 if (code == LE_EXPR
12668 && TREE_CODE (arg1) == INTEGER_CST
12669 && TREE_CODE (arg0) == ABS_EXPR
12670 && ! TREE_SIDE_EFFECTS (arg0)
12671 && (0 != (tem = negate_expr (arg1)))
12672 && TREE_CODE (tem) == INTEGER_CST
12673 && !TREE_OVERFLOW (tem))
12674 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12675 build2 (GE_EXPR, type,
12676 TREE_OPERAND (arg0, 0), tem),
12677 build2 (LE_EXPR, type,
12678 TREE_OPERAND (arg0, 0), arg1));
12680 /* Convert ABS_EXPR<x> >= 0 to true. */
12681 strict_overflow_p = false;
12682 if (code == GE_EXPR
12683 && (integer_zerop (arg1)
12684 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12685 && real_zerop (arg1)))
12686 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12688 if (strict_overflow_p)
12689 fold_overflow_warning (("assuming signed overflow does not occur "
12690 "when simplifying comparison of "
12691 "absolute value and zero"),
12692 WARN_STRICT_OVERFLOW_CONDITIONAL);
12693 return omit_one_operand (type, integer_one_node, arg0);
12696 /* Convert ABS_EXPR<x> < 0 to false. */
12697 strict_overflow_p = false;
12698 if (code == LT_EXPR
12699 && (integer_zerop (arg1) || real_zerop (arg1))
12700 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12702 if (strict_overflow_p)
12703 fold_overflow_warning (("assuming signed overflow does not occur "
12704 "when simplifying comparison of "
12705 "absolute value and zero"),
12706 WARN_STRICT_OVERFLOW_CONDITIONAL);
12707 return omit_one_operand (type, integer_zero_node, arg0);
12710 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12711 and similarly for >= into !=. */
12712 if ((code == LT_EXPR || code == GE_EXPR)
12713 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12714 && TREE_CODE (arg1) == LSHIFT_EXPR
12715 && integer_onep (TREE_OPERAND (arg1, 0)))
12716 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12717 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12718 TREE_OPERAND (arg1, 1)),
12719 build_int_cst (TREE_TYPE (arg0), 0));
12721 if ((code == LT_EXPR || code == GE_EXPR)
12722 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12723 && (TREE_CODE (arg1) == NOP_EXPR
12724 || TREE_CODE (arg1) == CONVERT_EXPR)
12725 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12726 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12727 return
12728 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12729 fold_convert (TREE_TYPE (arg0),
12730 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12731 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12732 1))),
12733 build_int_cst (TREE_TYPE (arg0), 0));
12735 return NULL_TREE;
12737 case UNORDERED_EXPR:
12738 case ORDERED_EXPR:
12739 case UNLT_EXPR:
12740 case UNLE_EXPR:
12741 case UNGT_EXPR:
12742 case UNGE_EXPR:
12743 case UNEQ_EXPR:
12744 case LTGT_EXPR:
12745 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12747 t1 = fold_relational_const (code, type, arg0, arg1);
12748 if (t1 != NULL_TREE)
12749 return t1;
12752 /* If the first operand is NaN, the result is constant. */
12753 if (TREE_CODE (arg0) == REAL_CST
12754 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12755 && (code != LTGT_EXPR || ! flag_trapping_math))
12757 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12758 ? integer_zero_node
12759 : integer_one_node;
12760 return omit_one_operand (type, t1, arg1);
12763 /* If the second operand is NaN, the result is constant. */
12764 if (TREE_CODE (arg1) == REAL_CST
12765 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12766 && (code != LTGT_EXPR || ! flag_trapping_math))
12768 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12769 ? integer_zero_node
12770 : integer_one_node;
12771 return omit_one_operand (type, t1, arg0);
12774 /* Simplify unordered comparison of something with itself. */
12775 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12776 && operand_equal_p (arg0, arg1, 0))
12777 return constant_boolean_node (1, type);
12779 if (code == LTGT_EXPR
12780 && !flag_trapping_math
12781 && operand_equal_p (arg0, arg1, 0))
12782 return constant_boolean_node (0, type);
12784 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12786 tree targ0 = strip_float_extensions (arg0);
12787 tree targ1 = strip_float_extensions (arg1);
12788 tree newtype = TREE_TYPE (targ0);
12790 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12791 newtype = TREE_TYPE (targ1);
12793 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12794 return fold_build2 (code, type, fold_convert (newtype, targ0),
12795 fold_convert (newtype, targ1));
12798 return NULL_TREE;
12800 case COMPOUND_EXPR:
12801 /* When pedantic, a compound expression can be neither an lvalue
12802 nor an integer constant expression. */
12803 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12804 return NULL_TREE;
12805 /* Don't let (0, 0) be a null pointer constant. */
12806 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12807 : fold_convert (type, arg1);
12808 return pedantic_non_lvalue (tem);
12810 case COMPLEX_EXPR:
12811 if ((TREE_CODE (arg0) == REAL_CST
12812 && TREE_CODE (arg1) == REAL_CST)
12813 || (TREE_CODE (arg0) == INTEGER_CST
12814 && TREE_CODE (arg1) == INTEGER_CST))
12815 return build_complex (type, arg0, arg1);
12816 return NULL_TREE;
12818 case ASSERT_EXPR:
12819 /* An ASSERT_EXPR should never be passed to fold_binary. */
12820 gcc_unreachable ();
12822 default:
12823 return NULL_TREE;
12824 } /* switch (code) */
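/* Illustrative sketch, not part of GCC: one way a caller could exercise
   the "X + c > X" fold handled above.  EXAMPLE_X is assumed to be a
   valid tree of integer_type_node and the helper name is hypothetical.
   When TYPE_OVERFLOW_UNDEFINED holds for the type, fold_build2 returns
   the constant true node instead of building the comparison.  */
static tree
example_fold_x_plus_1_gt_x (tree example_x)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree sum = fold_build2 (PLUS_EXPR, integer_type_node, example_x, one);
  return fold_build2 (GT_EXPR, boolean_type_node, sum, example_x);
}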
12827 /* Callback for walk_tree, looking for LABEL_EXPR.
12828 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12829 Do not check the sub-tree of GOTO_EXPR. */
12831 static tree
12832 contains_label_1 (tree *tp,
12833 int *walk_subtrees,
12834 void *data ATTRIBUTE_UNUSED)
12836 switch (TREE_CODE (*tp))
12838 case LABEL_EXPR:
12839 return *tp;
12840 case GOTO_EXPR:
12841 *walk_subtrees = 0;
12842 /* no break */
12843 default:
12844 return NULL_TREE;
12848 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12849 accessible from outside the sub-tree. Returns false if no
12850 such label is found. */
12852 static bool
12853 contains_label_p (tree st)
12855 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
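/* Illustrative sketch, not part of GCC: the COND_EXPR folding below
   combines TREE_SIDE_EFFECTS with contains_label_p in exactly this
   shape to decide whether a dead arm may be discarded.  ARM is assumed
   to be one operand of a COND_EXPR whose condition is constant.  */
static bool
example_arm_is_discardable (tree arm)
{
  return !TREE_SIDE_EFFECTS (arm) || !contains_label_p (arm);
}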
12858 /* Fold a ternary expression of code CODE and type TYPE with operands
12859 OP0, OP1, and OP2. Return the folded expression if folding is
12860 successful. Otherwise, return NULL_TREE. */
12862 tree
12863 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12865 tree tem;
12866 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12867 enum tree_code_class kind = TREE_CODE_CLASS (code);
12869 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12870 && TREE_CODE_LENGTH (code) == 3);
12872 /* Strip any conversions that don't change the mode. This is safe
12873 for every expression, except for a comparison expression because
12874 its signedness is derived from its operands. So, in the latter
12875 case, only strip conversions that don't change the signedness.
12877 Note that this is done as an internal manipulation within the
12878 constant folder, in order to find the simplest representation of
12879 the arguments so that their form can be studied. In any cases,
12880 the appropriate type conversions should be put back in the tree
12881 that will get out of the constant folder. */
12882 if (op0)
12884 arg0 = op0;
12885 STRIP_NOPS (arg0);
12888 if (op1)
12890 arg1 = op1;
12891 STRIP_NOPS (arg1);
12894 switch (code)
12896 case COMPONENT_REF:
12897 if (TREE_CODE (arg0) == CONSTRUCTOR
12898 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12900 unsigned HOST_WIDE_INT idx;
12901 tree field, value;
12902 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12903 if (field == arg1)
12904 return value;
12906 return NULL_TREE;
12908 case COND_EXPR:
12909 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12910 so all simple results must be passed through pedantic_non_lvalue. */
12911 if (TREE_CODE (arg0) == INTEGER_CST)
12913 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12914 tem = integer_zerop (arg0) ? op2 : op1;
12915 /* Only optimize constant conditions when the selected branch
12916 has the same type as the COND_EXPR. This avoids optimizing
12917 away "c ? x : throw", where the throw has a void type.
12918 Avoid throwing away an operand that contains a label. */
12919 if ((!TREE_SIDE_EFFECTS (unused_op)
12920 || !contains_label_p (unused_op))
12921 && (! VOID_TYPE_P (TREE_TYPE (tem))
12922 || VOID_TYPE_P (type)))
12923 return pedantic_non_lvalue (tem);
12924 return NULL_TREE;
12926 if (operand_equal_p (arg1, op2, 0))
12927 return pedantic_omit_one_operand (type, arg1, arg0);
12929 /* If we have A op B ? A : C, we may be able to convert this to a
12930 simpler expression, depending on the operation and the values
12931 of B and C. Signed zeros prevent all of these transformations,
12932 for reasons given above each one.
12934 Also try swapping the arguments and inverting the conditional. */
12935 if (COMPARISON_CLASS_P (arg0)
12936 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12937 arg1, TREE_OPERAND (arg0, 1))
12938 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12940 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12941 if (tem)
12942 return tem;
12945 if (COMPARISON_CLASS_P (arg0)
12946 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12947 op2,
12948 TREE_OPERAND (arg0, 1))
12949 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12951 tem = fold_truth_not_expr (arg0);
12952 if (tem && COMPARISON_CLASS_P (tem))
12954 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12955 if (tem)
12956 return tem;
12960 /* If the second operand is simpler than the third, swap them
12961 since that produces better jump optimization results. */
12962 if (truth_value_p (TREE_CODE (arg0))
12963 && tree_swap_operands_p (op1, op2, false))
12965 /* See if this can be inverted. If it can't, possibly because
12966 it was a floating-point inequality comparison, don't do
12967 anything. */
12968 tem = fold_truth_not_expr (arg0);
12969 if (tem)
12970 return fold_build3 (code, type, tem, op2, op1);
12973 /* Convert A ? 1 : 0 to simply A. */
12974 if (integer_onep (op1)
12975 && integer_zerop (op2)
12976 /* If we try to convert OP0 to our type, the
12977 call to fold will try to move the conversion inside
12978 a COND, which will recurse. In that case, the COND_EXPR
12979 is probably the best choice, so leave it alone. */
12980 && type == TREE_TYPE (arg0))
12981 return pedantic_non_lvalue (arg0);
12983 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12984 over COND_EXPR in cases such as floating point comparisons. */
12985 if (integer_zerop (op1)
12986 && integer_onep (op2)
12987 && truth_value_p (TREE_CODE (arg0)))
12988 return pedantic_non_lvalue (fold_convert (type,
12989 invert_truthvalue (arg0)));
12991 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12992 if (TREE_CODE (arg0) == LT_EXPR
12993 && integer_zerop (TREE_OPERAND (arg0, 1))
12994 && integer_zerop (op2)
12995 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12997 /* sign_bit_p only checks ARG1 bits within A's precision.
12998 If <sign bit of A> has wider type than A, bits outside
12999 of A's precision in <sign bit of A> need to be checked.
13000 If they are all 0, this optimization needs to be done
13001 in unsigned A's type; if they are all 1, in signed A's type;
13002 otherwise this can't be done. */
13003 if (TYPE_PRECISION (TREE_TYPE (tem))
13004 < TYPE_PRECISION (TREE_TYPE (arg1))
13005 && TYPE_PRECISION (TREE_TYPE (tem))
13006 < TYPE_PRECISION (type))
13008 unsigned HOST_WIDE_INT mask_lo;
13009 HOST_WIDE_INT mask_hi;
13010 int inner_width, outer_width;
13011 tree tem_type;
13013 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13014 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13015 if (outer_width > TYPE_PRECISION (type))
13016 outer_width = TYPE_PRECISION (type);
13018 if (outer_width > HOST_BITS_PER_WIDE_INT)
13020 mask_hi = ((unsigned HOST_WIDE_INT) -1
13021 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13022 mask_lo = -1;
13024 else
13026 mask_hi = 0;
13027 mask_lo = ((unsigned HOST_WIDE_INT) -1
13028 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13030 if (inner_width > HOST_BITS_PER_WIDE_INT)
13032 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13033 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13034 mask_lo = 0;
13036 else
13037 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13038 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13040 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13041 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13043 tem_type = signed_type_for (TREE_TYPE (tem));
13044 tem = fold_convert (tem_type, tem);
13046 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13047 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13049 tem_type = unsigned_type_for (TREE_TYPE (tem));
13050 tem = fold_convert (tem_type, tem);
13052 else
13053 tem = NULL;
13056 if (tem)
13057 return fold_convert (type,
13058 fold_build2 (BIT_AND_EXPR,
13059 TREE_TYPE (tem), tem,
13060 fold_convert (TREE_TYPE (tem),
13061 arg1)));
13064 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13065 already handled above. */
13066 if (TREE_CODE (arg0) == BIT_AND_EXPR
13067 && integer_onep (TREE_OPERAND (arg0, 1))
13068 && integer_zerop (op2)
13069 && integer_pow2p (arg1))
13071 tree tem = TREE_OPERAND (arg0, 0);
13072 STRIP_NOPS (tem);
13073 if (TREE_CODE (tem) == RSHIFT_EXPR
13074 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13075 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13076 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13077 return fold_build2 (BIT_AND_EXPR, type,
13078 TREE_OPERAND (tem, 0), arg1);
13081 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13082 is probably obsolete because the first operand should be a
13083 truth value (that's why we have the two cases above), but let's
13084 leave it in until we can confirm this for all front-ends. */
13085 if (integer_zerop (op2)
13086 && TREE_CODE (arg0) == NE_EXPR
13087 && integer_zerop (TREE_OPERAND (arg0, 1))
13088 && integer_pow2p (arg1)
13089 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13090 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13091 arg1, OEP_ONLY_CONST))
13092 return pedantic_non_lvalue (fold_convert (type,
13093 TREE_OPERAND (arg0, 0)));
13095 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13096 if (integer_zerop (op2)
13097 && truth_value_p (TREE_CODE (arg0))
13098 && truth_value_p (TREE_CODE (arg1)))
13099 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13100 fold_convert (type, arg0),
13101 arg1);
13103 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13104 if (integer_onep (op2)
13105 && truth_value_p (TREE_CODE (arg0))
13106 && truth_value_p (TREE_CODE (arg1)))
13108 /* Only perform transformation if ARG0 is easily inverted. */
13109 tem = fold_truth_not_expr (arg0);
13110 if (tem)
13111 return fold_build2 (TRUTH_ORIF_EXPR, type,
13112 fold_convert (type, tem),
13113 arg1);
13116 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13117 if (integer_zerop (arg1)
13118 && truth_value_p (TREE_CODE (arg0))
13119 && truth_value_p (TREE_CODE (op2)))
13121 /* Only perform transformation if ARG0 is easily inverted. */
13122 tem = fold_truth_not_expr (arg0);
13123 if (tem)
13124 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13125 fold_convert (type, tem),
13126 op2);
13129 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13130 if (integer_onep (arg1)
13131 && truth_value_p (TREE_CODE (arg0))
13132 && truth_value_p (TREE_CODE (op2)))
13133 return fold_build2 (TRUTH_ORIF_EXPR, type,
13134 fold_convert (type, arg0),
13135 op2);
13137 return NULL_TREE;
13139 case CALL_EXPR:
13140 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13141 of fold_ternary on them. */
13142 gcc_unreachable ();
13144 case BIT_FIELD_REF:
13145 if ((TREE_CODE (arg0) == VECTOR_CST
13146 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13147 && type == TREE_TYPE (TREE_TYPE (arg0))
13148 && host_integerp (arg1, 1)
13149 && host_integerp (op2, 1))
13151 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13152 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13154 if (width != 0
13155 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13156 && (idx % width) == 0
13157 && (idx = idx / width)
13158 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13160 tree elements = NULL_TREE;
13162 if (TREE_CODE (arg0) == VECTOR_CST)
13163 elements = TREE_VECTOR_CST_ELTS (arg0);
13164 else
13166 unsigned HOST_WIDE_INT idx;
13167 tree value;
13169 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13170 elements = tree_cons (NULL_TREE, value, elements);
13172 while (idx-- > 0 && elements)
13173 elements = TREE_CHAIN (elements);
13174 if (elements)
13175 return TREE_VALUE (elements);
13176 else
13177 return fold_convert (type, integer_zero_node);
13180 return NULL_TREE;
13182 default:
13183 return NULL_TREE;
13184 } /* switch (code) */
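/* Illustrative sketch, not part of GCC: the "A ? 1 : 0 -> A" case
   above, driven through fold_build3.  COND is assumed to be a truth
   value whose type matches the result type; the helper name is
   hypothetical.  */
static tree
example_fold_cond_one_zero (tree cond)
{
  tree type = TREE_TYPE (cond);
  return fold_build3 (COND_EXPR, type, cond,
		      build_int_cst (type, 1),
		      build_int_cst (type, 0));
}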
13187 /* Perform constant folding and related simplification of EXPR.
13188 The related simplifications include x*1 => x, x*0 => 0, etc.,
13189 and application of the associative law.
13190 NOP_EXPR conversions may be removed freely (as long as we
13191 are careful not to change the type of the overall expression).
13192 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13193 but we can constant-fold them if they have constant operands. */
13195 #ifdef ENABLE_FOLD_CHECKING
13196 # define fold(x) fold_1 (x)
13197 static tree fold_1 (tree);
13198 static
13199 #endif
13200 tree
13201 fold (tree expr)
13203 const tree t = expr;
13204 enum tree_code code = TREE_CODE (t);
13205 enum tree_code_class kind = TREE_CODE_CLASS (code);
13206 tree tem;
13208 /* Return right away if a constant. */
13209 if (kind == tcc_constant)
13210 return t;
13212 /* CALL_EXPR-like objects with variable numbers of operands are
13213 treated specially. */
13214 if (kind == tcc_vl_exp)
13216 if (code == CALL_EXPR)
13218 tem = fold_call_expr (expr, false);
13219 return tem ? tem : expr;
13221 return expr;
13224 if (IS_EXPR_CODE_CLASS (kind)
13225 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13227 tree type = TREE_TYPE (t);
13228 tree op0, op1, op2;
13230 switch (TREE_CODE_LENGTH (code))
13232 case 1:
13233 op0 = TREE_OPERAND (t, 0);
13234 tem = fold_unary (code, type, op0);
13235 return tem ? tem : expr;
13236 case 2:
13237 op0 = TREE_OPERAND (t, 0);
13238 op1 = TREE_OPERAND (t, 1);
13239 tem = fold_binary (code, type, op0, op1);
13240 return tem ? tem : expr;
13241 case 3:
13242 op0 = TREE_OPERAND (t, 0);
13243 op1 = TREE_OPERAND (t, 1);
13244 op2 = TREE_OPERAND (t, 2);
13245 tem = fold_ternary (code, type, op0, op1, op2);
13246 return tem ? tem : expr;
13247 default:
13248 break;
13252 switch (code)
13254 case CONST_DECL:
13255 return fold (DECL_INITIAL (t));
13257 default:
13258 return t;
13259 } /* switch (code) */
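/* Illustrative sketch, not part of GCC: folding a freshly built
   constant expression through the main entry point; "2 + 3" collapses
   to the single INTEGER_CST 5.  The helper name is hypothetical.  */
static tree
example_fold_constant_sum (void)
{
  tree sum = build2 (PLUS_EXPR, integer_type_node,
		     build_int_cst (integer_type_node, 2),
		     build_int_cst (integer_type_node, 3));
  return fold (sum);
}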
13262 #ifdef ENABLE_FOLD_CHECKING
13263 #undef fold
13265 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13266 static void fold_check_failed (const_tree, const_tree);
13267 void print_fold_checksum (const_tree);
13269 /* When --enable-checking=fold, compute a digest of expr before
13270 and after the actual fold call to verify that fold did not
13271 accidentally change the original expr. */
13273 tree
13274 fold (tree expr)
13276 tree ret;
13277 struct md5_ctx ctx;
13278 unsigned char checksum_before[16], checksum_after[16];
13279 htab_t ht;
13281 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13282 md5_init_ctx (&ctx);
13283 fold_checksum_tree (expr, &ctx, ht);
13284 md5_finish_ctx (&ctx, checksum_before);
13285 htab_empty (ht);
13287 ret = fold_1 (expr);
13289 md5_init_ctx (&ctx);
13290 fold_checksum_tree (expr, &ctx, ht);
13291 md5_finish_ctx (&ctx, checksum_after);
13292 htab_delete (ht);
13294 if (memcmp (checksum_before, checksum_after, 16))
13295 fold_check_failed (expr, ret);
13297 return ret;
13300 void
13301 print_fold_checksum (const_tree expr)
13303 struct md5_ctx ctx;
13304 unsigned char checksum[16], cnt;
13305 htab_t ht;
13307 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13308 md5_init_ctx (&ctx);
13309 fold_checksum_tree (expr, &ctx, ht);
13310 md5_finish_ctx (&ctx, checksum);
13311 htab_delete (ht);
13312 for (cnt = 0; cnt < 16; ++cnt)
13313 fprintf (stderr, "%02x", checksum[cnt]);
13314 putc ('\n', stderr);
13317 static void
13318 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13320 internal_error ("fold check: original tree changed by fold");
13323 static void
13324 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13326 const void **slot;
13327 enum tree_code code;
13328 struct tree_function_decl buf;
13329 int i, len;
13331 recursive_label:
13333 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13334 <= sizeof (struct tree_function_decl))
13335 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13336 if (expr == NULL)
13337 return;
13338 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13339 if (*slot != NULL)
13340 return;
13341 *slot = expr;
13342 code = TREE_CODE (expr);
13343 if (TREE_CODE_CLASS (code) == tcc_declaration
13344 && DECL_ASSEMBLER_NAME_SET_P (expr))
13346 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13347 memcpy ((char *) &buf, expr, tree_size (expr));
13348 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13349 expr = (tree) &buf;
13351 else if (TREE_CODE_CLASS (code) == tcc_type
13352 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13353 || TYPE_CACHED_VALUES_P (expr)
13354 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13356 /* Allow these fields to be modified. */
13357 tree tmp;
13358 memcpy ((char *) &buf, expr, tree_size (expr));
13359 expr = tmp = (tree) &buf;
13360 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13361 TYPE_POINTER_TO (tmp) = NULL;
13362 TYPE_REFERENCE_TO (tmp) = NULL;
13363 if (TYPE_CACHED_VALUES_P (tmp))
13365 TYPE_CACHED_VALUES_P (tmp) = 0;
13366 TYPE_CACHED_VALUES (tmp) = NULL;
13369 md5_process_bytes (expr, tree_size (expr), ctx);
13370 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13371 if (TREE_CODE_CLASS (code) != tcc_type
13372 && TREE_CODE_CLASS (code) != tcc_declaration
13373 && code != TREE_LIST
13374 && code != SSA_NAME)
13375 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13376 switch (TREE_CODE_CLASS (code))
13378 case tcc_constant:
13379 switch (code)
13381 case STRING_CST:
13382 md5_process_bytes (TREE_STRING_POINTER (expr),
13383 TREE_STRING_LENGTH (expr), ctx);
13384 break;
13385 case COMPLEX_CST:
13386 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13387 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13388 break;
13389 case VECTOR_CST:
13390 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13391 break;
13392 default:
13393 break;
13395 break;
13396 case tcc_exceptional:
13397 switch (code)
13399 case TREE_LIST:
13400 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13401 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13402 expr = TREE_CHAIN (expr);
13403 goto recursive_label;
13404 break;
13405 case TREE_VEC:
13406 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13407 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13408 break;
13409 default:
13410 break;
13412 break;
13413 case tcc_expression:
13414 case tcc_reference:
13415 case tcc_comparison:
13416 case tcc_unary:
13417 case tcc_binary:
13418 case tcc_statement:
13419 case tcc_vl_exp:
13420 len = TREE_OPERAND_LENGTH (expr);
13421 for (i = 0; i < len; ++i)
13422 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13423 break;
13424 case tcc_declaration:
13425 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13426 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13427 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13429 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13430 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13431 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13432 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13433 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13435 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13436 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13438 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13440 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13441 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13442 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13444 break;
13445 case tcc_type:
13446 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13447 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13448 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13449 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13450 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13451 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13452 if (INTEGRAL_TYPE_P (expr)
13453 || SCALAR_FLOAT_TYPE_P (expr))
13455 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13456 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13458 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13459 if (TREE_CODE (expr) == RECORD_TYPE
13460 || TREE_CODE (expr) == UNION_TYPE
13461 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13462 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13463 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13464 break;
13465 default:
13466 break;
13470 /* Helper function for outputting the checksum of a tree T. When
13471 debugging with gdb, you can "define mynext" to be "next" followed
13472 by "call debug_fold_checksum (op0)", then just trace down till the
13473 outputs differ. */
13475 void
13476 debug_fold_checksum (const_tree t)
13478 int i;
13479 unsigned char checksum[16];
13480 struct md5_ctx ctx;
13481 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13483 md5_init_ctx (&ctx);
13484 fold_checksum_tree (t, &ctx, ht);
13485 md5_finish_ctx (&ctx, checksum);
13486 htab_empty (ht);
13488 for (i = 0; i < 16; i++)
13489 fprintf (stderr, "%d ", checksum[i]);
13491 fprintf (stderr, "\n");
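/* Illustrative sketch, not part of GCC: the digest-before/digest-after
   pattern used throughout this checking code, factored into a single
   hypothetical predicate.  FN is any tree transformation; the result
   is true iff FN left OP bitwise unchanged.  */
static bool
example_tree_unchanged_by (tree (*fn) (tree), tree op)
{
  unsigned char before[16], after[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op, &ctx, ht);
  md5_finish_ctx (&ctx, before);
  htab_empty (ht);

  fn (op);	/* The transformation under test.  */

  md5_init_ctx (&ctx);
  fold_checksum_tree (op, &ctx, ht);
  md5_finish_ctx (&ctx, after);
  htab_delete (ht);

  return memcmp (before, after, 16) == 0;
}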
13494 #endif
13496 /* Fold a unary tree expression with code CODE of type TYPE with an
13497 operand OP0. Return a folded expression if successful. Otherwise,
13498 return a tree expression with code CODE of type TYPE with an
13499 operand OP0. */
13501 tree
13502 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13504 tree tem;
13505 #ifdef ENABLE_FOLD_CHECKING
13506 unsigned char checksum_before[16], checksum_after[16];
13507 struct md5_ctx ctx;
13508 htab_t ht;
13510 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13511 md5_init_ctx (&ctx);
13512 fold_checksum_tree (op0, &ctx, ht);
13513 md5_finish_ctx (&ctx, checksum_before);
13514 htab_empty (ht);
13515 #endif
13517 tem = fold_unary (code, type, op0);
13518 if (!tem)
13519 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13521 #ifdef ENABLE_FOLD_CHECKING
13522 md5_init_ctx (&ctx);
13523 fold_checksum_tree (op0, &ctx, ht);
13524 md5_finish_ctx (&ctx, checksum_after);
13525 htab_delete (ht);
13527 if (memcmp (checksum_before, checksum_after, 16))
13528 fold_check_failed (op0, tem);
13529 #endif
13530 return tem;
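/* Illustrative sketch, not part of GCC: applying fold_build1 twice
   with NEGATE_EXPR.  For operands and types where the negations are
   safe to cancel, the folder collapses the double negation back to OP
   rather than building two NEGATE_EXPR nodes.  The helper name is
   hypothetical.  */
static tree
example_fold_double_negate (tree op)
{
  tree neg = fold_build1 (NEGATE_EXPR, TREE_TYPE (op), op);
  return fold_build1 (NEGATE_EXPR, TREE_TYPE (op), neg);
}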
13533 /* Fold a binary tree expression with code CODE of type TYPE with
13534 operands OP0 and OP1. Return a folded expression if successful.
13535 Otherwise, return a tree expression with code CODE of type TYPE
13536 with operands OP0 and OP1. */
13538 tree
13539 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13540 MEM_STAT_DECL)
13542 tree tem;
13543 #ifdef ENABLE_FOLD_CHECKING
13544 unsigned char checksum_before_op0[16],
13545 checksum_before_op1[16],
13546 checksum_after_op0[16],
13547 checksum_after_op1[16];
13548 struct md5_ctx ctx;
13549 htab_t ht;
13551 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13552 md5_init_ctx (&ctx);
13553 fold_checksum_tree (op0, &ctx, ht);
13554 md5_finish_ctx (&ctx, checksum_before_op0);
13555 htab_empty (ht);
13557 md5_init_ctx (&ctx);
13558 fold_checksum_tree (op1, &ctx, ht);
13559 md5_finish_ctx (&ctx, checksum_before_op1);
13560 htab_empty (ht);
13561 #endif
13563 tem = fold_binary (code, type, op0, op1);
13564 if (!tem)
13565 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13567 #ifdef ENABLE_FOLD_CHECKING
13568 md5_init_ctx (&ctx);
13569 fold_checksum_tree (op0, &ctx, ht);
13570 md5_finish_ctx (&ctx, checksum_after_op0);
13571 htab_empty (ht);
13573 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13574 fold_check_failed (op0, tem);
13576 md5_init_ctx (&ctx);
13577 fold_checksum_tree (op1, &ctx, ht);
13578 md5_finish_ctx (&ctx, checksum_after_op1);
13579 htab_delete (ht);
13581 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13582 fold_check_failed (op1, tem);
13583 #endif
13584 return tem;
13587 /* Fold a ternary tree expression with code CODE of type TYPE with
13588 operands OP0, OP1, and OP2. Return a folded expression if
13589 successful. Otherwise, return a tree expression with code CODE of
13590 type TYPE with operands OP0, OP1, and OP2. */
13592 tree
13593 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13594 MEM_STAT_DECL)
13596 tree tem;
13597 #ifdef ENABLE_FOLD_CHECKING
13598 unsigned char checksum_before_op0[16],
13599 checksum_before_op1[16],
13600 checksum_before_op2[16],
13601 checksum_after_op0[16],
13602 checksum_after_op1[16],
13603 checksum_after_op2[16];
13604 struct md5_ctx ctx;
13605 htab_t ht;
13607 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13608 md5_init_ctx (&ctx);
13609 fold_checksum_tree (op0, &ctx, ht);
13610 md5_finish_ctx (&ctx, checksum_before_op0);
13611 htab_empty (ht);
13613 md5_init_ctx (&ctx);
13614 fold_checksum_tree (op1, &ctx, ht);
13615 md5_finish_ctx (&ctx, checksum_before_op1);
13616 htab_empty (ht);
13618 md5_init_ctx (&ctx);
13619 fold_checksum_tree (op2, &ctx, ht);
13620 md5_finish_ctx (&ctx, checksum_before_op2);
13621 htab_empty (ht);
13622 #endif
13624 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13625 tem = fold_ternary (code, type, op0, op1, op2);
13626 if (!tem)
13627 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13629 #ifdef ENABLE_FOLD_CHECKING
13630 md5_init_ctx (&ctx);
13631 fold_checksum_tree (op0, &ctx, ht);
13632 md5_finish_ctx (&ctx, checksum_after_op0);
13633 htab_empty (ht);
13635 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13636 fold_check_failed (op0, tem);
13638 md5_init_ctx (&ctx);
13639 fold_checksum_tree (op1, &ctx, ht);
13640 md5_finish_ctx (&ctx, checksum_after_op1);
13641 htab_empty (ht);
13643 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13644 fold_check_failed (op1, tem);
13646 md5_init_ctx (&ctx);
13647 fold_checksum_tree (op2, &ctx, ht);
13648 md5_finish_ctx (&ctx, checksum_after_op2);
13649 htab_delete (ht);
13651 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13652 fold_check_failed (op2, tem);
13653 #endif
13654 return tem;
13657 /* Fold a CALL_EXPR expression of type TYPE, with function FN, the NARGS
13658 arguments in ARGARRAY, and a null static chain.
13659 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13660 of type TYPE from the given operands as constructed by build_call_array. */
13662 tree
13663 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13665 tree tem;
13666 #ifdef ENABLE_FOLD_CHECKING
13667 unsigned char checksum_before_fn[16],
13668 checksum_before_arglist[16],
13669 checksum_after_fn[16],
13670 checksum_after_arglist[16];
13671 struct md5_ctx ctx;
13672 htab_t ht;
13673 int i;
13675 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13676 md5_init_ctx (&ctx);
13677 fold_checksum_tree (fn, &ctx, ht);
13678 md5_finish_ctx (&ctx, checksum_before_fn);
13679 htab_empty (ht);
13681 md5_init_ctx (&ctx);
13682 for (i = 0; i < nargs; i++)
13683 fold_checksum_tree (argarray[i], &ctx, ht);
13684 md5_finish_ctx (&ctx, checksum_before_arglist);
13685 htab_empty (ht);
13686 #endif
13688 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13690 #ifdef ENABLE_FOLD_CHECKING
13691 md5_init_ctx (&ctx);
13692 fold_checksum_tree (fn, &ctx, ht);
13693 md5_finish_ctx (&ctx, checksum_after_fn);
13694 htab_empty (ht);
13696 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13697 fold_check_failed (fn, tem);
13699 md5_init_ctx (&ctx);
13700 for (i = 0; i < nargs; i++)
13701 fold_checksum_tree (argarray[i], &ctx, ht);
13702 md5_finish_ctx (&ctx, checksum_after_arglist);
13703 htab_delete (ht);
13705 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13706 fold_check_failed (NULL_TREE, tem);
13707 #endif
13708 return tem;
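/* Illustrative sketch, not part of GCC: folding a builtin call with a
   constant argument.  FABS_DECL is assumed to be the BUILT_IN_FABS
   function decl; the call to fabs (-1.0) folds to the REAL_CST 1.0
   instead of producing a CALL_EXPR.  The helper name is hypothetical.  */
static tree
example_fold_fabs_call (tree fabs_decl)
{
  tree arg = build_real (double_type_node, dconstm1);
  return fold_build_call_array (double_type_node,
				build_fold_addr_expr (fabs_decl),
				1, &arg);
}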
13711 /* Perform constant folding and related simplification of initializer
13712 expression EXPR. These behave identically to "fold_buildN" but ignore
13713 potential run-time traps and exceptions that fold must preserve. */
13715 #define START_FOLD_INIT \
13716 int saved_signaling_nans = flag_signaling_nans;\
13717 int saved_trapping_math = flag_trapping_math;\
13718 int saved_rounding_math = flag_rounding_math;\
13719 int saved_trapv = flag_trapv;\
13720 int saved_folding_initializer = folding_initializer;\
13721 flag_signaling_nans = 0;\
13722 flag_trapping_math = 0;\
13723 flag_rounding_math = 0;\
13724 flag_trapv = 0;\
13725 folding_initializer = 1;
13727 #define END_FOLD_INIT \
13728 flag_signaling_nans = saved_signaling_nans;\
13729 flag_trapping_math = saved_trapping_math;\
13730 flag_rounding_math = saved_rounding_math;\
13731 flag_trapv = saved_trapv;\
13732 folding_initializer = saved_folding_initializer;
13734 tree
13735 fold_build1_initializer (enum tree_code code, tree type, tree op)
13737 tree result;
13738 START_FOLD_INIT;
13740 result = fold_build1 (code, type, op);
13742 END_FOLD_INIT;
13743 return result;
13746 tree
13747 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13749 tree result;
13750 START_FOLD_INIT;
13752 result = fold_build2 (code, type, op0, op1);
13754 END_FOLD_INIT;
13755 return result;
13758 tree
13759 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13760 tree op2)
13762 tree result;
13763 START_FOLD_INIT;
13765 result = fold_build3 (code, type, op0, op1, op2);
13767 END_FOLD_INIT;
13768 return result;
13771 tree
13772 fold_build_call_array_initializer (tree type, tree fn,
13773 int nargs, tree *argarray)
13775 tree result;
13776 START_FOLD_INIT;
13778 result = fold_build_call_array (type, fn, nargs, argarray);
13780 END_FOLD_INIT;
13781 return result;
13784 #undef START_FOLD_INIT
13785 #undef END_FOLD_INIT
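/* Illustrative sketch, not part of GCC: inside a static initializer the
   trapping-math and signaling-NaN flags are suppressed around the fold,
   so a floating-point division that fold_build2 must otherwise preserve
   for its possible run-time exception may still be simplified here.
   NUM and DEN are assumed to be trees of double_type_node.  */
static tree
example_fold_initializer_div (tree num, tree den)
{
  return fold_build2_initializer (RDIV_EXPR, double_type_node, num, den);
}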
13787 /* Determine if first argument is a multiple of second argument. Return 0 if
13788 it is not, or if we cannot easily determine it to be.
13790 An example of the sort of thing we care about (at this point; this routine
13791 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13792 fold cases do now) is discovering that
13794 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13796 is a multiple of
13798 SAVE_EXPR (J * 8)
13800 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13802 This code also handles discovering that
13804 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13806 is a multiple of 8 so we don't have to worry about dealing with a
13807 possible remainder.
13809 Note that we *look* inside a SAVE_EXPR only to determine how it was
13810 calculated; it is not safe for fold to do much of anything else with the
13811 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13812 at run time. For example, the latter example above *cannot* be implemented
13813 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13814 evaluation time of the original SAVE_EXPR is not necessarily the same at
13815 the time the new expression is evaluated. The only optimization of this
13816 sort that would be valid is changing
13818 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13820 divided by 8 to
13822 SAVE_EXPR (I) * SAVE_EXPR (J)
13824 (where the same SAVE_EXPR (J) is used in the original and the
13825 transformed version). */
13827 static int
13828 multiple_of_p (tree type, const_tree top, const_tree bottom)
13830 if (operand_equal_p (top, bottom, 0))
13831 return 1;
13833 if (TREE_CODE (type) != INTEGER_TYPE)
13834 return 0;
13836 switch (TREE_CODE (top))
13838 case BIT_AND_EXPR:
13839 /* Bitwise and provides a power of two multiple. If the mask is
13840 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13841 if (!integer_pow2p (bottom))
13842 return 0;
13843 /* FALLTHRU */
13845 case MULT_EXPR:
13846 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13847 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13849 case PLUS_EXPR:
13850 case MINUS_EXPR:
13851 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13852 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13854 case LSHIFT_EXPR:
13855 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13857 tree op1, t1;
13859 op1 = TREE_OPERAND (top, 1);
13860 /* const_binop may not detect overflow correctly,
13861 so check for it explicitly here. */
13862 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13863 > TREE_INT_CST_LOW (op1)
13864 && TREE_INT_CST_HIGH (op1) == 0
13865 && 0 != (t1 = fold_convert (type,
13866 const_binop (LSHIFT_EXPR,
13867 size_one_node,
13868 op1, 0)))
13869 && !TREE_OVERFLOW (t1))
13870 return multiple_of_p (type, t1, bottom);
13872 return 0;
13874 case NOP_EXPR:
13875 /* Can't handle conversions from non-integral or wider integral type. */
13876 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13877 || (TYPE_PRECISION (type)
13878 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13879 return 0;
13881 /* ... fall through ... */
13883 case SAVE_EXPR:
13884 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13886 case INTEGER_CST:
13887 if (TREE_CODE (bottom) != INTEGER_CST
13888 || integer_zerop (bottom)
13889 || (TYPE_UNSIGNED (type)
13890 && (tree_int_cst_sgn (top) < 0
13891 || tree_int_cst_sgn (bottom) < 0)))
13892 return 0;
13893 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13894 top, bottom, 0));
13896 default:
13897 return 0;
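/* Illustrative sketch, not part of GCC: the question multiple_of_p
   answers, posed for "j * 8" against 8.  J is assumed to be a tree of
   some INTEGER_TYPE; the MULT_EXPR case above makes the result 1.  */
static int
example_multiple_of_eight (tree j)
{
  tree type = TREE_TYPE (j);
  tree eight = build_int_cst (type, 8);
  tree prod = build2 (MULT_EXPR, type, j, eight);
  return multiple_of_p (type, prod, eight);
}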
13901 /* Return true if `t' is known to be non-negative. If the return
13902 value is based on the assumption that signed overflow is undefined,
13903 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13904 *STRICT_OVERFLOW_P. */
13906 bool
13907 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13909 if (t == error_mark_node)
13910 return false;
13912 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13913 return true;
13915 switch (TREE_CODE (t))
13917 case SSA_NAME:
13918 /* Query VRP to see if it has recorded any information about
13919 the range of this object. */
13920 return ssa_name_nonnegative_p (t);
13922 case ABS_EXPR:
13923 /* We can't return 1 if flag_wrapv is set because
13924 ABS_EXPR<INT_MIN> = INT_MIN. */
13925 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13926 return true;
13927 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13929 *strict_overflow_p = true;
13930 return true;
13932 break;
13934 case INTEGER_CST:
13935 return tree_int_cst_sgn (t) >= 0;
13937 case REAL_CST:
13938 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13940 case FIXED_CST:
13941 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13943 case POINTER_PLUS_EXPR:
13944 case PLUS_EXPR:
13945 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13946 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13947 strict_overflow_p)
13948 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13949 strict_overflow_p));
13951 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13952 both unsigned and at least 2 bits shorter than the result. */
13953 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13954 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13955 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13957 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13958 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13959 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13960 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13962 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13963 TYPE_PRECISION (inner2)) + 1;
13964 return prec < TYPE_PRECISION (TREE_TYPE (t));
13967 break;
13969 case MULT_EXPR:
13970 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13972 /* x * x for floating point x is always non-negative. */
13973 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13974 return true;
13975 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13976 strict_overflow_p)
13977 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13978 strict_overflow_p));
13981 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13982 both unsigned and their combined width is less than that of the result. */
13983 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13984 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13985 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13987 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13988 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13989 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13990 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13991 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13992 < TYPE_PRECISION (TREE_TYPE (t));
13994 return false;
13996 case BIT_AND_EXPR:
13997 case MAX_EXPR:
13998 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13999 strict_overflow_p)
14000 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14001 strict_overflow_p));
14003 case BIT_IOR_EXPR:
14004 case BIT_XOR_EXPR:
14005 case MIN_EXPR:
14006 case RDIV_EXPR:
14007 case TRUNC_DIV_EXPR:
14008 case CEIL_DIV_EXPR:
14009 case FLOOR_DIV_EXPR:
14010 case ROUND_DIV_EXPR:
14011 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14012 strict_overflow_p)
14013 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14014 strict_overflow_p));
14016 case TRUNC_MOD_EXPR:
14017 case CEIL_MOD_EXPR:
14018 case FLOOR_MOD_EXPR:
14019 case ROUND_MOD_EXPR:
14020 case SAVE_EXPR:
14021 case NON_LVALUE_EXPR:
14022 case FLOAT_EXPR:
14023 case FIX_TRUNC_EXPR:
14024 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14025 strict_overflow_p);
14027 case COMPOUND_EXPR:
14028 case MODIFY_EXPR:
14029 case GIMPLE_MODIFY_STMT:
14030 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14031 strict_overflow_p);
14033 case BIND_EXPR:
14034 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14035 strict_overflow_p);
14037 case COND_EXPR:
14038 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14039 strict_overflow_p)
14040 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14041 strict_overflow_p));
14043 case NOP_EXPR:
14045 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14046 tree outer_type = TREE_TYPE (t);
14048 if (TREE_CODE (outer_type) == REAL_TYPE)
14050 if (TREE_CODE (inner_type) == REAL_TYPE)
14051 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14052 strict_overflow_p);
14053 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14055 if (TYPE_UNSIGNED (inner_type))
14056 return true;
14057 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14058 strict_overflow_p);
14061 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14063 if (TREE_CODE (inner_type) == REAL_TYPE)
14064 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
14065 strict_overflow_p);
14066 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14067 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14068 && TYPE_UNSIGNED (inner_type);
14071 break;
14073 case TARGET_EXPR:
14075 tree temp = TARGET_EXPR_SLOT (t);
14076 t = TARGET_EXPR_INITIAL (t);
14078 /* If the initializer is non-void, then it's a normal expression
14079 that will be assigned to the slot. */
14080 if (!VOID_TYPE_P (t))
14081 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14083 /* Otherwise, the initializer sets the slot in some way. One common
14084 way is an assignment statement at the end of the initializer. */
14085 while (1)
14087 if (TREE_CODE (t) == BIND_EXPR)
14088 t = expr_last (BIND_EXPR_BODY (t));
14089 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14090 || TREE_CODE (t) == TRY_CATCH_EXPR)
14091 t = expr_last (TREE_OPERAND (t, 0));
14092 else if (TREE_CODE (t) == STATEMENT_LIST)
14093 t = expr_last (t);
14094 else
14095 break;
14097 if ((TREE_CODE (t) == MODIFY_EXPR
	       || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	      && GENERIC_TREE_OPERAND (t, 0) == temp)
	    return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
						  strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SCALB):
	    CASE_FLT_FN (BUILT_IN_SCALBLN):
	    CASE_FLT_FN (BUILT_IN_SCALBN):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if either the 1st or the 2nd argument is
		 nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if both the 1st and the 2nd arguments are
		 nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
		{
		  tree arg1 = CALL_EXPR_ARG (t, 1);
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      {
	tree type = TREE_TYPE (t);
	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
	    && truth_value_p (TREE_CODE (t)))
	  /* Truth values evaluate to 0 or 1, which is nonnegative unless
	     we have a signed:1 type (where the values are -1 and 0).  */
	  return true;
      }
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

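/* Usage sketch (illustrative only, not a fragment of the GCC sources;
   N is assumed to be some expression tree built elsewhere):

     if (tree_expr_nonnegative_p (n))
       ...  N is provably >= 0, e.g. an unsigned value, an ABS_EXPR,
	    or a call to one of the builtins listed above  ...

   A false result only means "not provably nonnegative", not "negative";
   any reasoning that relied on signed overflow being undefined has
   already been reported through fold_overflow_warning.  */
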
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  bool sub_strict_overflow_p;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case INTEGER_CST:
      return !integer_zerop (t);

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* In the presence of negative values it is hard to say
	     anything.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	    return false;
	  /* One of the operands must be positive and the other
	     non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a two's-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					      strict_overflow_p));
      }
      break;

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  /* MIN of two nonzero values is itself one of them, hence
	     nonzero.  */
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

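/* Usage sketch (illustrative only; VAR is assumed to be a non-weak
   VAR_DECL built elsewhere):

     tree addr = build_fold_addr_expr (var);
     if (tree_expr_nonzero_p (addr))
       ...  taken: the ADDR_EXPR case above answers !DECL_WEAK (var),
	    since only a weak symbol may resolve to address zero  ...  */
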
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

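/* For example (a sketch, not a fragment of this file):

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   C is the INTEGER_CST 5.  Had B been, say, a PARM_DECL, fold_binary
   would not have produced a TREE_CONSTANT result and C would be
   NULL_TREE.  */
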
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

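/* Likewise (sketch):

     tree v = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
				      build_int_cst (integer_type_node, 9));

   yields the INTEGER_CST -9, and NULL_TREE whenever the operand does
   not fold.  */
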
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}

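/* For example (sketch): if EXP is the ARRAY_REF "abc"[1] -- base the
   STRING_CST "abc", index the INTEGER_CST 1 -- the result is the
   character constant 'b' in EXP's type.  A variable index, a non-byte
   element mode, or an index beyond TREE_STRING_LENGTH (which counts
   the trailing NUL) all make the function return NULL.  */
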
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

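/* Sketch: fold_negate_const (build_int_cst (integer_type_node, 5),
   integer_type_node) yields the INTEGER_CST -5 with no overflow bit.
   Negating the most negative value of a signed type wraps to itself;
   neg_double reports that, and force_fit_type_double then sets
   TREE_OVERFLOW on the result.  */
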
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

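/* Sketch:

     tree t = fold_abs_const (build_int_cst (integer_type_node, -7),
			      integer_type_node);

   T is the INTEGER_CST 7.  As with negation, the one delicate case is
   the most negative value of a signed type, whose negation wraps and
   is flagged with TREE_OVERFLOW via force_fit_type_double.  */
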
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}

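/* Sketch: for ARG0 the unsigned int 0, both halves of the double-int
   are complemented, so the result is the all-ones constant ~0 (e.g.
   0xffffffff for a 32-bit type) after force_fit_type_double truncates
   it to TYPE's precision.  */
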
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

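/* For example (sketch):

     fold_relational_const (LT_EXPR, boolean_type_node,
			    build_int_cst (integer_type_node, 1),
			    build_int_cst (integer_type_node, 2))

   returns boolean_true_node via the EQ/LT path above.  With a NaN
   operand and CODE == LT_EXPR, the result is instead "false" -- or
   NULL_TREE under -ftrapping-math, since folding would lose the
   invalid-operand exception.  */
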
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the MODIFY_EXPR inside the return,
     has side effects.  If either of them lacks side effects, we don't need
     to wrap the expression in a cleanup point expression.  Note that we
     don't check the left-hand side of the modify because it should always
     be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

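/* Sketch: for EXPR a call with side effects, this returns
   build1 (CLEANUP_POINT_EXPR, type, expr), so that temporaries needing
   destruction die at the end of the full-expression; for a simple
   return such as "return x;", whose modify has a side-effect-free
   right-hand side, EXPR is returned unchanged.  */
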
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

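/* Usage sketch (illustrative): callers typically fall back to an
   explicit INDIRECT_REF when no simplification applies:

     tree deref = fold_indirect_ref_1 (TREE_TYPE (TREE_TYPE (p)), p);
     if (!deref)
       deref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (p)), p);

   which is exactly what build_fold_indirect_ref below does.  */
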
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

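/* For instance (sketch): given T = COMPOUND_EXPR <f (), x + 1>, the
   right operand has no side effects, so the loop steps to f () and
   returns it; the pure arithmetic that fed the ignored result is
   dropped.  */
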
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because for a constant this check is more expensive
     than simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

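/* A worked example of the power-of-two path (illustrative only):

     tree n = round_up (size_int (13), 8);

   13 & (8 - 1) is nonzero, so LOW becomes (13 & ~7) + 8 = 16.  For a
   non-constant VALUE the same rounding is emitted as
   (VALUE + 7) & -8 through size_binop.  */
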
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a
     constant, because for a constant this check is more expensive
     than simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

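/* Likewise (sketch): round_down (size_int (13), 8) folds to 8; a
   non-constant VALUE becomes VALUE & -8 on the power-of-two path and
   (VALUE floordiv DIV) * DIV otherwise.  */
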
/* Returns a pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

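/* Sketch: for EXP = &a[i] (an ADDR_EXPR), get_inner_reference peels the
   ARRAY_REF, so the core becomes &a, *PBITPOS carries any constant part
   of the displacement in bits, and *POFFSET the variable part (here a
   tree involving I); for a bare pointer EXP the core is the pointer
   itself with a zero offset.  */
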
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

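/* Usage sketch (hypothetical trees, on a byte-addressed target with
   4-byte ints; A is an array of int):

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))   -- e1 = &a[3], e2 = &a[1]
       ...  diff == 8  ...

   Both cores are &a and compare equal under operand_equal_p; the
   constant index displacements land in BITPOS1/BITPOS2, giving
   (96 - 32) / BITS_PER_UNIT = 8.  */
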
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

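/* Examples (sketches): with EXP = (-x) * (-y) and sign-dependent
   rounding not honored, both NEGATE_EXPRs strip and the result is
   fold_build2 (MULT_EXPR, type, x, y).  With EXP = sin (-x), sin is
   "odd" per negate_mathfn_p, so the result is a call to sin (x);
   that is safe only because the caller has promised the sign of the
   result does not matter.  */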