/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (const_tree, const_tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
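
/* Illustration (not in the original source): with 8-bit quantities,
   100 + 100 wraps to -56 (0xC8).  The operands agree in sign, so
   a ^ b has a clear sign bit and ~(a ^ b) has it set, while a ^ sum
   (0x64 ^ 0xC8 == 0xAC) also has the sign bit set; their AND is
   therefore negative and OVERFLOW_SUM_SIGN reports the overflow.  */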
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
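
/* Worked example (illustrative only, assuming HOST_BITS_PER_WIDE_INT
   == 64, so BASE == 2^32): for x == 0x123456789ABCDEF0,
   LOWPART (x) == 0x9ABCDEF0 and HIGHPART (x) == 0x12345678, and
   x == LOWPART (x) + HIGHPART (x) * BASE holds exactly.  */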
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
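
/* Round-trip sketch (illustrative only; assumes 64-bit HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (w, 0x123456789ABCDEF0, 0x0FEDCBA987654321);
     decode (w, &lo, &hi);

   leaves w == { 0x9ABCDEF0, 0x12345678, 0x87654321, 0x0FEDCBA9 } in
   between, and decode restores the original pair in LO and HI.  */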
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
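
/* Example (illustrative, not from the original source): fitting the
   value 0x1FF to a signed 8-bit type first clears the bits above bit 7,
   leaving 0xFF, then sign-extends bit 7 through both words, yielding
   the double-word representation of -1.  Because the result differs
   from the input, the function returns nonzero to signal overflow.  */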
/* We force the double-word integer HIGH:LOW to the range of the type
   TYPE by sign- or zero-extending it.

   OVERFLOWABLE indicates whether we are interested in overflow of the
   value: when > 0 we are only interested in signed overflow, when < 0
   we are interested in any overflow.  OVERFLOWED indicates whether
   overflow has already occurred.

   We force the value to be within range of TYPE (by setting to 0 or 1
   all the bits outside the type's range) and set TREE_OVERFLOW if
   OVERFLOWED is nonzero,
   or OVERFLOWABLE is > 0 and signed overflow occurs,
   or OVERFLOWABLE is < 0 and any overflow occurs.

   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */
tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
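
/* Example (illustrative): with UNSIGNED_P set, adding 1 to the all-ones
   double word wraps to zero; the carry out of the low word makes the
   resulting high word 0, which compares below H1 (all ones) unsigned,
   so overflow is reported.  In the signed case, adding two positive
   values whose sum flips the sign bit trips OVERFLOW_SUM_SIGN.  */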
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
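
/* Rationale note (illustrative, not from the original source): the 4x4
   digit multiply computes the full unsigned product.  If an operand is
   negative, its unsigned reading exceeds the signed value by
   2^(2 * HOST_BITS_PER_WIDE_INT), which contributes exactly the other
   operand to the top half; the two neg_double/add_double fixups above
   subtract those contributions.  The signed product then fits the
   double word iff the corrected top half is the sign extension of the
   low half, which is what the final test checks.  */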
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
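
/* Example (illustrative, assuming 64-bit HOST_WIDE_INT and PREC == 128):
   lshift_double (1, 0, 64, 128, &lo, &hi, 0) moves the single set bit
   across the word boundary, leaving lo == 0 and hi == 1.  */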
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
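
/* Example (illustrative): with PREC == 8, rotating 0x81 left by 1
   combines the left shift (low bits 0x02) with the logical right shift
   by 7 (0x01), giving 0x03 -- the bit that falls off the top reappears
   at the bottom.  */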
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
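
/* Rounding examples (illustrative, not from the original source), for
   num = 11 or -11 and den = 4:

     TRUNC_DIV_EXPR:   11 / 4 ->  2 rem  3,    -11 / 4 -> -2 rem -3
     FLOOR_DIV_EXPR:   11 / 4 ->  2 rem  3,    -11 / 4 -> -3 rem  1
     CEIL_DIV_EXPR:    11 / 4 ->  3 rem -1,    -11 / 4 -> -2 rem -3
     ROUND_DIV_EXPR:   11 / 4 ->  3 rem -1,    -11 / 4 -> -3 rem  1

   In every mode the final remainder computation preserves the identity
   num == quo * den + rem.  */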
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);

  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
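
/* Usage sketch (illustrative only; FOLDED, USED_P and STMT are
   placeholder names, not identifiers from this file):

     fold_defer_overflow_warnings ();
     folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Any overflow warning raised while folding is buffered and emitted,
   at most once, only when USED_P is true.  */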
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
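
/* Example (illustrative): for a signed 32-bit type the only value this
   rejects is 0x80000000 (INT_MIN), since -INT_MIN is not representable;
   the final comparison against 1 << (prec - 1) detects exactly that
   bit pattern.  */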
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
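
/* Decomposition examples (illustrative only, with X a variable):

     split_tree (X + 5, PLUS_EXPR, ...)  -> var = X, *litp = 5
     split_tree (X - 5, PLUS_EXPR, ...)  -> var = X, *minus_litp = 5
     split_tree (X, PLUS_EXPR, ...)      -> var = X, other parts null  */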
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
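
/* Usage sketch (illustrative only): folding 6 * 7 into the constant 42,
   assuming integer_type_node operands:

     tree t = int_const_binop (MULT_EXPR,
                               build_int_cst (integer_type_node, 6),
                               build_int_cst (integer_type_node, 7), 0);

   With NOTRUNC == 0 the result goes through force_fit_type_double, so
   the returned INTEGER_CST is already truncated to the type and carries
   TREE_OVERFLOW when appropriate.  */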
1803 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1804 constant. We assume ARG1 and ARG2 have the same data type, or at least
1805 are the same kind of constant and the same machine mode. Return zero if
1806 combining the constants is not allowed in the current operating mode.
1808 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1810 static tree
1811 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1813 /* Sanity check for the recursive cases. */
1814 if (!arg1 || !arg2)
1815 return NULL_TREE;
1817 STRIP_NOPS (arg1);
1818 STRIP_NOPS (arg2);
1820 if (TREE_CODE (arg1) == INTEGER_CST)
1821 return int_const_binop (code, arg1, arg2, notrunc);
1823 if (TREE_CODE (arg1) == REAL_CST)
1825 enum machine_mode mode;
1826 REAL_VALUE_TYPE d1;
1827 REAL_VALUE_TYPE d2;
1828 REAL_VALUE_TYPE value;
1829 REAL_VALUE_TYPE result;
1830 bool inexact;
1831 tree t, type;
1833 /* The following codes are handled by real_arithmetic. */
1834 switch (code)
1836 case PLUS_EXPR:
1837 case MINUS_EXPR:
1838 case MULT_EXPR:
1839 case RDIV_EXPR:
1840 case MIN_EXPR:
1841 case MAX_EXPR:
1842 break;
1844 default:
1845 return NULL_TREE;
1848 d1 = TREE_REAL_CST (arg1);
1849 d2 = TREE_REAL_CST (arg2);
1851 type = TREE_TYPE (arg1);
1852 mode = TYPE_MODE (type);
1854 /* Don't perform operation if we honor signaling NaNs and
1855 either operand is a NaN. */
1856 if (HONOR_SNANS (mode)
1857 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1858 return NULL_TREE;
1860 /* Don't perform operation if it would raise a division
1861 by zero exception. */
1862 if (code == RDIV_EXPR
1863 && REAL_VALUES_EQUAL (d2, dconst0)
1864 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1865 return NULL_TREE;
1867 /* If either operand is a NaN, just return it. Otherwise, set up
1868 for floating-point trap; we return an overflow. */
1869 if (REAL_VALUE_ISNAN (d1))
1870 return arg1;
1871 else if (REAL_VALUE_ISNAN (d2))
1872 return arg2;
1874 inexact = real_arithmetic (&value, code, &d1, &d2);
1875 real_convert (&result, mode, &value);
1877 /* Don't constant fold this floating point operation if
1878 the result has overflowed and flag_trapping_math. */
1879 if (flag_trapping_math
1880 && MODE_HAS_INFINITIES (mode)
1881 && REAL_VALUE_ISINF (result)
1882 && !REAL_VALUE_ISINF (d1)
1883 && !REAL_VALUE_ISINF (d2))
1884 return NULL_TREE;
1886 /* Don't constant fold this floating point operation if the
1887 result may dependent upon the run-time rounding mode and
1888 flag_rounding_math is set, or if GCC's software emulation
1889 is unable to accurately represent the result. */
1890 if ((flag_rounding_math
1891 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1892 && !flag_unsafe_math_optimizations))
1893 && (inexact || !real_identical (&result, &value)))
1894 return NULL_TREE;
1896 t = build_real (type, result);
1898 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1899 return t;
1902 if (TREE_CODE (arg1) == FIXED_CST)
1904 FIXED_VALUE_TYPE f1;
1905 FIXED_VALUE_TYPE f2;
1906 FIXED_VALUE_TYPE result;
1907 tree t, type;
1908 int sat_p;
1909 bool overflow_p;
1911 /* The following codes are handled by fixed_arithmetic. */
1912 switch (code)
1914 case PLUS_EXPR:
1915 case MINUS_EXPR:
1916 case MULT_EXPR:
1917 case TRUNC_DIV_EXPR:
1918 f2 = TREE_FIXED_CST (arg2);
1919 break;
1921 case LSHIFT_EXPR:
1922 case RSHIFT_EXPR:
1923 f2.data.high = TREE_INT_CST_HIGH (arg2);
1924 f2.data.low = TREE_INT_CST_LOW (arg2);
1925 f2.mode = SImode;
1926 break;
1928 default:
1929 return NULL_TREE;
1932 f1 = TREE_FIXED_CST (arg1);
1933 type = TREE_TYPE (arg1);
1934 sat_p = TYPE_SATURATING (type);
1935 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1936 t = build_fixed (type, result);
1937 /* Propagate overflow flags. */
1938 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1940 TREE_OVERFLOW (t) = 1;
1941 TREE_CONSTANT_OVERFLOW (t) = 1;
1943 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1944 TREE_CONSTANT_OVERFLOW (t) = 1;
1945 return t;
1948 if (TREE_CODE (arg1) == COMPLEX_CST)
1950 tree type = TREE_TYPE (arg1);
1951 tree r1 = TREE_REALPART (arg1);
1952 tree i1 = TREE_IMAGPART (arg1);
1953 tree r2 = TREE_REALPART (arg2);
1954 tree i2 = TREE_IMAGPART (arg2);
1955 tree real, imag;
1957 switch (code)
1959 case PLUS_EXPR:
1960 case MINUS_EXPR:
1961 real = const_binop (code, r1, r2, notrunc);
1962 imag = const_binop (code, i1, i2, notrunc);
1963 break;
1965 case MULT_EXPR:
1966 real = const_binop (MINUS_EXPR,
1967 const_binop (MULT_EXPR, r1, r2, notrunc),
1968 const_binop (MULT_EXPR, i1, i2, notrunc),
1969 notrunc);
1970 imag = const_binop (PLUS_EXPR,
1971 const_binop (MULT_EXPR, r1, i2, notrunc),
1972 const_binop (MULT_EXPR, i1, r2, notrunc),
1973 notrunc);
1974 break;
1976 case RDIV_EXPR:
1978 tree magsquared
1979 = const_binop (PLUS_EXPR,
1980 const_binop (MULT_EXPR, r2, r2, notrunc),
1981 const_binop (MULT_EXPR, i2, i2, notrunc),
1982 notrunc);
1983 tree t1
1984 = const_binop (PLUS_EXPR,
1985 const_binop (MULT_EXPR, r1, r2, notrunc),
1986 const_binop (MULT_EXPR, i1, i2, notrunc),
1987 notrunc);
1988 tree t2
1989 = const_binop (MINUS_EXPR,
1990 const_binop (MULT_EXPR, i1, r2, notrunc),
1991 const_binop (MULT_EXPR, r1, i2, notrunc),
1992 notrunc);
1994 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1995 code = TRUNC_DIV_EXPR;
1997 real = const_binop (code, t1, magsquared, notrunc);
1998 imag = const_binop (code, t2, magsquared, notrunc);
2000 break;
2002 default:
2003 return NULL_TREE;
2006 if (real && imag)
2007 return build_complex (type, real, imag);
2010 return NULL_TREE;
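/* Illustrative note (not part of the original source): the COMPLEX_CST
   RDIV_EXPR case above is the textbook division formula

       (r1 + i1*I) / (r2 + i2*I)
         = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2)

   so T1 and T2 are the real and imaginary numerators and MAGSQUARED is
   the shared denominator.  A minimal standalone sketch in plain C:  */
#if 0
static void
cdiv (double r1, double i1, double r2, double i2, double *re, double *im)
{
  double magsquared = r2 * r2 + i2 * i2;
  *re = (r1 * r2 + i1 * i2) / magsquared;  /* t1 / magsquared */
  *im = (i1 * r2 - r1 * i2) / magsquared;  /* t2 / magsquared */
  /* E.g. (1 + 2i) / (3 + 4i) = (11 + 2i) / 25 = 0.44 + 0.08i.  */
}
#endif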
2013 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2014 indicates which particular sizetype to create. */
2016 tree
2017 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2019 return build_int_cst (sizetype_tab[(int) kind], number);
2022 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2023 is a tree code. The type of the result is taken from the operands.
2024 Both must be equivalent integer types, ala int_binop_types_match_p.
2025 If the operands are constant, so is the result. */
2027 tree
2028 size_binop (enum tree_code code, tree arg0, tree arg1)
2030 tree type = TREE_TYPE (arg0);
2032 if (arg0 == error_mark_node || arg1 == error_mark_node)
2033 return error_mark_node;
2035 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2036 TREE_TYPE (arg1)));
2038 /* Handle the special case of two integer constants faster. */
2039 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2041 /* And some specific cases even faster than that. */
2042 if (code == PLUS_EXPR)
2044 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2045 return arg1;
2046 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2047 return arg0;
2049 else if (code == MINUS_EXPR)
2051 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2052 return arg0;
2054 else if (code == MULT_EXPR)
2056 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2057 return arg1;
2060 /* Handle general case of two integer constants. */
2061 return int_const_binop (code, arg0, arg1, 0);
2064 return fold_build2 (code, type, arg0, arg1);
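/* Illustrative note (not part of the original source): the fast paths
   above apply only when both operands are INTEGER_CSTs.  E.g. for a
   constant X, size_binop (PLUS_EXPR, size_int (0), X) returns X
   untouched (provided the zero has not overflowed), while
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) falls through to
   int_const_binop and folds to a constant 12.  */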
2067 /* Given two values, either both of sizetype or both of bitsizetype,
2068 compute the difference between the two values. Return the value
2069 in signed type corresponding to the type of the operands. */
2071 tree
2072 size_diffop (tree arg0, tree arg1)
2074 tree type = TREE_TYPE (arg0);
2075 tree ctype;
2077 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2078 TREE_TYPE (arg1)));
2080 /* If the type is already signed, just do the simple thing. */
2081 if (!TYPE_UNSIGNED (type))
2082 return size_binop (MINUS_EXPR, arg0, arg1);
2084 if (type == sizetype)
2085 ctype = ssizetype;
2086 else if (type == bitsizetype)
2087 ctype = sbitsizetype;
2088 else
2089 ctype = signed_type_for (type);
2091 /* If either operand is not a constant, do the conversions to the signed
2092 type and subtract. The hardware will do the right thing with any
2093 overflow in the subtraction. */
2094 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2095 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2096 fold_convert (ctype, arg1));
2098 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2099 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2100 overflow) and negate (which can't either). Special-case a result
2101 of zero while we're here. */
2102 if (tree_int_cst_equal (arg0, arg1))
2103 return build_int_cst (ctype, 0);
2104 else if (tree_int_cst_lt (arg1, arg0))
2105 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2106 else
2107 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2108 fold_convert (ctype, size_binop (MINUS_EXPR,
2109 arg1, arg0)));
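/* Illustrative example (not part of the original source): for sizetype
   constants ARG0 = 2 and ARG1 = 5, the code above subtracts the other
   way around in the unsigned type, 5 - 2 = 3, which cannot overflow,
   converts to ssizetype, and negates to give -3.  */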
2112 /* A subroutine of fold_convert_const handling conversions of an
2113 INTEGER_CST to another integer type. */
2115 static tree
2116 fold_convert_const_int_from_int (tree type, const_tree arg1)
2118 tree t;
2120 /* Given an integer constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2123 TREE_INT_CST_HIGH (arg1),
2124 /* Don't set the overflow when
2125 converting a pointer. */
2126 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2127 (TREE_INT_CST_HIGH (arg1) < 0
2128 && (TYPE_UNSIGNED (type)
2129 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2130 | TREE_OVERFLOW (arg1));
2132 return t;
2135 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2136 to an integer type. */
2138 static tree
2139 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2141 int overflow = 0;
2142 tree t;
2144 /* The following code implements the floating point to integer
2145 conversion rules required by the Java Language Specification,
2146 that IEEE NaNs are mapped to zero and values that overflow
2147 the target precision saturate, i.e. values greater than
2148 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2149 are mapped to INT_MIN. These semantics are allowed by the
2150 C and C++ standards that simply state that the behavior of
2151 FP-to-integer conversion is unspecified upon overflow. */
2153 HOST_WIDE_INT high, low;
2154 REAL_VALUE_TYPE r;
2155 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2157 switch (code)
2159 case FIX_TRUNC_EXPR:
2160 real_trunc (&r, VOIDmode, &x);
2161 break;
2163 default:
2164 gcc_unreachable ();
2167 /* If R is NaN, return zero and show we have an overflow. */
2168 if (REAL_VALUE_ISNAN (r))
2170 overflow = 1;
2171 high = 0;
2172 low = 0;
2175 /* See if R is less than the lower bound or greater than the
2176 upper bound. */
2178 if (! overflow)
2180 tree lt = TYPE_MIN_VALUE (type);
2181 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2182 if (REAL_VALUES_LESS (r, l))
2184 overflow = 1;
2185 high = TREE_INT_CST_HIGH (lt);
2186 low = TREE_INT_CST_LOW (lt);
2190 if (! overflow)
2192 tree ut = TYPE_MAX_VALUE (type);
2193 if (ut)
2195 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2196 if (REAL_VALUES_LESS (u, r))
2198 overflow = 1;
2199 high = TREE_INT_CST_HIGH (ut);
2200 low = TREE_INT_CST_LOW (ut);
2205 if (! overflow)
2206 REAL_VALUE_TO_INT (&low, &high, r);
2208 t = force_fit_type_double (type, low, high, -1,
2209 overflow | TREE_OVERFLOW (arg1));
2210 return t;
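/* Illustrative examples (not part of the original source), using the
   saturating semantics described above with a 32-bit signed target
   type: (int) 1.0e30 folds to INT_MAX (2147483647), (int) -1.0e30
   folds to INT_MIN, and (int) NaN folds to 0; in each case
   TREE_OVERFLOW is set on the result.  */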
2213 /* A subroutine of fold_convert_const handling conversions of a
2214 FIXED_CST to an integer type. */
2216 static tree
2217 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2219 tree t;
2220 double_int temp, temp_trunc;
2221 unsigned int mode;
2223 /* Right shift FIXED_CST to temp by fbit. */
2224 temp = TREE_FIXED_CST (arg1).data;
2225 mode = TREE_FIXED_CST (arg1).mode;
2226 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2228 lshift_double (temp.low, temp.high,
2229 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2230 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2232 /* Left shift temp to temp_trunc by fbit. */
2233 lshift_double (temp.low, temp.high,
2234 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2235 &temp_trunc.low, &temp_trunc.high,
2236 SIGNED_FIXED_POINT_MODE_P (mode));
2238 else
2240 temp.low = 0;
2241 temp.high = 0;
2242 temp_trunc.low = 0;
2243 temp_trunc.high = 0;
2246 /* If FIXED_CST is negative, we need to round the value toward 0.
2247 We do this by adding 1 to temp when the discarded fractional bits are nonzero. */
2248 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2249 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2251 double_int one;
2252 one.low = 1;
2253 one.high = 0;
2254 temp = double_int_add (temp, one);
2257 /* Given a fixed-point constant, make new constant with new type,
2258 appropriately sign-extended or truncated. */
2259 t = force_fit_type_double (type, temp.low, temp.high, -1,
2260 (temp.high < 0
2261 && (TYPE_UNSIGNED (type)
2262 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2263 | TREE_OVERFLOW (arg1));
2265 return t;
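/* Illustrative example (not part of the original source): for a signed
   fixed-point value of -2.5, the arithmetic right shift by FBIT yields
   the floor, -3; since the discarded fractional bits are nonzero, the
   code above adds 1, giving -2, i.e. truncation toward zero.  */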
2268 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2269 to another floating point type. */
2271 static tree
2272 fold_convert_const_real_from_real (tree type, const_tree arg1)
2274 REAL_VALUE_TYPE value;
2275 tree t;
2277 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2278 t = build_real (type, value);
2280 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2281 return t;
2284 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2285 to a floating point type. */
2287 static tree
2288 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2290 REAL_VALUE_TYPE value;
2291 tree t;
2293 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2294 t = build_real (type, value);
2296 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2297 TREE_CONSTANT_OVERFLOW (t)
2298 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2299 return t;
2302 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2303 to another fixed-point type. */
2305 static tree
2306 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2308 FIXED_VALUE_TYPE value;
2309 tree t;
2310 bool overflow_p;
2312 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2313 TYPE_SATURATING (type));
2314 t = build_fixed (type, value);
2316 /* Propagate overflow flags. */
2317 if (overflow_p | TREE_OVERFLOW (arg1))
2319 TREE_OVERFLOW (t) = 1;
2320 TREE_CONSTANT_OVERFLOW (t) = 1;
2322 else if (TREE_CONSTANT_OVERFLOW (arg1))
2323 TREE_CONSTANT_OVERFLOW (t) = 1;
2324 return t;
2327 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2328 to a fixed-point type. */
2330 static tree
2331 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2333 FIXED_VALUE_TYPE value;
2334 tree t;
2335 bool overflow_p;
2337 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2338 TREE_INT_CST (arg1),
2339 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2340 TYPE_SATURATING (type));
2341 t = build_fixed (type, value);
2343 /* Propagate overflow flags. */
2344 if (overflow_p | TREE_OVERFLOW (arg1))
2346 TREE_OVERFLOW (t) = 1;
2347 TREE_CONSTANT_OVERFLOW (t) = 1;
2349 else if (TREE_CONSTANT_OVERFLOW (arg1))
2350 TREE_CONSTANT_OVERFLOW (t) = 1;
2351 return t;
2354 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2355 to a fixed-point type. */
2357 static tree
2358 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2360 FIXED_VALUE_TYPE value;
2361 tree t;
2362 bool overflow_p;
2364 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2365 &TREE_REAL_CST (arg1),
2366 TYPE_SATURATING (type));
2367 t = build_fixed (type, value);
2369 /* Propagate overflow flags. */
2370 if (overflow_p | TREE_OVERFLOW (arg1))
2372 TREE_OVERFLOW (t) = 1;
2373 TREE_CONSTANT_OVERFLOW (t) = 1;
2375 else if (TREE_CONSTANT_OVERFLOW (arg1))
2376 TREE_CONSTANT_OVERFLOW (t) = 1;
2377 return t;
2380 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2381 type TYPE. If no simplification can be done return NULL_TREE. */
2383 static tree
2384 fold_convert_const (enum tree_code code, tree type, tree arg1)
2386 if (TREE_TYPE (arg1) == type)
2387 return arg1;
2389 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2391 if (TREE_CODE (arg1) == INTEGER_CST)
2392 return fold_convert_const_int_from_int (type, arg1);
2393 else if (TREE_CODE (arg1) == REAL_CST)
2394 return fold_convert_const_int_from_real (code, type, arg1);
2395 else if (TREE_CODE (arg1) == FIXED_CST)
2396 return fold_convert_const_int_from_fixed (type, arg1);
2398 else if (TREE_CODE (type) == REAL_TYPE)
2400 if (TREE_CODE (arg1) == INTEGER_CST)
2401 return build_real_from_int_cst (type, arg1);
2402 else if (TREE_CODE (arg1) == REAL_CST)
2403 return fold_convert_const_real_from_real (type, arg1);
2404 else if (TREE_CODE (arg1) == FIXED_CST)
2405 return fold_convert_const_real_from_fixed (type, arg1);
2407 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2409 if (TREE_CODE (arg1) == FIXED_CST)
2410 return fold_convert_const_fixed_from_fixed (type, arg1);
2411 else if (TREE_CODE (arg1) == INTEGER_CST)
2412 return fold_convert_const_fixed_from_int (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_fixed_from_real (type, arg1);
2416 return NULL_TREE;
2419 /* Construct a vector of zero elements of vector type TYPE. */
2421 static tree
2422 build_zero_vector (tree type)
2424 tree elem, list;
2425 int i, units;
2427 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2428 units = TYPE_VECTOR_SUBPARTS (type);
2430 list = NULL_TREE;
2431 for (i = 0; i < units; i++)
2432 list = tree_cons (NULL_TREE, elem, list);
2433 return build_vector (type, list);
2436 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2438 bool
2439 fold_convertible_p (const_tree type, const_tree arg)
2441 tree orig = TREE_TYPE (arg);
2443 if (type == orig)
2444 return true;
2446 if (TREE_CODE (arg) == ERROR_MARK
2447 || TREE_CODE (type) == ERROR_MARK
2448 || TREE_CODE (orig) == ERROR_MARK)
2449 return false;
2451 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2452 return true;
2454 switch (TREE_CODE (type))
2456 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2457 case POINTER_TYPE: case REFERENCE_TYPE:
2458 case OFFSET_TYPE:
2459 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2460 || TREE_CODE (orig) == OFFSET_TYPE)
2461 return true;
2462 return (TREE_CODE (orig) == VECTOR_TYPE
2463 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2465 default:
2466 return TREE_CODE (type) == TREE_CODE (orig);
2470 /* Convert expression ARG to type TYPE. Used by the middle-end for
2471 simple conversions in preference to calling the front-end's convert. */
2473 tree
2474 fold_convert (tree type, tree arg)
2476 tree orig = TREE_TYPE (arg);
2477 tree tem;
2479 if (type == orig)
2480 return arg;
2482 if (TREE_CODE (arg) == ERROR_MARK
2483 || TREE_CODE (type) == ERROR_MARK
2484 || TREE_CODE (orig) == ERROR_MARK)
2485 return error_mark_node;
2487 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2488 return fold_build1 (NOP_EXPR, type, arg);
2490 switch (TREE_CODE (type))
2492 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2493 case POINTER_TYPE: case REFERENCE_TYPE:
2494 case OFFSET_TYPE:
2495 if (TREE_CODE (arg) == INTEGER_CST)
2497 tem = fold_convert_const (NOP_EXPR, type, arg);
2498 if (tem != NULL_TREE)
2499 return tem;
2501 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2502 || TREE_CODE (orig) == OFFSET_TYPE)
2503 return fold_build1 (NOP_EXPR, type, arg);
2504 if (TREE_CODE (orig) == COMPLEX_TYPE)
2506 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2507 return fold_convert (type, tem);
2509 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2510 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2511 return fold_build1 (NOP_EXPR, type, arg);
2513 case REAL_TYPE:
2514 if (TREE_CODE (arg) == INTEGER_CST)
2516 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2517 if (tem != NULL_TREE)
2518 return tem;
2520 else if (TREE_CODE (arg) == REAL_CST)
2522 tem = fold_convert_const (NOP_EXPR, type, arg);
2523 if (tem != NULL_TREE)
2524 return tem;
2526 else if (TREE_CODE (arg) == FIXED_CST)
2528 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2529 if (tem != NULL_TREE)
2530 return tem;
2533 switch (TREE_CODE (orig))
2535 case INTEGER_TYPE:
2536 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2537 case POINTER_TYPE: case REFERENCE_TYPE:
2538 return fold_build1 (FLOAT_EXPR, type, arg);
2540 case REAL_TYPE:
2541 return fold_build1 (NOP_EXPR, type, arg);
2543 case FIXED_POINT_TYPE:
2544 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2546 case COMPLEX_TYPE:
2547 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2548 return fold_convert (type, tem);
2550 default:
2551 gcc_unreachable ();
2554 case FIXED_POINT_TYPE:
2555 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2556 || TREE_CODE (arg) == REAL_CST)
2558 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2559 if (tem != NULL_TREE)
2560 return tem;
2563 switch (TREE_CODE (orig))
2565 case FIXED_POINT_TYPE:
2566 case INTEGER_TYPE:
2567 case ENUMERAL_TYPE:
2568 case BOOLEAN_TYPE:
2569 case REAL_TYPE:
2570 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2572 case COMPLEX_TYPE:
2573 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2574 return fold_convert (type, tem);
2576 default:
2577 gcc_unreachable ();
2580 case COMPLEX_TYPE:
2581 switch (TREE_CODE (orig))
2583 case INTEGER_TYPE:
2584 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2585 case POINTER_TYPE: case REFERENCE_TYPE:
2586 case REAL_TYPE:
2587 case FIXED_POINT_TYPE:
2588 return build2 (COMPLEX_EXPR, type,
2589 fold_convert (TREE_TYPE (type), arg),
2590 fold_convert (TREE_TYPE (type), integer_zero_node));
2591 case COMPLEX_TYPE:
2593 tree rpart, ipart;
2595 if (TREE_CODE (arg) == COMPLEX_EXPR)
2597 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2598 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2599 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2602 arg = save_expr (arg);
2603 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2604 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2605 rpart = fold_convert (TREE_TYPE (type), rpart);
2606 ipart = fold_convert (TREE_TYPE (type), ipart);
2607 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2610 default:
2611 gcc_unreachable ();
2614 case VECTOR_TYPE:
2615 if (integer_zerop (arg))
2616 return build_zero_vector (type);
2617 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2618 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2619 || TREE_CODE (orig) == VECTOR_TYPE);
2620 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2622 case VOID_TYPE:
2623 tem = fold_ignored_result (arg);
2624 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2625 return tem;
2626 return fold_build1 (NOP_EXPR, type, tem);
2628 default:
2629 gcc_unreachable ();
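/* Illustrative note (not part of the original source): converting a
   COMPLEX_TYPE value to a scalar type keeps only the real part, so
   fold_convert of a complex double Z to double yields
   fold_convert (double, REALPART_EXPR <z>), matching the COMPLEX_TYPE
   branches of the scalar cases above.  */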
2633 /* Return false if expr can be assumed not to be an lvalue, true
2634 otherwise. */
2636 static bool
2637 maybe_lvalue_p (const_tree x)
2639 /* We only need to wrap lvalue tree codes. */
2640 switch (TREE_CODE (x))
2642 case VAR_DECL:
2643 case PARM_DECL:
2644 case RESULT_DECL:
2645 case LABEL_DECL:
2646 case FUNCTION_DECL:
2647 case SSA_NAME:
2649 case COMPONENT_REF:
2650 case INDIRECT_REF:
2651 case ALIGN_INDIRECT_REF:
2652 case MISALIGNED_INDIRECT_REF:
2653 case ARRAY_REF:
2654 case ARRAY_RANGE_REF:
2655 case BIT_FIELD_REF:
2656 case OBJ_TYPE_REF:
2658 case REALPART_EXPR:
2659 case IMAGPART_EXPR:
2660 case PREINCREMENT_EXPR:
2661 case PREDECREMENT_EXPR:
2662 case SAVE_EXPR:
2663 case TRY_CATCH_EXPR:
2664 case WITH_CLEANUP_EXPR:
2665 case COMPOUND_EXPR:
2666 case MODIFY_EXPR:
2667 case GIMPLE_MODIFY_STMT:
2668 case TARGET_EXPR:
2669 case COND_EXPR:
2670 case BIND_EXPR:
2671 case MIN_EXPR:
2672 case MAX_EXPR:
2673 break;
2675 default:
2676 /* Assume the worst for front-end tree codes. */
2677 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2678 break;
2679 return false;
2682 return true;
2685 /* Return an expr equal to X but certainly not valid as an lvalue. */
2687 tree
2688 non_lvalue (tree x)
2690 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2691 us. */
2692 if (in_gimple_form)
2693 return x;
2695 if (! maybe_lvalue_p (x))
2696 return x;
2697 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2700 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2701 Zero means allow extended lvalues. */
2703 int pedantic_lvalues;
2705 /* When pedantic, return an expr equal to X but certainly not valid as a
2706 pedantic lvalue. Otherwise, return X. */
2708 static tree
2709 pedantic_non_lvalue (tree x)
2711 if (pedantic_lvalues)
2712 return non_lvalue (x);
2713 else
2714 return x;
2717 /* Given a tree comparison code, return the code that is the logical inverse
2718 of the given code. It is not safe to do this for floating-point
2719 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2720 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2722 enum tree_code
2723 invert_tree_comparison (enum tree_code code, bool honor_nans)
2725 if (honor_nans && flag_trapping_math)
2726 return ERROR_MARK;
2728 switch (code)
2730 case EQ_EXPR:
2731 return NE_EXPR;
2732 case NE_EXPR:
2733 return EQ_EXPR;
2734 case GT_EXPR:
2735 return honor_nans ? UNLE_EXPR : LE_EXPR;
2736 case GE_EXPR:
2737 return honor_nans ? UNLT_EXPR : LT_EXPR;
2738 case LT_EXPR:
2739 return honor_nans ? UNGE_EXPR : GE_EXPR;
2740 case LE_EXPR:
2741 return honor_nans ? UNGT_EXPR : GT_EXPR;
2742 case LTGT_EXPR:
2743 return UNEQ_EXPR;
2744 case UNEQ_EXPR:
2745 return LTGT_EXPR;
2746 case UNGT_EXPR:
2747 return LE_EXPR;
2748 case UNGE_EXPR:
2749 return LT_EXPR;
2750 case UNLT_EXPR:
2751 return GE_EXPR;
2752 case UNLE_EXPR:
2753 return GT_EXPR;
2754 case ORDERED_EXPR:
2755 return UNORDERED_EXPR;
2756 case UNORDERED_EXPR:
2757 return ORDERED_EXPR;
2758 default:
2759 gcc_unreachable ();
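/* Illustrative note (not part of the original source): when NaNs are
   honored, the logical inverse of a < b is not a >= b, because both
   are false if either operand is NaN; hence LT_EXPR maps to UNGE_EXPR
   above.  With trapping math also enabled we refuse entirely, since
   e.g. UNGE does not trap on unordered operands while LT does.  */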
2763 /* Similar, but return the comparison that results if the operands are
2764 swapped. This is safe for floating-point. */
2766 enum tree_code
2767 swap_tree_comparison (enum tree_code code)
2769 switch (code)
2771 case EQ_EXPR:
2772 case NE_EXPR:
2773 case ORDERED_EXPR:
2774 case UNORDERED_EXPR:
2775 case LTGT_EXPR:
2776 case UNEQ_EXPR:
2777 return code;
2778 case GT_EXPR:
2779 return LT_EXPR;
2780 case GE_EXPR:
2781 return LE_EXPR;
2782 case LT_EXPR:
2783 return GT_EXPR;
2784 case LE_EXPR:
2785 return GE_EXPR;
2786 case UNGT_EXPR:
2787 return UNLT_EXPR;
2788 case UNGE_EXPR:
2789 return UNLE_EXPR;
2790 case UNLT_EXPR:
2791 return UNGT_EXPR;
2792 case UNLE_EXPR:
2793 return UNGE_EXPR;
2794 default:
2795 gcc_unreachable ();
2800 /* Convert a comparison tree code from an enum tree_code representation
2801 into a compcode bit-based encoding. This function is the inverse of
2802 compcode_to_comparison. */
2804 static enum comparison_code
2805 comparison_to_compcode (enum tree_code code)
2807 switch (code)
2809 case LT_EXPR:
2810 return COMPCODE_LT;
2811 case EQ_EXPR:
2812 return COMPCODE_EQ;
2813 case LE_EXPR:
2814 return COMPCODE_LE;
2815 case GT_EXPR:
2816 return COMPCODE_GT;
2817 case NE_EXPR:
2818 return COMPCODE_NE;
2819 case GE_EXPR:
2820 return COMPCODE_GE;
2821 case ORDERED_EXPR:
2822 return COMPCODE_ORD;
2823 case UNORDERED_EXPR:
2824 return COMPCODE_UNORD;
2825 case UNLT_EXPR:
2826 return COMPCODE_UNLT;
2827 case UNEQ_EXPR:
2828 return COMPCODE_UNEQ;
2829 case UNLE_EXPR:
2830 return COMPCODE_UNLE;
2831 case UNGT_EXPR:
2832 return COMPCODE_UNGT;
2833 case LTGT_EXPR:
2834 return COMPCODE_LTGT;
2835 case UNGE_EXPR:
2836 return COMPCODE_UNGE;
2837 default:
2838 gcc_unreachable ();
2842 /* Convert a compcode bit-based encoding of a comparison operator back
2843 to GCC's enum tree_code representation. This function is the
2844 inverse of comparison_to_compcode. */
2846 static enum tree_code
2847 compcode_to_comparison (enum comparison_code code)
2849 switch (code)
2851 case COMPCODE_LT:
2852 return LT_EXPR;
2853 case COMPCODE_EQ:
2854 return EQ_EXPR;
2855 case COMPCODE_LE:
2856 return LE_EXPR;
2857 case COMPCODE_GT:
2858 return GT_EXPR;
2859 case COMPCODE_NE:
2860 return NE_EXPR;
2861 case COMPCODE_GE:
2862 return GE_EXPR;
2863 case COMPCODE_ORD:
2864 return ORDERED_EXPR;
2865 case COMPCODE_UNORD:
2866 return UNORDERED_EXPR;
2867 case COMPCODE_UNLT:
2868 return UNLT_EXPR;
2869 case COMPCODE_UNEQ:
2870 return UNEQ_EXPR;
2871 case COMPCODE_UNLE:
2872 return UNLE_EXPR;
2873 case COMPCODE_UNGT:
2874 return UNGT_EXPR;
2875 case COMPCODE_LTGT:
2876 return LTGT_EXPR;
2877 case COMPCODE_UNGE:
2878 return UNGE_EXPR;
2879 default:
2880 gcc_unreachable ();
2884 /* Return a tree for the comparison which is the combination of
2885 doing the AND or OR (depending on CODE) of the two operations LCODE
2886 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2887 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2888 if this makes the transformation invalid. */
2890 tree
2891 combine_comparisons (enum tree_code code, enum tree_code lcode,
2892 enum tree_code rcode, tree truth_type,
2893 tree ll_arg, tree lr_arg)
2895 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2896 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2897 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2898 enum comparison_code compcode;
2900 switch (code)
2902 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2903 compcode = lcompcode & rcompcode;
2904 break;
2906 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2907 compcode = lcompcode | rcompcode;
2908 break;
2910 default:
2911 return NULL_TREE;
2914 if (!honor_nans)
2916 /* Eliminate unordered comparisons, as well as LTGT and ORD
2917 which are not used unless the mode has NaNs. */
2918 compcode &= ~COMPCODE_UNORD;
2919 if (compcode == COMPCODE_LTGT)
2920 compcode = COMPCODE_NE;
2921 else if (compcode == COMPCODE_ORD)
2922 compcode = COMPCODE_TRUE;
2924 else if (flag_trapping_math)
2926 /* Check that the original operation and the optimized ones will trap
2927 under the same condition. */
2928 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2929 && (lcompcode != COMPCODE_EQ)
2930 && (lcompcode != COMPCODE_ORD);
2931 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2932 && (rcompcode != COMPCODE_EQ)
2933 && (rcompcode != COMPCODE_ORD);
2934 bool trap = (compcode & COMPCODE_UNORD) == 0
2935 && (compcode != COMPCODE_EQ)
2936 && (compcode != COMPCODE_ORD);
2938 /* In a short-circuited boolean expression the LHS might be
2939 such that the RHS, if evaluated, will never trap. For
2940 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2941 if neither x nor y is NaN. (This is a mixed blessing: for
2942 example, the expression above will never trap, hence
2943 optimizing it to x < y would be invalid). */
2944 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2945 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2946 rtrap = false;
2948 /* If the comparison was short-circuited, and only the RHS
2949 trapped, we may now generate a spurious trap. */
2950 if (rtrap && !ltrap
2951 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2952 return NULL_TREE;
2954 /* If we changed the conditions that cause a trap, we lose. */
2955 if ((ltrap || rtrap) != trap)
2956 return NULL_TREE;
2959 if (compcode == COMPCODE_TRUE)
2960 return constant_boolean_node (true, truth_type);
2961 else if (compcode == COMPCODE_FALSE)
2962 return constant_boolean_node (false, truth_type);
2963 else
2964 return fold_build2 (compcode_to_comparison (compcode),
2965 truth_type, ll_arg, lr_arg);
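/* Illustrative example (not part of the original source): the compcode
   encoding is designed so that bitwise AND/OR of the codes combines
   the comparisons.  E.g. for (x < y) || (x == y) we get
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the whole expression
   folds to x <= y, while (x < y) && (x > y) yields COMPCODE_FALSE.  */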
2968 /* Return nonzero if CODE is a tree code that represents a truth value. */
2970 static int
2971 truth_value_p (enum tree_code code)
2973 return (TREE_CODE_CLASS (code) == tcc_comparison
2974 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2975 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2976 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2979 /* Return nonzero if two operands (typically of the same tree node)
2980 are necessarily equal. If either argument has side-effects this
2981 function returns zero. FLAGS modifies behavior as follows:
2983 If OEP_ONLY_CONST is set, only return nonzero for constants.
2984 This function tests whether the operands are indistinguishable;
2985 it does not test whether they are equal using C's == operation.
2986 The distinction is important for IEEE floating point, because
2987 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2988 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2990 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2991 even though it may hold multiple values during a function.
2992 This is because a GCC tree node guarantees that nothing else is
2993 executed between the evaluation of its "operands" (which may often
2994 be evaluated in arbitrary order). Hence if the operands themselves
2995 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2996 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2997 unset means assuming isochronic (or instantaneous) tree equivalence.
2998 Unless comparing arbitrary expression trees, such as from different
2999 statements, this flag can usually be left unset.
3001 If OEP_PURE_SAME is set, then pure functions with identical arguments
3002 are considered the same. It is used when the caller has other ways
3003 to ensure that global memory is unchanged in between. */
3005 int
3006 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3008 /* If either is ERROR_MARK, they aren't equal. */
3009 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3010 return 0;
3012 /* If the two types don't have the same signedness, then we can't consider
3013 them equal. We must check this before the STRIP_NOPS calls
3014 because they may change the signedness of the arguments. */
3015 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3016 return 0;
3018 /* If the two types don't have the same precision, then it is not safe
3019 to strip NOPs. */
3020 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3021 return 0;
3023 STRIP_NOPS (arg0);
3024 STRIP_NOPS (arg1);
3026 /* In case both args are comparisons but with different comparison
3027 code, try to swap the comparison operands of one arg to produce
3028 a match and compare that variant. */
3029 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3030 && COMPARISON_CLASS_P (arg0)
3031 && COMPARISON_CLASS_P (arg1))
3033 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3035 if (TREE_CODE (arg0) == swap_code)
3036 return operand_equal_p (TREE_OPERAND (arg0, 0),
3037 TREE_OPERAND (arg1, 1), flags)
3038 && operand_equal_p (TREE_OPERAND (arg0, 1),
3039 TREE_OPERAND (arg1, 0), flags);
3042 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3043 /* This is needed for conversions and for COMPONENT_REF.
3044 Might as well play it safe and always test this. */
3045 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3046 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3047 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3048 return 0;
3050 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3051 We don't care about side effects in that case because the SAVE_EXPR
3052 takes care of that for us. In all other cases, two expressions are
3053 equal if they have no side effects. If we have two identical
3054 expressions with side effects that should be treated the same due
3055 to the only side effects being identical SAVE_EXPR's, that will
3056 be detected in the recursive calls below. */
3057 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3058 && (TREE_CODE (arg0) == SAVE_EXPR
3059 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3060 return 1;
3062 /* Next handle constant cases, those for which we can return 1 even
3063 if ONLY_CONST is set. */
3064 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3065 switch (TREE_CODE (arg0))
3067 case INTEGER_CST:
3068 return tree_int_cst_equal (arg0, arg1);
3070 case FIXED_CST:
3071 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3072 TREE_FIXED_CST (arg1));
3074 case REAL_CST:
3075 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3076 TREE_REAL_CST (arg1)))
3077 return 1;
3080 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3082 /* If we do not distinguish between signed and unsigned zero,
3083 consider them equal. */
3084 if (real_zerop (arg0) && real_zerop (arg1))
3085 return 1;
3087 return 0;
3089 case VECTOR_CST:
3091 tree v1, v2;
3093 v1 = TREE_VECTOR_CST_ELTS (arg0);
3094 v2 = TREE_VECTOR_CST_ELTS (arg1);
3095 while (v1 && v2)
3097 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3098 flags))
3099 return 0;
3100 v1 = TREE_CHAIN (v1);
3101 v2 = TREE_CHAIN (v2);
3104 return v1 == v2;
3107 case COMPLEX_CST:
3108 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3109 flags)
3110 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3111 flags));
3113 case STRING_CST:
3114 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3115 && ! memcmp (TREE_STRING_POINTER (arg0),
3116 TREE_STRING_POINTER (arg1),
3117 TREE_STRING_LENGTH (arg0)));
3119 case ADDR_EXPR:
3120 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3121 0);
3122 default:
3123 break;
3126 if (flags & OEP_ONLY_CONST)
3127 return 0;
3129 /* Define macros to test an operand from arg0 and arg1 for equality and a
3130 variant that allows null and views null as being different from any
3131 non-null value. In the latter case, if either is null, they both
3132 must be; otherwise, do the normal comparison. */
3133 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3134 TREE_OPERAND (arg1, N), flags)
3136 #define OP_SAME_WITH_NULL(N) \
3137 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3138 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3140 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3142 case tcc_unary:
3143 /* Two conversions are equal only if signedness and modes match. */
3144 switch (TREE_CODE (arg0))
3146 case NOP_EXPR:
3147 case CONVERT_EXPR:
3148 case FIX_TRUNC_EXPR:
3149 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3150 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3151 return 0;
3152 break;
3153 default:
3154 break;
3157 return OP_SAME (0);
3160 case tcc_comparison:
3161 case tcc_binary:
3162 if (OP_SAME (0) && OP_SAME (1))
3163 return 1;
3165 /* For commutative ops, allow the other order. */
3166 return (commutative_tree_code (TREE_CODE (arg0))
3167 && operand_equal_p (TREE_OPERAND (arg0, 0),
3168 TREE_OPERAND (arg1, 1), flags)
3169 && operand_equal_p (TREE_OPERAND (arg0, 1),
3170 TREE_OPERAND (arg1, 0), flags));
3172 case tcc_reference:
3173 /* If either of the pointer (or reference) expressions we are
3174 dereferencing contain a side effect, these cannot be equal. */
3175 if (TREE_SIDE_EFFECTS (arg0)
3176 || TREE_SIDE_EFFECTS (arg1))
3177 return 0;
3179 switch (TREE_CODE (arg0))
3181 case INDIRECT_REF:
3182 case ALIGN_INDIRECT_REF:
3183 case MISALIGNED_INDIRECT_REF:
3184 case REALPART_EXPR:
3185 case IMAGPART_EXPR:
3186 return OP_SAME (0);
3188 case ARRAY_REF:
3189 case ARRAY_RANGE_REF:
3190 /* Operands 2 and 3 may be null.
3191 Compare the array index by value first if it is constant, as we
3192 may have different types but the same value here. */
3193 return (OP_SAME (0)
3194 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3195 TREE_OPERAND (arg1, 1))
3196 || OP_SAME (1))
3197 && OP_SAME_WITH_NULL (2)
3198 && OP_SAME_WITH_NULL (3));
3200 case COMPONENT_REF:
3201 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3202 may be NULL when we're called to compare MEM_EXPRs. */
3203 return OP_SAME_WITH_NULL (0)
3204 && OP_SAME (1)
3205 && OP_SAME_WITH_NULL (2);
3207 case BIT_FIELD_REF:
3208 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3210 default:
3211 return 0;
3214 case tcc_expression:
3215 switch (TREE_CODE (arg0))
3217 case ADDR_EXPR:
3218 case TRUTH_NOT_EXPR:
3219 return OP_SAME (0);
3221 case TRUTH_ANDIF_EXPR:
3222 case TRUTH_ORIF_EXPR:
3223 return OP_SAME (0) && OP_SAME (1);
3225 case TRUTH_AND_EXPR:
3226 case TRUTH_OR_EXPR:
3227 case TRUTH_XOR_EXPR:
3228 if (OP_SAME (0) && OP_SAME (1))
3229 return 1;
3231 /* Otherwise take into account this is a commutative operation. */
3232 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3233 TREE_OPERAND (arg1, 1), flags)
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 0), flags));
3237 default:
3238 return 0;
3241 case tcc_vl_exp:
3242 switch (TREE_CODE (arg0))
3244 case CALL_EXPR:
3245 /* If the CALL_EXPRs call different functions, then they
3246 clearly cannot be equal. */
3247 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3248 flags))
3249 return 0;
3252 unsigned int cef = call_expr_flags (arg0);
3253 if (flags & OEP_PURE_SAME)
3254 cef &= ECF_CONST | ECF_PURE;
3255 else
3256 cef &= ECF_CONST;
3257 if (!cef)
3258 return 0;
3261 /* Now see if all the arguments are the same. */
3263 const_call_expr_arg_iterator iter0, iter1;
3264 const_tree a0, a1;
3265 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3266 a1 = first_const_call_expr_arg (arg1, &iter1);
3267 a0 && a1;
3268 a0 = next_const_call_expr_arg (&iter0),
3269 a1 = next_const_call_expr_arg (&iter1))
3270 if (! operand_equal_p (a0, a1, flags))
3271 return 0;
3273 /* If we get here and both argument lists are exhausted
3274 then the CALL_EXPRs are equal. */
3275 return ! (a0 || a1);
3277 default:
3278 return 0;
3281 case tcc_declaration:
3282 /* Consider __builtin_sqrt equal to sqrt. */
3283 return (TREE_CODE (arg0) == FUNCTION_DECL
3284 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3285 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3286 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3288 default:
3289 return 0;
3292 #undef OP_SAME
3293 #undef OP_SAME_WITH_NULL
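/* Illustrative examples (not part of the original source): two
   distinct occurrences of f () are never operand_equal_p unless f is
   const (or pure, with OEP_PURE_SAME), since side effects rule out
   equality above; and -0.0 and 0.0 compare equal only when the mode
   does not honor signed zeros, even though -0.0 == 0.0 as a C
   expression.  */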
3296 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3297 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3299 When in doubt, return 0. */
3301 static int
3302 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3304 int unsignedp1, unsignedpo;
3305 tree primarg0, primarg1, primother;
3306 unsigned int correct_width;
3308 if (operand_equal_p (arg0, arg1, 0))
3309 return 1;
3311 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3312 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3313 return 0;
3315 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3316 and see if the inner values are the same. This removes any
3317 signedness comparison, which doesn't matter here. */
3318 primarg0 = arg0, primarg1 = arg1;
3319 STRIP_NOPS (primarg0);
3320 STRIP_NOPS (primarg1);
3321 if (operand_equal_p (primarg0, primarg1, 0))
3322 return 1;
3324 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3325 actual comparison operand, ARG0.
3327 First throw away any conversions to wider types
3328 already present in the operands. */
3330 primarg1 = get_narrower (arg1, &unsignedp1);
3331 primother = get_narrower (other, &unsignedpo);
3333 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3334 if (unsignedp1 == unsignedpo
3335 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3336 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3338 tree type = TREE_TYPE (arg0);
3340 /* Make sure shorter operand is extended the right way
3341 to match the longer operand. */
3342 primarg1 = fold_convert (signed_or_unsigned_type_for
3343 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3345 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3346 return 1;
3349 return 0;
3352 /* See if ARG is an expression that is either a comparison or is performing
3353 arithmetic on comparisons. The comparisons must only be comparing
3354 two different values, which will be stored in *CVAL1 and *CVAL2; if
3355 they are nonzero it means that some operands have already been found.
3356 No variables may be used anywhere else in the expression except in the
3357 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3358 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3360 If this is true, return 1. Otherwise, return zero. */
3362 static int
3363 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3365 enum tree_code code = TREE_CODE (arg);
3366 enum tree_code_class class = TREE_CODE_CLASS (code);
3368 /* We can handle some of the tcc_expression cases here. */
3369 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3370 class = tcc_unary;
3371 else if (class == tcc_expression
3372 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3373 || code == COMPOUND_EXPR))
3374 class = tcc_binary;
3376 else if (class == tcc_expression && code == SAVE_EXPR
3377 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3379 /* If we've already found a CVAL1 or CVAL2, this expression is
3380 too complex to handle. */
3381 if (*cval1 || *cval2)
3382 return 0;
3384 class = tcc_unary;
3385 *save_p = 1;
3388 switch (class)
3390 case tcc_unary:
3391 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3393 case tcc_binary:
3394 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3395 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3396 cval1, cval2, save_p));
3398 case tcc_constant:
3399 return 1;
3401 case tcc_expression:
3402 if (code == COND_EXPR)
3403 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3404 cval1, cval2, save_p)
3405 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3406 cval1, cval2, save_p)
3407 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3408 cval1, cval2, save_p));
3409 return 0;
3411 case tcc_comparison:
3412 /* First see if we can handle the first operand, then the second. For
3413 the second operand, we know *CVAL1 can't be zero. It must be that
3414 one side of the comparison is each of the values; test for the
3415 case where this isn't true by failing if the two operands
3416 are the same. */
3418 if (operand_equal_p (TREE_OPERAND (arg, 0),
3419 TREE_OPERAND (arg, 1), 0))
3420 return 0;
3422 if (*cval1 == 0)
3423 *cval1 = TREE_OPERAND (arg, 0);
3424 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3426 else if (*cval2 == 0)
3427 *cval2 = TREE_OPERAND (arg, 0);
3428 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3430 else
3431 return 0;
3433 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3435 else if (*cval2 == 0)
3436 *cval2 = TREE_OPERAND (arg, 1);
3437 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3439 else
3440 return 0;
3442 return 1;
3444 default:
3445 return 0;
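/* Illustrative example (not part of the original source): for
   (a < b) && (b == a), the walk above records *CVAL1 = a and
   *CVAL2 = b and returns 1, since every comparison involves only
   those two values; (a < b) && (b < c) fails because the third
   value C appears.  */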
3449 /* ARG is a tree that is known to contain just arithmetic operations and
3450 comparisons. Evaluate the operations in the tree substituting NEW0 for
3451 any occurrence of OLD0 as an operand of a comparison and likewise for
3452 NEW1 and OLD1. */
3454 static tree
3455 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3457 tree type = TREE_TYPE (arg);
3458 enum tree_code code = TREE_CODE (arg);
3459 enum tree_code_class class = TREE_CODE_CLASS (code);
3461 /* We can handle some of the tcc_expression cases here. */
3462 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3463 class = tcc_unary;
3464 else if (class == tcc_expression
3465 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3466 class = tcc_binary;
3468 switch (class)
3470 case tcc_unary:
3471 return fold_build1 (code, type,
3472 eval_subst (TREE_OPERAND (arg, 0),
3473 old0, new0, old1, new1));
3475 case tcc_binary:
3476 return fold_build2 (code, type,
3477 eval_subst (TREE_OPERAND (arg, 0),
3478 old0, new0, old1, new1),
3479 eval_subst (TREE_OPERAND (arg, 1),
3480 old0, new0, old1, new1));
3482 case tcc_expression:
3483 switch (code)
3485 case SAVE_EXPR:
3486 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3488 case COMPOUND_EXPR:
3489 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3491 case COND_EXPR:
3492 return fold_build3 (code, type,
3493 eval_subst (TREE_OPERAND (arg, 0),
3494 old0, new0, old1, new1),
3495 eval_subst (TREE_OPERAND (arg, 1),
3496 old0, new0, old1, new1),
3497 eval_subst (TREE_OPERAND (arg, 2),
3498 old0, new0, old1, new1));
3499 default:
3500 break;
3502 /* Fall through - ??? */
3504 case tcc_comparison:
3506 tree arg0 = TREE_OPERAND (arg, 0);
3507 tree arg1 = TREE_OPERAND (arg, 1);
3509 /* We need to check both for exact equality and tree equality. The
3510 former will be true if the operand has a side-effect. In that
3511 case, we know the operand occurred exactly once. */
3513 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3514 arg0 = new0;
3515 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3516 arg0 = new1;
3518 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3519 arg1 = new0;
3520 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3521 arg1 = new1;
3523 return fold_build2 (code, type, arg0, arg1);
3526 default:
3527 return arg;
3531 /* Return a tree for the case when the result of an expression is RESULT
3532 converted to TYPE and OMITTED was previously an operand of the expression
3533 but is now not needed (e.g., we folded OMITTED * 0).
3535 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3536 the conversion of RESULT to TYPE. */
3538 tree
3539 omit_one_operand (tree type, tree result, tree omitted)
3541 tree t = fold_convert (type, result);
3543 /* If the resulting operand is an empty statement, just return the omitted
3544 statement cast to void. */
3545 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3546 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3548 if (TREE_SIDE_EFFECTS (omitted))
3549 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3551 return non_lvalue (t);
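/* Illustrative example (not part of the original source): when folding
   f () * 0 we cannot simply drop the call, so omit_one_operand builds
   COMPOUND_EXPR <f (), 0>, evaluating the side effects and then
   yielding the constant result.  */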
3554 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3556 static tree
3557 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3559 tree t = fold_convert (type, result);
3561 /* If the resulting operand is an empty statement, just return the omitted
3562 statement cast to void. */
3563 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3564 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3566 if (TREE_SIDE_EFFECTS (omitted))
3567 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3569 return pedantic_non_lvalue (t);
3572 /* Return a tree for the case when the result of an expression is RESULT
3573 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3574 of the expression but are now not needed.
3576 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3577 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3578 evaluated before OMITTED2. Otherwise, if neither has side effects,
3579 just do the conversion of RESULT to TYPE. */
3581 tree
3582 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3584 tree t = fold_convert (type, result);
3586 if (TREE_SIDE_EFFECTS (omitted2))
3587 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3588 if (TREE_SIDE_EFFECTS (omitted1))
3589 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3591 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3595 /* Return a simplified tree node for the truth-negation of ARG. This
3596 never alters ARG itself. We assume that ARG is an operation that
3597 returns a truth value (0 or 1).
3599 FIXME: one would think we would fold the result, but it causes
3600 problems with the dominator optimizer. */
3602 tree
3603 fold_truth_not_expr (tree arg)
3605 tree type = TREE_TYPE (arg);
3606 enum tree_code code = TREE_CODE (arg);
3608 /* If this is a comparison, we can simply invert it, except for
3609 floating-point non-equality comparisons, in which case we just
3610 enclose a TRUTH_NOT_EXPR around what we have. */
3612 if (TREE_CODE_CLASS (code) == tcc_comparison)
3614 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3615 if (FLOAT_TYPE_P (op_type)
3616 && flag_trapping_math
3617 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3618 && code != NE_EXPR && code != EQ_EXPR)
3619 return NULL_TREE;
3620 else
3622 code = invert_tree_comparison (code,
3623 HONOR_NANS (TYPE_MODE (op_type)));
3624 if (code == ERROR_MARK)
3625 return NULL_TREE;
3626 else
3627 return build2 (code, type,
3628 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3632 switch (code)
3634 case INTEGER_CST:
3635 return constant_boolean_node (integer_zerop (arg), type);
3637 case TRUTH_AND_EXPR:
3638 return build2 (TRUTH_OR_EXPR, type,
3639 invert_truthvalue (TREE_OPERAND (arg, 0)),
3640 invert_truthvalue (TREE_OPERAND (arg, 1)));
3642 case TRUTH_OR_EXPR:
3643 return build2 (TRUTH_AND_EXPR, type,
3644 invert_truthvalue (TREE_OPERAND (arg, 0)),
3645 invert_truthvalue (TREE_OPERAND (arg, 1)));
3647 case TRUTH_XOR_EXPR:
3648 /* Here we can invert either operand. We invert the first operand
3649 unless the second operand is a TRUTH_NOT_EXPR in which case our
3650 result is the XOR of the first operand with the inside of the
3651 negation of the second operand. */
3653 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3654 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3655 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3656 else
3657 return build2 (TRUTH_XOR_EXPR, type,
3658 invert_truthvalue (TREE_OPERAND (arg, 0)),
3659 TREE_OPERAND (arg, 1));
3661 case TRUTH_ANDIF_EXPR:
3662 return build2 (TRUTH_ORIF_EXPR, type,
3663 invert_truthvalue (TREE_OPERAND (arg, 0)),
3664 invert_truthvalue (TREE_OPERAND (arg, 1)));
3666 case TRUTH_ORIF_EXPR:
3667 return build2 (TRUTH_ANDIF_EXPR, type,
3668 invert_truthvalue (TREE_OPERAND (arg, 0)),
3669 invert_truthvalue (TREE_OPERAND (arg, 1)));
3671 case TRUTH_NOT_EXPR:
3672 return TREE_OPERAND (arg, 0);
3674 case COND_EXPR:
3676 tree arg1 = TREE_OPERAND (arg, 1);
3677 tree arg2 = TREE_OPERAND (arg, 2);
3678 /* A COND_EXPR may have a throw as one operand, which
3679 then has void type. Just leave void operands
3680 as they are. */
3681 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3682 VOID_TYPE_P (TREE_TYPE (arg1))
3683 ? arg1 : invert_truthvalue (arg1),
3684 VOID_TYPE_P (TREE_TYPE (arg2))
3685 ? arg2 : invert_truthvalue (arg2));
3688 case COMPOUND_EXPR:
3689 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3690 invert_truthvalue (TREE_OPERAND (arg, 1)));
3692 case NON_LVALUE_EXPR:
3693 return invert_truthvalue (TREE_OPERAND (arg, 0));
3695 case NOP_EXPR:
3696 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3697 return build1 (TRUTH_NOT_EXPR, type, arg);
3699 case CONVERT_EXPR:
3700 case FLOAT_EXPR:
3701 return build1 (TREE_CODE (arg), type,
3702 invert_truthvalue (TREE_OPERAND (arg, 0)));
3704 case BIT_AND_EXPR:
3705 if (!integer_onep (TREE_OPERAND (arg, 1)))
3706 break;
3707 return build2 (EQ_EXPR, type, arg,
3708 build_int_cst (type, 0));
3710 case SAVE_EXPR:
3711 return build1 (TRUTH_NOT_EXPR, type, arg);
3713 case CLEANUP_POINT_EXPR:
3714 return build1 (CLEANUP_POINT_EXPR, type,
3715 invert_truthvalue (TREE_OPERAND (arg, 0)));
3717 default:
3718 break;
3721 return NULL_TREE;
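/* Illustrative examples (not part of the original source): the cases
   above implement De Morgan's laws, so !(a && b) becomes !a || !b; a
   comparison is simply inverted, so !(a == b) becomes a != b; but for
   !(x < y) on floats with trapping math we return NULL_TREE, since the
   inverse UNGE would no longer trap on unordered operands.  */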
3724 /* Return a simplified tree node for the truth-negation of ARG. This
3725 never alters ARG itself. We assume that ARG is an operation that
3726 returns a truth value (0 or 1).
3728 FIXME: one would think we would fold the result, but it causes
3729 problems with the dominator optimizer. */
3731 tree
3732 invert_truthvalue (tree arg)
3734 tree tem;
3736 if (TREE_CODE (arg) == ERROR_MARK)
3737 return arg;
3739 tem = fold_truth_not_expr (arg);
3740 if (!tem)
3741 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3743 return tem;
3746 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3747 operands are another bit-wise operation with a common input. If so,
3748 distribute the bit operations to save an operation and possibly two if
3749 constants are involved. For example, convert
3750 (A | B) & (A | C) into A | (B & C)
3751 Further simplification will occur if B and C are constants.
3753 If this optimization cannot be done, 0 will be returned. */
3755 static tree
3756 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3758 tree common;
3759 tree left, right;
3761 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3762 || TREE_CODE (arg0) == code
3763 || (TREE_CODE (arg0) != BIT_AND_EXPR
3764 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3765 return 0;
3767 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3769 common = TREE_OPERAND (arg0, 0);
3770 left = TREE_OPERAND (arg0, 1);
3771 right = TREE_OPERAND (arg1, 1);
3773 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3775 common = TREE_OPERAND (arg0, 0);
3776 left = TREE_OPERAND (arg0, 1);
3777 right = TREE_OPERAND (arg1, 0);
3779 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3781 common = TREE_OPERAND (arg0, 1);
3782 left = TREE_OPERAND (arg0, 0);
3783 right = TREE_OPERAND (arg1, 1);
3785 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3787 common = TREE_OPERAND (arg0, 1);
3788 left = TREE_OPERAND (arg0, 0);
3789 right = TREE_OPERAND (arg1, 0);
3791 else
3792 return 0;
3794 return fold_build2 (TREE_CODE (arg0), type, common,
3795 fold_build2 (code, type, left, right));
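/* Illustrative example (not part of the original source): given
   (x | 3) & (x | 5), the common operand is X, so the result is
   x | (3 & 5), and the inner constant operation folds further to
   x | 1 -- one bit operation instead of three.  */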
3798 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs, simplify a
3799 binary operation with code CODE. This optimization is unsafe. */
3800 static tree
3801 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3803 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3804 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3806 /* (A / C) +- (B / C) -> (A +- B) / C. */
3807 if (mul0 == mul1
3808 && operand_equal_p (TREE_OPERAND (arg0, 1),
3809 TREE_OPERAND (arg1, 1), 0))
3810 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3811 fold_build2 (code, type,
3812 TREE_OPERAND (arg0, 0),
3813 TREE_OPERAND (arg1, 0)),
3814 TREE_OPERAND (arg0, 1));
3816 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3817 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3818 TREE_OPERAND (arg1, 0), 0)
3819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3820 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3822 REAL_VALUE_TYPE r0, r1;
3823 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3824 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3825 if (!mul0)
3826 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3827 if (!mul1)
3828 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3829 real_arithmetic (&r0, code, &r0, &r1);
3830 return fold_build2 (MULT_EXPR, type,
3831 TREE_OPERAND (arg0, 0),
3832 build_real (type, r0));
3835 return NULL_TREE;
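/* Illustrative examples (not part of the original source): the first
   pattern rewrites (a / c) + (b / c) as (a + b) / c, and the second
   rewrites (x / 2.0) + (x / 4.0) as x * 0.75 by folding 1/2 + 1/4 at
   compile time.  Both transformations can change rounding and
   exception behavior, which is why the function is flagged unsafe.  */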
3838 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3839 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3841 static tree
3842 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3843 int unsignedp)
3845 tree result;
3847 if (bitpos == 0)
3849 tree size = TYPE_SIZE (TREE_TYPE (inner));
3850 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3851 || POINTER_TYPE_P (TREE_TYPE (inner)))
3852 && host_integerp (size, 0)
3853 && tree_low_cst (size, 0) == bitsize)
3854 return fold_convert (type, inner);
3857 result = build3 (BIT_FIELD_REF, type, inner,
3858 size_int (bitsize), bitsize_int (bitpos));
3860 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3862 return result;
3865 /* Optimize a bit-field compare.
3867 There are two cases: First is a compare against a constant and the
3868 second is a comparison of two items where the fields are at the same
3869 bit position relative to the start of a chunk (byte, halfword, word)
3870 large enough to contain it. In these cases we can avoid the shift
3871 implicit in bitfield extractions.
3873 For constants, we emit a compare of the shifted constant with the
3874 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3875 compared. For two fields at the same position, we do the ANDs with the
3876 similar mask and compare the result of the ANDs.
3878 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3879 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3880 are the left and right operands of the comparison, respectively.
3882 If the optimization described above can be done, we return the resulting
3883 tree. Otherwise we return zero. */
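/* A hypothetical example of the constant case: given

     struct { unsigned a : 9; unsigned b : 7; } s;

   the test "s.b == 5" would otherwise shift the field down before
   comparing; instead we compare the containing 16-bit word ANDed
   with the mask 0xfe00 against the shifted constant 5 << 9. (The
   exact mask and shift depend on endianness; this sketch assumes
   the little-endian layout.) */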
3885 static tree
3886 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3887 tree lhs, tree rhs)
3889 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3890 tree type = TREE_TYPE (lhs);
3891 tree signed_type, unsigned_type;
3892 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3893 enum machine_mode lmode, rmode, nmode;
3894 int lunsignedp, runsignedp;
3895 int lvolatilep = 0, rvolatilep = 0;
3896 tree linner, rinner = NULL_TREE;
3897 tree mask;
3898 tree offset;
3900 /* Get all the information about the extractions being done. If the bit size
3901 is the same as the size of the underlying object, we aren't doing an
3902 extraction at all and so can do nothing. We also don't want to
3903 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3904 then will no longer be able to replace it. */
3905 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3906 &lunsignedp, &lvolatilep, false);
3907 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3908 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3909 return 0;
3911 if (!const_p)
3913 /* If this is not a constant, we can only do something if bit positions,
3914 sizes, and signedness are the same. */
3915 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3916 &runsignedp, &rvolatilep, false);
3918 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3919 || lunsignedp != runsignedp || offset != 0
3920 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3921 return 0;
3924 /* See if we can find a mode to refer to this field. We should be able to,
3925 but fail if we can't. */
3926 nmode = get_best_mode (lbitsize, lbitpos,
3927 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3928 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3929 TYPE_ALIGN (TREE_TYPE (rinner))),
3930 word_mode, lvolatilep || rvolatilep);
3931 if (nmode == VOIDmode)
3932 return 0;
3934 /* Set signed and unsigned types of the precision of this mode for the
3935 shifts below. */
3936 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3937 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3939 /* Compute the bit position and size for the new reference and our offset
3940 within it. If the new reference is the same size as the original, we
3941 won't optimize anything, so return zero. */
3942 nbitsize = GET_MODE_BITSIZE (nmode);
3943 nbitpos = lbitpos & ~ (nbitsize - 1);
3944 lbitpos -= nbitpos;
3945 if (nbitsize == lbitsize)
3946 return 0;
3948 if (BYTES_BIG_ENDIAN)
3949 lbitpos = nbitsize - lbitsize - lbitpos;
3951 /* Make the mask to be used against the extracted field. */
3952 mask = build_int_cst_type (unsigned_type, -1);
3953 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3954 mask = const_binop (RSHIFT_EXPR, mask,
3955 size_int (nbitsize - lbitsize - lbitpos), 0);
3957 if (! const_p)
3958 /* If not comparing with constant, just rework the comparison
3959 and return. */
3960 return fold_build2 (code, compare_type,
3961 fold_build2 (BIT_AND_EXPR, unsigned_type,
3962 make_bit_field_ref (linner,
3963 unsigned_type,
3964 nbitsize, nbitpos,
3965 1),
3966 mask),
3967 fold_build2 (BIT_AND_EXPR, unsigned_type,
3968 make_bit_field_ref (rinner,
3969 unsigned_type,
3970 nbitsize, nbitpos,
3971 1),
3972 mask));
3974 /* Otherwise, we are handling the constant case. See if the constant is too
3975 big for the field. Warn and return a tree for 0 (false) if so. We do
3976 this not only for its own sake, but to avoid having to test for this
3977 error case below. If we didn't, we might generate wrong code.
3979 For unsigned fields, the constant shifted right by the field length should
3980 be all zero. For signed fields, the high-order bits should agree with
3981 the sign bit. */
3983 if (lunsignedp)
3985 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3986 fold_convert (unsigned_type, rhs),
3987 size_int (lbitsize), 0)))
3989 warning (0, "comparison is always %d due to width of bit-field",
3990 code == NE_EXPR);
3991 return constant_boolean_node (code == NE_EXPR, compare_type);
3994 else
3996 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3997 size_int (lbitsize - 1), 0);
3998 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4000 warning (0, "comparison is always %d due to width of bit-field",
4001 code == NE_EXPR);
4002 return constant_boolean_node (code == NE_EXPR, compare_type);
4006 /* Single-bit compares should always be against zero. */
4007 if (lbitsize == 1 && ! integer_zerop (rhs))
4009 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4010 rhs = build_int_cst (type, 0);
4013 /* Make a new bitfield reference, shift the constant over the
4014 appropriate number of bits and mask it with the computed mask
4015 (in case this was a signed field). If we changed it, make a new one. */
4016 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4017 if (lvolatilep)
4019 TREE_SIDE_EFFECTS (lhs) = 1;
4020 TREE_THIS_VOLATILE (lhs) = 1;
4023 rhs = const_binop (BIT_AND_EXPR,
4024 const_binop (LSHIFT_EXPR,
4025 fold_convert (unsigned_type, rhs),
4026 size_int (lbitpos), 0),
4027 mask, 0);
4029 return build2 (code, compare_type,
4030 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4031 rhs);
4034 /* Subroutine for fold_truthop: decode a field reference.
4036 If EXP is a comparison reference, we return the innermost reference.
4038 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4039 set to the starting bit number.
4041 If the innermost field can be completely contained in a mode-sized
4042 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4044 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4045 otherwise it is not changed.
4047 *PUNSIGNEDP is set to the signedness of the field.
4049 *PMASK is set to the mask used. This is either contained in a
4050 BIT_AND_EXPR or derived from the width of the field.
4052 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4054 Return 0 if this is not a component reference or is one that we can't
4055 do anything with. */
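/* As an illustration (editorial example): for EXP == "s.f & 5" with
   F an 8-bit field at bit 16, we return the underlying record, set
   *PBITSIZE to 8, *PBITPOS to 16, *PAND_MASK to 5, and *PMASK to the
   field mask 0xff merged with the AND mask, i.e. 5. */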
4057 static tree
4058 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4059 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4060 int *punsignedp, int *pvolatilep,
4061 tree *pmask, tree *pand_mask)
4063 tree outer_type = 0;
4064 tree and_mask = 0;
4065 tree mask, inner, offset;
4066 tree unsigned_type;
4067 unsigned int precision;
4069 /* All the optimizations using this function assume integer fields.
4070 There are problems with FP fields since the type_for_size call
4071 below can fail for, e.g., XFmode. */
4072 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4073 return 0;
4075 /* We are interested in the bare arrangement of bits, so strip everything
4076 that doesn't affect the machine mode. However, record the type of the
4077 outermost expression if it may matter below. */
4078 if (TREE_CODE (exp) == NOP_EXPR
4079 || TREE_CODE (exp) == CONVERT_EXPR
4080 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4081 outer_type = TREE_TYPE (exp);
4082 STRIP_NOPS (exp);
4084 if (TREE_CODE (exp) == BIT_AND_EXPR)
4086 and_mask = TREE_OPERAND (exp, 1);
4087 exp = TREE_OPERAND (exp, 0);
4088 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4089 if (TREE_CODE (and_mask) != INTEGER_CST)
4090 return 0;
4093 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4094 punsignedp, pvolatilep, false);
4095 if ((inner == exp && and_mask == 0)
4096 || *pbitsize < 0 || offset != 0
4097 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4098 return 0;
4100 /* If the number of bits in the reference is the same as the bitsize of
4101 the outer type, then the outer type gives the signedness. Otherwise
4102 (in case of a small bitfield) the signedness is unchanged. */
4103 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4104 *punsignedp = TYPE_UNSIGNED (outer_type);
4106 /* Compute the mask to access the bitfield. */
4107 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4108 precision = TYPE_PRECISION (unsigned_type);
4110 mask = build_int_cst_type (unsigned_type, -1);
4112 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4113 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4115 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4116 if (and_mask != 0)
4117 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4118 fold_convert (unsigned_type, and_mask), mask);
4120 *pmask = mask;
4121 *pand_mask = and_mask;
4122 return inner;
4125 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4126 bit positions. */
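/* E.g. (illustrative): in a 32-bit type, MASK == 0xff is such a mask
   for SIZE == 8, whereas 0x7f (too few ones) and 0x1ff (a one beyond
   the low SIZE bits) are not. */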
4128 static int
4129 all_ones_mask_p (const_tree mask, int size)
4131 tree type = TREE_TYPE (mask);
4132 unsigned int precision = TYPE_PRECISION (type);
4133 tree tmask;
4135 tmask = build_int_cst_type (signed_type_for (type), -1);
4137 return
4138 tree_int_cst_equal (mask,
4139 const_binop (RSHIFT_EXPR,
4140 const_binop (LSHIFT_EXPR, tmask,
4141 size_int (precision - size),
4142 0),
4143 size_int (precision - size), 0));
4146 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4147 represents the sign bit of EXP's type. If EXP represents a sign
4148 or zero extension, also test VAL against the unextended type.
4149 The return value is the (sub)expression whose sign bit is VAL,
4150 or NULL_TREE otherwise. */
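/* For example (illustrative): with EXP of type int on a 32-bit
   target, VAL == 0x80000000 is the sign bit and EXP itself is
   returned. For EXP == (int) c with C a signed char, VAL == 0x80 is
   the sign bit of the unextended type, so C is returned instead. */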
4152 static tree
4153 sign_bit_p (tree exp, const_tree val)
4155 unsigned HOST_WIDE_INT mask_lo, lo;
4156 HOST_WIDE_INT mask_hi, hi;
4157 int width;
4158 tree t;
4160 /* Tree EXP must have an integral type. */
4161 t = TREE_TYPE (exp);
4162 if (! INTEGRAL_TYPE_P (t))
4163 return NULL_TREE;
4165 /* Tree VAL must be an integer constant. */
4166 if (TREE_CODE (val) != INTEGER_CST
4167 || TREE_OVERFLOW (val))
4168 return NULL_TREE;
4170 width = TYPE_PRECISION (t);
4171 if (width > HOST_BITS_PER_WIDE_INT)
4173 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4174 lo = 0;
4176 mask_hi = ((unsigned HOST_WIDE_INT) -1
4177 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4178 mask_lo = -1;
4180 else
4182 hi = 0;
4183 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4185 mask_hi = 0;
4186 mask_lo = ((unsigned HOST_WIDE_INT) -1
4187 >> (HOST_BITS_PER_WIDE_INT - width));
4190 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4191 treat VAL as if it were unsigned. */
4192 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4193 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4194 return exp;
4196 /* Handle extension from a narrower type. */
4197 if (TREE_CODE (exp) == NOP_EXPR
4198 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4199 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4201 return NULL_TREE;
4204 /* Subroutine for fold_truthop: determine if an operand is simple enough
4205 to be evaluated unconditionally. */
4207 static int
4208 simple_operand_p (const_tree exp)
4210 /* Strip any conversions that don't change the machine mode. */
4211 STRIP_NOPS (exp);
4213 return (CONSTANT_CLASS_P (exp)
4214 || TREE_CODE (exp) == SSA_NAME
4215 || (DECL_P (exp)
4216 && ! TREE_ADDRESSABLE (exp)
4217 && ! TREE_THIS_VOLATILE (exp)
4218 && ! DECL_NONLOCAL (exp)
4219 /* Don't regard global variables as simple. They may be
4220 allocated in ways unknown to the compiler (shared memory,
4221 #pragma weak, etc). */
4222 && ! TREE_PUBLIC (exp)
4223 && ! DECL_EXTERNAL (exp)
4224 /* Loading a static variable is unduly expensive, but global
4225 registers aren't expensive. */
4226 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4229 /* The following functions are subroutines to fold_range_test and allow it to
4230 try to change a logical combination of comparisons into a range test.
4232 For example, both
4233 X == 2 || X == 3 || X == 4 || X == 5
4234 and
4235 X >= 2 && X <= 5
4236 are converted to
4237 (unsigned) (X - 2) <= 3
4239 We describe each set of comparisons as being either inside or outside
4240 a range, using a variable named like IN_P, and then describe the
4241 range with a lower and upper bound. If one of the bounds is omitted,
4242 it represents either the highest or lowest value of the type.
4244 In the comments below, we represent a range by two numbers in brackets
4245 preceded by a "+" to designate being inside that range, or a "-" to
4246 designate being outside that range, so the condition can be inverted by
4247 flipping the prefix. An omitted bound is represented by a "-". For
4248 example, "- [-, 10]" means being outside the range starting at the lowest
4249 possible value and ending at 10, in other words, being greater than 10.
4250 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4251 always false.
4253 We set up things so that the missing bounds are handled in a consistent
4254 manner so neither a missing bound nor "true" and "false" need to be
4255 handled using a special case. */
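/* In this notation the example above reads: each "X == N" is the
   range + [N, N]; the four ranges merge pairwise into + [2, 5], and
   + [2, 5] is finally emitted as the single unsigned comparison
   (unsigned) (X - 2) <= 3. */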
4257 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4258 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4259 and UPPER1_P are nonzero if the respective argument is an upper bound
4260 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4261 must be specified for a comparison. ARG1 will be converted to ARG0's
4262 type if both are specified. */
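/* E.g. (illustrative): range_binop (LT_EXPR, type, high0, 1, low1, 0)
   with HIGH0 omitted treats the missing upper bound as larger than
   any representable value, so the result is false whatever LOW1 is. */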
4264 static tree
4265 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4266 tree arg1, int upper1_p)
4268 tree tem;
4269 int result;
4270 int sgn0, sgn1;
4272 /* If neither arg represents infinity, do the normal operation.
4273 Else, if not a comparison, return infinity. Else handle the special
4274 comparison rules. Note that most of the cases below won't occur, but
4275 are handled for consistency. */
4277 if (arg0 != 0 && arg1 != 0)
4279 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4280 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4281 STRIP_NOPS (tem);
4282 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4285 if (TREE_CODE_CLASS (code) != tcc_comparison)
4286 return 0;
4288 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4289 for neither. In real mathematics, we cannot assume open-ended ranges are
4290 the same. But this is computer arithmetic, where numbers are finite.
4291 We can therefore make the transformation of any unbounded range with
4292 the value Z, Z being greater than any representable number. This permits
4293 us to treat unbounded ranges as equal. */
4294 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4295 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4296 switch (code)
4298 case EQ_EXPR:
4299 result = sgn0 == sgn1;
4300 break;
4301 case NE_EXPR:
4302 result = sgn0 != sgn1;
4303 break;
4304 case LT_EXPR:
4305 result = sgn0 < sgn1;
4306 break;
4307 case LE_EXPR:
4308 result = sgn0 <= sgn1;
4309 break;
4310 case GT_EXPR:
4311 result = sgn0 > sgn1;
4312 break;
4313 case GE_EXPR:
4314 result = sgn0 >= sgn1;
4315 break;
4316 default:
4317 gcc_unreachable ();
4320 return constant_boolean_node (result, type);
4323 /* Given EXP, a logical expression, set the range it is testing into
4324 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4325 actually being tested. *PLOW and *PHIGH will be made of the same
4326 type as the returned expression. If EXP is not a comparison, we
4327 will most likely not be returning a useful value and range. Set
4328 *STRICT_OVERFLOW_P to true if the return value is only valid
4329 because signed overflow is undefined; otherwise, do not change
4330 *STRICT_OVERFLOW_P. */
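/* A worked example (editorial, not from the original source): for
   EXP == "x > 5" with X a signed int, we return X with *PIN_P == 0,
   *PLOW == 0 (an omitted lower bound) and *PHIGH == 5, i.e. "x is
   outside - [-, 5]"; a surrounding TRUTH_NOT_EXPR would merely flip
   *PIN_P. */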
4332 static tree
4333 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4334 bool *strict_overflow_p)
4336 enum tree_code code;
4337 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4338 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4339 int in_p, n_in_p;
4340 tree low, high, n_low, n_high;
4342 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4343 and see if we can refine the range. Some of the cases below may not
4344 happen, but it doesn't seem worth worrying about this. We "continue"
4345 the outer loop when we've changed something; otherwise we "break"
4346 the switch, which will "break" the while. */
4348 in_p = 0;
4349 low = high = build_int_cst (TREE_TYPE (exp), 0);
4351 while (1)
4353 code = TREE_CODE (exp);
4354 exp_type = TREE_TYPE (exp);
4356 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4358 if (TREE_OPERAND_LENGTH (exp) > 0)
4359 arg0 = TREE_OPERAND (exp, 0);
4360 if (TREE_CODE_CLASS (code) == tcc_comparison
4361 || TREE_CODE_CLASS (code) == tcc_unary
4362 || TREE_CODE_CLASS (code) == tcc_binary)
4363 arg0_type = TREE_TYPE (arg0);
4364 if (TREE_CODE_CLASS (code) == tcc_binary
4365 || TREE_CODE_CLASS (code) == tcc_comparison
4366 || (TREE_CODE_CLASS (code) == tcc_expression
4367 && TREE_OPERAND_LENGTH (exp) > 1))
4368 arg1 = TREE_OPERAND (exp, 1);
4371 switch (code)
4373 case TRUTH_NOT_EXPR:
4374 in_p = ! in_p, exp = arg0;
4375 continue;
4377 case EQ_EXPR: case NE_EXPR:
4378 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4379 /* We can only do something if the range is testing for zero
4380 and if the second operand is an integer constant. Note that
4381 saying something is "in" the range we make is done by
4382 complementing IN_P, since it is initialized for the case of
4383 being not equal to zero; "out" is leaving it alone. */
4384 if (low == 0 || high == 0
4385 || ! integer_zerop (low) || ! integer_zerop (high)
4386 || TREE_CODE (arg1) != INTEGER_CST)
4387 break;
4389 switch (code)
4391 case NE_EXPR: /* - [c, c] */
4392 low = high = arg1;
4393 break;
4394 case EQ_EXPR: /* + [c, c] */
4395 in_p = ! in_p, low = high = arg1;
4396 break;
4397 case GT_EXPR: /* - [-, c] */
4398 low = 0, high = arg1;
4399 break;
4400 case GE_EXPR: /* + [c, -] */
4401 in_p = ! in_p, low = arg1, high = 0;
4402 break;
4403 case LT_EXPR: /* - [c, -] */
4404 low = arg1, high = 0;
4405 break;
4406 case LE_EXPR: /* + [-, c] */
4407 in_p = ! in_p, low = 0, high = arg1;
4408 break;
4409 default:
4410 gcc_unreachable ();
4413 /* If this is an unsigned comparison, we also know that EXP is
4414 greater than or equal to zero. We base the range tests we make
4415 on that fact, so we record it here so we can parse existing
4416 range tests. We test arg0_type since often the return type
4417 of, e.g. EQ_EXPR, is boolean. */
4418 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4420 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4421 in_p, low, high, 1,
4422 build_int_cst (arg0_type, 0),
4423 NULL_TREE))
4424 break;
4426 in_p = n_in_p, low = n_low, high = n_high;
4428 /* If the high bound is missing, but we have a nonzero low
4429 bound, reverse the range so it goes from zero to the low bound
4430 minus 1. */
4431 if (high == 0 && low && ! integer_zerop (low))
4433 in_p = ! in_p;
4434 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4435 integer_one_node, 0);
4436 low = build_int_cst (arg0_type, 0);
4440 exp = arg0;
4441 continue;
4443 case NEGATE_EXPR:
4444 /* (-x) IN [a,b] -> x in [-b, -a] */
4445 n_low = range_binop (MINUS_EXPR, exp_type,
4446 build_int_cst (exp_type, 0),
4447 0, high, 1);
4448 n_high = range_binop (MINUS_EXPR, exp_type,
4449 build_int_cst (exp_type, 0),
4450 0, low, 0);
4451 low = n_low, high = n_high;
4452 exp = arg0;
4453 continue;
4455 case BIT_NOT_EXPR:
4456 /* ~ X -> -X - 1 */
4457 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4458 build_int_cst (exp_type, 1));
4459 continue;
4461 case PLUS_EXPR: case MINUS_EXPR:
4462 if (TREE_CODE (arg1) != INTEGER_CST)
4463 break;
4465 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4466 move a constant to the other side. */
4467 if (!TYPE_UNSIGNED (arg0_type)
4468 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4469 break;
4471 /* If EXP is signed, any overflow in the computation is undefined,
4472 so we don't worry about it so long as our computations on
4473 the bounds don't overflow. For unsigned, overflow is defined
4474 and this is exactly the right thing. */
4475 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4476 arg0_type, low, 0, arg1, 0);
4477 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4478 arg0_type, high, 1, arg1, 0);
4479 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4480 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4481 break;
4483 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4484 *strict_overflow_p = true;
4486 /* Check for an unsigned range which has wrapped around the maximum
4487 value thus making n_high < n_low, and normalize it. */
4488 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4490 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4491 integer_one_node, 0);
4492 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4493 integer_one_node, 0);
4495 /* If the range is of the form +/- [ x+1, x ], we won't
4496 be able to normalize it. But then, it represents the
4497 whole range or the empty set, so make it
4498 +/- [ -, - ]. */
4499 if (tree_int_cst_equal (n_low, low)
4500 && tree_int_cst_equal (n_high, high))
4501 low = high = 0;
4502 else
4503 in_p = ! in_p;
4505 else
4506 low = n_low, high = n_high;
4508 exp = arg0;
4509 continue;
4511 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4512 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4513 break;
4515 if (! INTEGRAL_TYPE_P (arg0_type)
4516 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4517 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4518 break;
4520 n_low = low, n_high = high;
4522 if (n_low != 0)
4523 n_low = fold_convert (arg0_type, n_low);
4525 if (n_high != 0)
4526 n_high = fold_convert (arg0_type, n_high);
4529 /* If we're converting arg0 from an unsigned type to exp's
4530 signed type, we will be doing the comparison as unsigned.
4531 The tests above have already verified that LOW and HIGH
4532 are both positive.
4534 So we have to ensure that we will handle large unsigned
4535 values the same way that the current signed bounds treat
4536 negative values. */
4538 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4540 tree high_positive;
4541 tree equiv_type;
4542 /* For fixed-point modes, we need to pass the saturating flag
4543 as the 2nd parameter. */
4544 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4545 equiv_type = lang_hooks.types.type_for_mode
4546 (TYPE_MODE (arg0_type),
4547 TYPE_SATURATING (arg0_type));
4548 else
4549 equiv_type = lang_hooks.types.type_for_mode
4550 (TYPE_MODE (arg0_type), 1);
4552 /* A range without an upper bound is, naturally, unbounded.
4553 Since convert would have cropped a very large value, use
4554 the max value for the destination type. */
4555 high_positive
4556 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4557 : TYPE_MAX_VALUE (arg0_type);
4559 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4560 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4561 fold_convert (arg0_type,
4562 high_positive),
4563 build_int_cst (arg0_type, 1));
4565 /* If the low bound is specified, "and" the range with the
4566 range for which the original unsigned value will be
4567 positive. */
4568 if (low != 0)
4570 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4571 1, n_low, n_high, 1,
4572 fold_convert (arg0_type,
4573 integer_zero_node),
4574 high_positive))
4575 break;
4577 in_p = (n_in_p == in_p);
4579 else
4581 /* Otherwise, "or" the range with the range of the input
4582 that will be interpreted as negative. */
4583 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4584 0, n_low, n_high, 1,
4585 fold_convert (arg0_type,
4586 integer_zero_node),
4587 high_positive))
4588 break;
4590 in_p = (in_p != n_in_p);
4594 exp = arg0;
4595 low = n_low, high = n_high;
4596 continue;
4598 default:
4599 break;
4602 break;
4605 /* If EXP is a constant, we can evaluate whether this is true or false. */
4606 if (TREE_CODE (exp) == INTEGER_CST)
4608 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4609 exp, 0, low, 0))
4610 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4611 exp, 1, high, 1)));
4612 low = high = 0;
4613 exp = 0;
4616 *pin_p = in_p, *plow = low, *phigh = high;
4617 return exp;
4620 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4621 type, TYPE, return an expression to test if EXP is in (or out of, depending
4622 on IN_P) the range. Return 0 if the test couldn't be created. */
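/* Illustrative: with IN_P == 1, LOW == 2 and HIGH == 5, the check
   "2 <= exp && exp <= 5" is built via the wrap-around form below as
   (unsigned) (exp - 2) <= 3, a single comparison. */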
4624 static tree
4625 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4627 tree etype = TREE_TYPE (exp);
4628 tree value;
4630 #ifdef HAVE_canonicalize_funcptr_for_compare
4631 /* Disable this optimization for function pointer expressions
4632 on targets that require function pointer canonicalization. */
4633 if (HAVE_canonicalize_funcptr_for_compare
4634 && TREE_CODE (etype) == POINTER_TYPE
4635 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4636 return NULL_TREE;
4637 #endif
4639 if (! in_p)
4641 value = build_range_check (type, exp, 1, low, high);
4642 if (value != 0)
4643 return invert_truthvalue (value);
4645 return 0;
4648 if (low == 0 && high == 0)
4649 return build_int_cst (type, 1);
4651 if (low == 0)
4652 return fold_build2 (LE_EXPR, type, exp,
4653 fold_convert (etype, high));
4655 if (high == 0)
4656 return fold_build2 (GE_EXPR, type, exp,
4657 fold_convert (etype, low));
4659 if (operand_equal_p (low, high, 0))
4660 return fold_build2 (EQ_EXPR, type, exp,
4661 fold_convert (etype, low));
4663 if (integer_zerop (low))
4665 if (! TYPE_UNSIGNED (etype))
4667 etype = unsigned_type_for (etype);
4668 high = fold_convert (etype, high);
4669 exp = fold_convert (etype, exp);
4671 return build_range_check (type, exp, 1, 0, high);
4674 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4675 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4677 unsigned HOST_WIDE_INT lo;
4678 HOST_WIDE_INT hi;
4679 int prec;
4681 prec = TYPE_PRECISION (etype);
4682 if (prec <= HOST_BITS_PER_WIDE_INT)
4684 hi = 0;
4685 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4687 else
4689 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4690 lo = (unsigned HOST_WIDE_INT) -1;
4693 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4695 if (TYPE_UNSIGNED (etype))
4697 etype = signed_type_for (etype);
4698 exp = fold_convert (etype, exp);
4700 return fold_build2 (GT_EXPR, type, exp,
4701 build_int_cst (etype, 0));
4705 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4706 This requires wrap-around arithmetic for the type of the expression. */
4707 switch (TREE_CODE (etype))
4709 case INTEGER_TYPE:
4710 /* There is no requirement that LOW be within the range of ETYPE
4711 if the latter is a subtype. It must, however, be within the base
4712 type of ETYPE. So be sure we do the subtraction in that type. */
4713 if (TREE_TYPE (etype))
4714 etype = TREE_TYPE (etype);
4715 break;
4717 case ENUMERAL_TYPE:
4718 case BOOLEAN_TYPE:
4719 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4720 TYPE_UNSIGNED (etype));
4721 break;
4723 default:
4724 break;
4727 /* If we don't have wrap-around arithmetic up front, try to force it. */
4728 if (TREE_CODE (etype) == INTEGER_TYPE
4729 && !TYPE_OVERFLOW_WRAPS (etype))
4731 tree utype, minv, maxv;
4733 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4734 for the type in question, as we rely on this here. */
4735 utype = unsigned_type_for (etype);
4736 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4737 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4738 integer_one_node, 1);
4739 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4741 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4742 minv, 1, maxv, 1)))
4743 etype = utype;
4744 else
4745 return 0;
4748 high = fold_convert (etype, high);
4749 low = fold_convert (etype, low);
4750 exp = fold_convert (etype, exp);
4752 value = const_binop (MINUS_EXPR, high, low, 0);
4755 if (POINTER_TYPE_P (etype))
4757 if (value != 0 && !TREE_OVERFLOW (value))
4759 low = fold_convert (sizetype, low);
4760 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4761 return build_range_check (type,
4762 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4763 1, build_int_cst (etype, 0), value);
4765 return 0;
4768 if (value != 0 && !TREE_OVERFLOW (value))
4769 return build_range_check (type,
4770 fold_build2 (MINUS_EXPR, etype, exp, low),
4771 1, build_int_cst (etype, 0), value);
4773 return 0;
4776 /* Return the predecessor of VAL in its type, handling the infinite case. */
4778 static tree
4779 range_predecessor (tree val)
4781 tree type = TREE_TYPE (val);
4783 if (INTEGRAL_TYPE_P (type)
4784 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4785 return 0;
4786 else
4787 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4790 /* Return the successor of VAL in its type, handling the infinite case. */
4792 static tree
4793 range_successor (tree val)
4795 tree type = TREE_TYPE (val);
4797 if (INTEGRAL_TYPE_P (type)
4798 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4799 return 0;
4800 else
4801 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
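/* E.g. (illustrative): for a 32-bit int, range_successor on 5 yields
   6, while on INT_MAX it yields 0 to signal "no successor";
   range_predecessor behaves symmetrically at INT_MIN. */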
4804 /* Given two ranges, see if we can merge them into one. Return 1 if we
4805 can, 0 if we can't. Set the output range into the specified parameters. */
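/* Example (illustrative): merging + [2, 5] with + [4, 9], both "in"
   (a conjunction), yields the intersection + [4, 5]; merging - [2, 5]
   with - [6, 9], both "out", yields the single excluded range
   - [2, 9], since the two ranges are adjacent. */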
4807 static int
4808 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4809 tree high0, int in1_p, tree low1, tree high1)
4811 int no_overlap;
4812 int subset;
4813 int temp;
4814 tree tem;
4815 int in_p;
4816 tree low, high;
4817 int lowequal = ((low0 == 0 && low1 == 0)
4818 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4819 low0, 0, low1, 0)));
4820 int highequal = ((high0 == 0 && high1 == 0)
4821 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4822 high0, 1, high1, 1)));
4824 /* Make range 0 be the range that starts first, or ends last if they
4825 start at the same value. Swap them if it isn't. */
4826 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4827 low0, 0, low1, 0))
4828 || (lowequal
4829 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4830 high1, 1, high0, 1))))
4832 temp = in0_p, in0_p = in1_p, in1_p = temp;
4833 tem = low0, low0 = low1, low1 = tem;
4834 tem = high0, high0 = high1, high1 = tem;
4837 /* Now flag two cases, whether the ranges are disjoint or whether the
4838 second range is totally subsumed in the first. Note that the tests
4839 below are simplified by the ones above. */
4840 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4841 high0, 1, low1, 0));
4842 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4843 high1, 1, high0, 1));
4845 /* We now have four cases, depending on whether we are including or
4846 excluding the two ranges. */
4847 if (in0_p && in1_p)
4849 /* If they don't overlap, the result is false. If the second range
4850 is a subset it is the result. Otherwise, the range is from the start
4851 of the second to the end of the first. */
4852 if (no_overlap)
4853 in_p = 0, low = high = 0;
4854 else if (subset)
4855 in_p = 1, low = low1, high = high1;
4856 else
4857 in_p = 1, low = low1, high = high0;
4860 else if (in0_p && ! in1_p)
4862 /* If they don't overlap, the result is the first range. If they are
4863 equal, the result is false. If the second range is a subset of the
4864 first, and the ranges begin at the same place, we go from just after
4865 the end of the second range to the end of the first. If the second
4866 range is not a subset of the first, or if it is a subset and both
4867 ranges end at the same place, the range starts at the start of the
4868 first range and ends just before the second range.
4869 Otherwise, we can't describe this as a single range. */
4870 if (no_overlap)
4871 in_p = 1, low = low0, high = high0;
4872 else if (lowequal && highequal)
4873 in_p = 0, low = high = 0;
4874 else if (subset && lowequal)
4876 low = range_successor (high1);
4877 high = high0;
4878 in_p = 1;
4879 if (low == 0)
4881 /* We are in the weird situation where high0 > high1 but
4882 high1 has no successor. Punt. */
4883 return 0;
4886 else if (! subset || highequal)
4888 low = low0;
4889 high = range_predecessor (low1);
4890 in_p = 1;
4891 if (high == 0)
4893 /* low0 < low1 but low1 has no predecessor. Punt. */
4894 return 0;
4897 else
4898 return 0;
4901 else if (! in0_p && in1_p)
4903 /* If they don't overlap, the result is the second range. If the second
4904 is a subset of the first, the result is false. Otherwise,
4905 the range starts just after the first range and ends at the
4906 end of the second. */
4907 if (no_overlap)
4908 in_p = 1, low = low1, high = high1;
4909 else if (subset || highequal)
4910 in_p = 0, low = high = 0;
4911 else
4913 low = range_successor (high0);
4914 high = high1;
4915 in_p = 1;
4916 if (low == 0)
4918 /* high1 > high0 but high0 has no successor. Punt. */
4919 return 0;
4924 else
4926 /* The case where we are excluding both ranges. Here the complex case
4927 is if they don't overlap. In that case, the only time we have a
4928 range is if they are adjacent. If the second is a subset of the
4929 first, the result is the first. Otherwise, the range to exclude
4930 starts at the beginning of the first range and ends at the end of the
4931 second. */
4932 if (no_overlap)
4934 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4935 range_successor (high0),
4936 1, low1, 0)))
4937 in_p = 0, low = low0, high = high1;
4938 else
4940 /* Canonicalize - [min, x] into - [-, x]. */
4941 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4942 switch (TREE_CODE (TREE_TYPE (low0)))
4944 case ENUMERAL_TYPE:
4945 if (TYPE_PRECISION (TREE_TYPE (low0))
4946 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4947 break;
4948 /* FALLTHROUGH */
4949 case INTEGER_TYPE:
4950 if (tree_int_cst_equal (low0,
4951 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4952 low0 = 0;
4953 break;
4954 case POINTER_TYPE:
4955 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4956 && integer_zerop (low0))
4957 low0 = 0;
4958 break;
4959 default:
4960 break;
4963 /* Canonicalize - [x, max] into - [x, -]. */
4964 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4965 switch (TREE_CODE (TREE_TYPE (high1)))
4967 case ENUMERAL_TYPE:
4968 if (TYPE_PRECISION (TREE_TYPE (high1))
4969 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4970 break;
4971 /* FALLTHROUGH */
4972 case INTEGER_TYPE:
4973 if (tree_int_cst_equal (high1,
4974 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4975 high1 = 0;
4976 break;
4977 case POINTER_TYPE:
4978 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4979 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4980 high1, 1,
4981 integer_one_node, 1)))
4982 high1 = 0;
4983 break;
4984 default:
4985 break;
4988 /* The ranges might be also adjacent between the maximum and
4989 minimum values of the given type. For
4990 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4991 return + [x + 1, y - 1]. */
4992 if (low0 == 0 && high1 == 0)
4994 low = range_successor (high0);
4995 high = range_predecessor (low1);
4996 if (low == 0 || high == 0)
4997 return 0;
4999 in_p = 1;
5001 else
5002 return 0;
5005 else if (subset)
5006 in_p = 0, low = low0, high = high0;
5007 else
5008 in_p = 0, low = low0, high = high1;
5011 *pin_p = in_p, *plow = low, *phigh = high;
5012 return 1;
5016 /* Subroutine of fold, looking inside expressions of the form
5017 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5018 of the COND_EXPR. This function is being used also to optimize
5019 A op B ? C : A, by reversing the comparison first.
5021 Return a folded expression whose code is not a COND_EXPR
5022 anymore, or NULL_TREE if no folding opportunity is found. */
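/* For instance (illustrative): "x >= 0 ? x : -x" folds to
   ABS_EXPR <x>, and "x < y ? x : y" folds to a MIN_EXPR when NaNs
   and signed zeros need not be honored. */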
5024 static tree
5025 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5027 enum tree_code comp_code = TREE_CODE (arg0);
5028 tree arg00 = TREE_OPERAND (arg0, 0);
5029 tree arg01 = TREE_OPERAND (arg0, 1);
5030 tree arg1_type = TREE_TYPE (arg1);
5031 tree tem;
5033 STRIP_NOPS (arg1);
5034 STRIP_NOPS (arg2);
5036 /* If we have A op 0 ? A : -A, consider applying the following
5037 transformations:
5039 A == 0? A : -A same as -A
5040 A != 0? A : -A same as A
5041 A >= 0? A : -A same as abs (A)
5042 A > 0? A : -A same as abs (A)
5043 A <= 0? A : -A same as -abs (A)
5044 A < 0? A : -A same as -abs (A)
5046 None of these transformations work for modes with signed
5047 zeros. If A is +/-0, the first two transformations will
5048 change the sign of the result (from +0 to -0, or vice
5049 versa). The last four will fix the sign of the result,
5050 even though the original expressions could be positive or
5051 negative, depending on the sign of A.
5053 Note that all these transformations are correct if A is
5054 NaN, since the two alternatives (A and -A) are also NaNs. */
5055 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
5056 ? real_zerop (arg01)
5057 : integer_zerop (arg01))
5058 && ((TREE_CODE (arg2) == NEGATE_EXPR
5059 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5060 /* In the case that A is of the form X-Y, '-A' (arg2) may
5061 have already been folded to Y-X, check for that. */
5062 || (TREE_CODE (arg1) == MINUS_EXPR
5063 && TREE_CODE (arg2) == MINUS_EXPR
5064 && operand_equal_p (TREE_OPERAND (arg1, 0),
5065 TREE_OPERAND (arg2, 1), 0)
5066 && operand_equal_p (TREE_OPERAND (arg1, 1),
5067 TREE_OPERAND (arg2, 0), 0))))
5068 switch (comp_code)
5070 case EQ_EXPR:
5071 case UNEQ_EXPR:
5072 tem = fold_convert (arg1_type, arg1);
5073 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5074 case NE_EXPR:
5075 case LTGT_EXPR:
5076 return pedantic_non_lvalue (fold_convert (type, arg1));
5077 case UNGE_EXPR:
5078 case UNGT_EXPR:
5079 if (flag_trapping_math)
5080 break;
5081 /* Fall through. */
5082 case GE_EXPR:
5083 case GT_EXPR:
5084 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5085 arg1 = fold_convert (signed_type_for
5086 (TREE_TYPE (arg1)), arg1);
5087 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5088 return pedantic_non_lvalue (fold_convert (type, tem));
5089 case UNLE_EXPR:
5090 case UNLT_EXPR:
5091 if (flag_trapping_math)
5092 break;
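/* Fall through. */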
5093 case LE_EXPR:
5094 case LT_EXPR:
5095 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5096 arg1 = fold_convert (signed_type_for
5097 (TREE_TYPE (arg1)), arg1);
5098 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5099 return negate_expr (fold_convert (type, tem));
5100 default:
5101 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5102 break;
5105 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5106 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5107 both transformations are correct when A is NaN: A != 0
5108 is then true, and A == 0 is false. */
5110 if (integer_zerop (arg01) && integer_zerop (arg2))
5112 if (comp_code == NE_EXPR)
5113 return pedantic_non_lvalue (fold_convert (type, arg1));
5114 else if (comp_code == EQ_EXPR)
5115 return build_int_cst (type, 0);
5118 /* Try some transformations of A op B ? A : B.
5120 A == B? A : B same as B
5121 A != B? A : B same as A
5122 A >= B? A : B same as max (A, B)
5123 A > B? A : B same as max (B, A)
5124 A <= B? A : B same as min (A, B)
5125 A < B? A : B same as min (B, A)
5127 As above, these transformations don't work in the presence
5128 of signed zeros. For example, if A and B are zeros of
5129 opposite sign, the first two transformations will change
5130 the sign of the result. In the last four, the original
5131 expressions give different results for (A=+0, B=-0) and
5132 (A=-0, B=+0), but the transformed expressions do not.
5134 The first two transformations are correct if either A or B
5135 is a NaN. In the first transformation, the condition will
5136 be false, and B will indeed be chosen. In the case of the
5137 second transformation, the condition A != B will be true,
5138 and A will be chosen.
5140 The conversions to max() and min() are not correct if B is
5141 a number and A is not. The conditions in the original
5142 expressions will be false, so all four give B. The min()
5143 and max() versions would give a NaN instead. */
5144 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
5145 /* Avoid these transformations if the COND_EXPR may be used
5146 as an lvalue in the C++ front-end. PR c++/19199. */
5147 && (in_gimple_form
5148 || (strcmp (lang_hooks.name, "GNU C++") != 0
5149 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5150 || ! maybe_lvalue_p (arg1)
5151 || ! maybe_lvalue_p (arg2)))
5153 tree comp_op0 = arg00;
5154 tree comp_op1 = arg01;
5155 tree comp_type = TREE_TYPE (comp_op0);
5157 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5158 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5160 comp_type = type;
5161 comp_op0 = arg1;
5162 comp_op1 = arg2;
5165 switch (comp_code)
5167 case EQ_EXPR:
5168 return pedantic_non_lvalue (fold_convert (type, arg2));
5169 case NE_EXPR:
5170 return pedantic_non_lvalue (fold_convert (type, arg1));
5171 case LE_EXPR:
5172 case LT_EXPR:
5173 case UNLE_EXPR:
5174 case UNLT_EXPR:
5175 /* In C++ a ?: expression can be an lvalue, so put the
5176 operand which will be used if they are equal first
5177 so that we can convert this back to the
5178 corresponding COND_EXPR. */
5179 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5181 comp_op0 = fold_convert (comp_type, comp_op0);
5182 comp_op1 = fold_convert (comp_type, comp_op1);
5183 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5184 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5185 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5186 return pedantic_non_lvalue (fold_convert (type, tem));
5188 break;
5189 case GE_EXPR:
5190 case GT_EXPR:
5191 case UNGE_EXPR:
5192 case UNGT_EXPR:
5193 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5195 comp_op0 = fold_convert (comp_type, comp_op0);
5196 comp_op1 = fold_convert (comp_type, comp_op1);
5197 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5198 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5199 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5200 return pedantic_non_lvalue (fold_convert (type, tem));
5202 break;
5203 case UNEQ_EXPR:
5204 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5205 return pedantic_non_lvalue (fold_convert (type, arg2));
5206 break;
5207 case LTGT_EXPR:
5208 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5209 return pedantic_non_lvalue (fold_convert (type, arg1));
5210 break;
5211 default:
5212 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5213 break;
5217 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5218 we might still be able to simplify this. For example,
5219 if C1 is one less or one more than C2, this might have started
5220 out as a MIN or MAX and been transformed by this function.
5221 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5223 if (INTEGRAL_TYPE_P (type)
5224 && TREE_CODE (arg01) == INTEGER_CST
5225 && TREE_CODE (arg2) == INTEGER_CST)
5226 switch (comp_code)
5228 case EQ_EXPR:
5229 /* We can replace A with C1 in this case. */
5230 arg1 = fold_convert (type, arg01);
5231 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5233 case LT_EXPR:
5234 /* If C1 is C2 + 1, this is min(A, C2). */
5235 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5236 OEP_ONLY_CONST)
5237 && operand_equal_p (arg01,
5238 const_binop (PLUS_EXPR, arg2,
5239 build_int_cst (type, 1), 0),
5240 OEP_ONLY_CONST))
5241 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5242 type,
5243 fold_convert (type, arg1),
5244 arg2));
5245 break;
5247 case LE_EXPR:
5248 /* If C1 is C2 - 1, this is min(A, C2). */
5249 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5250 OEP_ONLY_CONST)
5251 && operand_equal_p (arg01,
5252 const_binop (MINUS_EXPR, arg2,
5253 build_int_cst (type, 1), 0),
5254 OEP_ONLY_CONST))
5255 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5256 type,
5257 fold_convert (type, arg1),
5258 arg2));
5259 break;
5261 case GT_EXPR:
5262 /* If C1 is C2 - 1, this is max(A, C2). */
5263 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5264 OEP_ONLY_CONST)
5265 && operand_equal_p (arg01,
5266 const_binop (MINUS_EXPR, arg2,
5267 build_int_cst (type, 1), 0),
5268 OEP_ONLY_CONST))
5269 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5270 type,
5271 fold_convert (type, arg1),
5272 arg2));
5273 break;
5275 case GE_EXPR:
5276 /* If C1 is C2 + 1, this is max(A, C2). */
5277 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5278 OEP_ONLY_CONST)
5279 && operand_equal_p (arg01,
5280 const_binop (PLUS_EXPR, arg2,
5281 build_int_cst (type, 1), 0),
5282 OEP_ONLY_CONST))
5283 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5284 type,
5285 fold_convert (type, arg1),
5286 arg2));
5287 break;
5288 case NE_EXPR:
5289 break;
5290 default:
5291 gcc_unreachable ();
5294 return NULL_TREE;
5299 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5300 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5301 #endif
5303 /* EXP is some logical combination of boolean tests. See if we can
5304 merge it into some range test. Return the new tree if so. */
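/* E.g. (illustrative): "x == 0 || x == 1" merges into the single
   test (unsigned) x <= 1, and "ch >= '0' && ch <= '9'" into
   (unsigned) (ch - '0') <= 9. */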
5306 static tree
5307 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5309 int or_op = (code == TRUTH_ORIF_EXPR
5310 || code == TRUTH_OR_EXPR);
5311 int in0_p, in1_p, in_p;
5312 tree low0, low1, low, high0, high1, high;
5313 bool strict_overflow_p = false;
5314 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5315 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5316 tree tem;
5317 const char * const warnmsg = G_("assuming signed overflow does not occur "
5318 "when simplifying range test");
5320 /* If this is an OR operation, invert both sides; we will invert
5321 again at the end. */
5322 if (or_op)
5323 in0_p = ! in0_p, in1_p = ! in1_p;
5325 /* If both expressions are the same, if we can merge the ranges, and we
5326 can build the range test, return it or it inverted. If one of the
5327 ranges is always true or always false, consider it to be the same
5328 expression as the other. */
5329 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5330 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5331 in1_p, low1, high1)
5332 && 0 != (tem = (build_range_check (type,
5333 lhs != 0 ? lhs
5334 : rhs != 0 ? rhs : integer_zero_node,
5335 in_p, low, high))))
5337 if (strict_overflow_p)
5338 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5339 return or_op ? invert_truthvalue (tem) : tem;
5342 /* On machines where the branch cost is expensive, if this is a
5343 short-circuited branch and the underlying object on both sides
5344 is the same, make a non-short-circuit operation. */
5345 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5346 && lhs != 0 && rhs != 0
5347 && (code == TRUTH_ANDIF_EXPR
5348 || code == TRUTH_ORIF_EXPR)
5349 && operand_equal_p (lhs, rhs, 0))
5351 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5352 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5353 which cases we can't do this. */
5354 if (simple_operand_p (lhs))
5355 return build2 (code == TRUTH_ANDIF_EXPR
5356 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5357 type, op0, op1);
5359 else if (lang_hooks.decls.global_bindings_p () == 0
5360 && ! CONTAINS_PLACEHOLDER_P (lhs))
5362 tree common = save_expr (lhs);
5364 if (0 != (lhs = build_range_check (type, common,
5365 or_op ? ! in0_p : in0_p,
5366 low0, high0))
5367 && (0 != (rhs = build_range_check (type, common,
5368 or_op ? ! in1_p : in1_p,
5369 low1, high1))))
5371 if (strict_overflow_p)
5372 fold_overflow_warning (warnmsg,
5373 WARN_STRICT_OVERFLOW_COMPARISON);
5374 return build2 (code == TRUTH_ANDIF_EXPR
5375 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5376 type, lhs, rhs);
5381 return 0;
5384 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5385 bit value. Arrange things so the extra bits will be set to zero if and
5386 only if C is sign-extended to its full width. If MASK is nonzero,
5387 it is an INTEGER_CST that should be AND'ed with the extra bits. */
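/* Worked example (editorial): with P == 8, UNSIGNEDP == 0 and a
   16-bit C == 0x00ff (the 8-bit value -1), the sign bit is isolated,
   moved to bit 15, arithmetically shifted back down to give 0xff00,
   and XORed with C, yielding 0xffff -- exactly C sign-extended to
   its full width. */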
5389 static tree
5390 unextend (tree c, int p, int unsignedp, tree mask)
5392 tree type = TREE_TYPE (c);
5393 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5394 tree temp;
5396 if (p == modesize || unsignedp)
5397 return c;
5399 /* We work by getting just the sign bit into the low-order bit, then
5400 into the high-order bit, then sign-extend. We then XOR that value
5401 with C. */
5402 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5403 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5405 /* We must use a signed type in order to get an arithmetic right shift.
5406 However, we must also avoid introducing accidental overflows, so that
5407 a subsequent call to integer_zerop will work. Hence we must
5408 do the type conversion here. At this point, the constant is either
5409 zero or one, and the conversion to a signed type can never overflow.
5410 We could get an overflow if this conversion is done anywhere else. */
5411 if (TYPE_UNSIGNED (type))
5412 temp = fold_convert (signed_type_for (type), temp);
5414 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5415 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5416 if (mask != 0)
5417 temp = const_binop (BIT_AND_EXPR, temp,
5418 fold_convert (TREE_TYPE (c), mask), 0);
5419 /* If necessary, convert the type back to match the type of C. */
5420 if (TYPE_UNSIGNED (type))
5421 temp = fold_convert (type, temp);
5423 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5426 /* Find ways of folding logical expressions of LHS and RHS:
5427 Try to merge two comparisons to the same innermost item.
5428 Look for range tests like "ch >= '0' && ch <= '9'".
5429 Look for combinations of simple terms on machines with expensive branches
5430 and evaluate the RHS unconditionally.
5432 For example, if we have p->a == 2 && p->b == 4 and we can make an
5433 object large enough to span both A and B, we can do this with a comparison
5434 against the object ANDed with a mask.
5436 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5437 operations to do this with one comparison.
5439 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5440 function and the one above.
5442 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5443 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5445 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5446 two operands.
5448 We return the simplified tree or 0 if no optimization is possible. */
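/* A hypothetical example: for

     struct { unsigned a : 4; unsigned b : 4; } *p, *q;

   "p->a == 2 && p->b == 4" becomes one byte-sized comparison of the
   containing word against the constant 0x42 (little-endian layout
   assumed), and "p->a == q->a && p->b == q->b" becomes one masked
   comparison of the two bytes. */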
5450 static tree
5451 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5453 /* If this is the "or" of two comparisons, we can do something if
5454 the comparisons are NE_EXPR. If this is the "and", we can do something
5455 if the comparisons are EQ_EXPR. I.e.,
5456 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5458 WANTED_CODE is this operation code. For single bit fields, we can
5459 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5460 comparison for one-bit fields. */
5462 enum tree_code wanted_code;
5463 enum tree_code lcode, rcode;
5464 tree ll_arg, lr_arg, rl_arg, rr_arg;
5465 tree ll_inner, lr_inner, rl_inner, rr_inner;
5466 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5467 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5468 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5469 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5470 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5471 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5472 enum machine_mode lnmode, rnmode;
5473 tree ll_mask, lr_mask, rl_mask, rr_mask;
5474 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5475 tree l_const, r_const;
5476 tree lntype, rntype, result;
5477 int first_bit, end_bit;
5478 int volatilep;
5479 tree orig_lhs = lhs, orig_rhs = rhs;
5480 enum tree_code orig_code = code;
5482 /* Start by getting the comparison codes. Fail if anything is volatile.
5483 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5484 it were surrounded with a NE_EXPR. */
5486 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5487 return 0;
5489 lcode = TREE_CODE (lhs);
5490 rcode = TREE_CODE (rhs);
5492 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5494 lhs = build2 (NE_EXPR, truth_type, lhs,
5495 build_int_cst (TREE_TYPE (lhs), 0));
5496 lcode = NE_EXPR;
5499 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5501 rhs = build2 (NE_EXPR, truth_type, rhs,
5502 build_int_cst (TREE_TYPE (rhs), 0));
5503 rcode = NE_EXPR;
5506 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5507 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5508 return 0;
5510 ll_arg = TREE_OPERAND (lhs, 0);
5511 lr_arg = TREE_OPERAND (lhs, 1);
5512 rl_arg = TREE_OPERAND (rhs, 0);
5513 rr_arg = TREE_OPERAND (rhs, 1);
5515 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5516 if (simple_operand_p (ll_arg)
5517 && simple_operand_p (lr_arg))
5519 tree result;
5520 if (operand_equal_p (ll_arg, rl_arg, 0)
5521 && operand_equal_p (lr_arg, rr_arg, 0))
5523 result = combine_comparisons (code, lcode, rcode,
5524 truth_type, ll_arg, lr_arg);
5525 if (result)
5526 return result;
5528 else if (operand_equal_p (ll_arg, rr_arg, 0)
5529 && operand_equal_p (lr_arg, rl_arg, 0))
5531 result = combine_comparisons (code, lcode,
5532 swap_tree_comparison (rcode),
5533 truth_type, ll_arg, lr_arg);
5534 if (result)
5535 return result;
5539 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5540 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5542 /* If the RHS can be evaluated unconditionally and its operands are
5543 simple, it wins to evaluate the RHS unconditionally on machines
5544 with expensive branches. In this case, this isn't a comparison
5545 that can be merged. Avoid doing this if the RHS is a floating-point
5546 comparison since those can trap. */
5548 if (BRANCH_COST >= 2
5549 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5550 && simple_operand_p (rl_arg)
5551 && simple_operand_p (rr_arg))
5553 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5554 if (code == TRUTH_OR_EXPR
5555 && lcode == NE_EXPR && integer_zerop (lr_arg)
5556 && rcode == NE_EXPR && integer_zerop (rr_arg)
5557 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5558 return build2 (NE_EXPR, truth_type,
5559 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5560 ll_arg, rl_arg),
5561 build_int_cst (TREE_TYPE (ll_arg), 0));
5563 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5564 if (code == TRUTH_AND_EXPR
5565 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5566 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5567 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5568 return build2 (EQ_EXPR, truth_type,
5569 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5570 ll_arg, rl_arg),
5571 build_int_cst (TREE_TYPE (ll_arg), 0));
5573 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5575 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5576 return build2 (code, truth_type, lhs, rhs);
5577 return NULL_TREE;
5581 /* See if the comparisons can be merged. Then get all the parameters for
5582 each side. */
5584 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5585 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5586 return 0;
5588 volatilep = 0;
5589 ll_inner = decode_field_reference (ll_arg,
5590 &ll_bitsize, &ll_bitpos, &ll_mode,
5591 &ll_unsignedp, &volatilep, &ll_mask,
5592 &ll_and_mask);
5593 lr_inner = decode_field_reference (lr_arg,
5594 &lr_bitsize, &lr_bitpos, &lr_mode,
5595 &lr_unsignedp, &volatilep, &lr_mask,
5596 &lr_and_mask);
5597 rl_inner = decode_field_reference (rl_arg,
5598 &rl_bitsize, &rl_bitpos, &rl_mode,
5599 &rl_unsignedp, &volatilep, &rl_mask,
5600 &rl_and_mask);
5601 rr_inner = decode_field_reference (rr_arg,
5602 &rr_bitsize, &rr_bitpos, &rr_mode,
5603 &rr_unsignedp, &volatilep, &rr_mask,
5604 &rr_and_mask);
5606 /* The inner operation on the lhs of each comparison must be the same
5607 if we are to be able to do anything.
5608 Then see if we have constants. If not, the same must be true for
5609 the rhs's. */
5610 if (volatilep || ll_inner == 0 || rl_inner == 0
5611 || ! operand_equal_p (ll_inner, rl_inner, 0))
5612 return 0;
5614 if (TREE_CODE (lr_arg) == INTEGER_CST
5615 && TREE_CODE (rr_arg) == INTEGER_CST)
5616 l_const = lr_arg, r_const = rr_arg;
5617 else if (lr_inner == 0 || rr_inner == 0
5618 || ! operand_equal_p (lr_inner, rr_inner, 0))
5619 return 0;
5620 else
5621 l_const = r_const = 0;
5623 /* If either comparison code is not correct for our logical operation,
5624 fail. However, we can convert a one-bit comparison against zero into
5625 the opposite comparison against that bit being set in the field. */
5627 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5628 if (lcode != wanted_code)
5630 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5632 /* Make the left operand unsigned, since we are only interested
5633 in the value of one bit. Otherwise we are doing the wrong
5634 thing below. */
5635 ll_unsignedp = 1;
5636 l_const = ll_mask;
5638 else
5639 return 0;
5642 /* This is analogous to the code for l_const above. */
5643 if (rcode != wanted_code)
5645 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5647 rl_unsignedp = 1;
5648 r_const = rl_mask;
5650 else
5651 return 0;
5654 /* See if we can find a mode that contains both fields being compared on
5655 the left. If we can't, fail. Otherwise, update all constants and masks
5656 to be relative to a field of that size. */
5657 first_bit = MIN (ll_bitpos, rl_bitpos);
5658 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5659 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5660 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5661 volatilep);
5662 if (lnmode == VOIDmode)
5663 return 0;
5665 lnbitsize = GET_MODE_BITSIZE (lnmode);
5666 lnbitpos = first_bit & ~ (lnbitsize - 1);
5667 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5668 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5670 if (BYTES_BIG_ENDIAN)
5672 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5673 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5676 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5677 size_int (xll_bitpos), 0);
5678 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5679 size_int (xrl_bitpos), 0);
5681 if (l_const)
5683 l_const = fold_convert (lntype, l_const);
5684 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5685 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5686 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5687 fold_build1 (BIT_NOT_EXPR,
5688 lntype, ll_mask),
5689 0)))
5691 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5693 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5696 if (r_const)
5698 r_const = fold_convert (lntype, r_const);
5699 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5700 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5701 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5702 fold_build1 (BIT_NOT_EXPR,
5703 lntype, rl_mask),
5704 0)))
5706 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5708 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5712 /* If the right sides are not constant, do the same for them. Also,
5713 disallow this optimization if a size or signedness mismatch occurs
5714 between the left and right sides. */
5715 if (l_const == 0)
5717 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5718 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5719 /* Make sure the two fields on the right
5720 correspond to the left without being swapped. */
5721 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5722 return 0;
5724 first_bit = MIN (lr_bitpos, rr_bitpos);
5725 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5726 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5727 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5728 volatilep);
5729 if (rnmode == VOIDmode)
5730 return 0;
5732 rnbitsize = GET_MODE_BITSIZE (rnmode);
5733 rnbitpos = first_bit & ~ (rnbitsize - 1);
5734 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5735 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5737 if (BYTES_BIG_ENDIAN)
5739 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5740 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5743 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5744 size_int (xlr_bitpos), 0);
5745 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5746 size_int (xrr_bitpos), 0);
5748 /* Make a mask that corresponds to both fields being compared.
5749 Do this for both items being compared. If the operands are the
5750 same size and the bits being compared are in the same position
5751 then we can do this by masking both and comparing the masked
5752 results. */
5753 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5754 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5755 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5757 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5758 ll_unsignedp || rl_unsignedp);
5759 if (! all_ones_mask_p (ll_mask, lnbitsize))
5760 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5762 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5763 lr_unsignedp || rr_unsignedp);
5764 if (! all_ones_mask_p (lr_mask, rnbitsize))
5765 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5767 return build2 (wanted_code, truth_type, lhs, rhs);
5770 /* There is still another way we can do something: If both pairs of
5771 fields being compared are adjacent, we may be able to make a wider
5772 field containing them both.
5774 Note that we still must mask the lhs/rhs expressions. Furthermore,
5775 the mask must be shifted to account for the shift done by
5776 make_bit_field_ref. */
5777 if ((ll_bitsize + ll_bitpos == rl_bitpos
5778 && lr_bitsize + lr_bitpos == rr_bitpos)
5779 || (ll_bitpos == rl_bitpos + rl_bitsize
5780 && lr_bitpos == rr_bitpos + rr_bitsize))
5782 tree type;
5784 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5785 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5786 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5787 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5789 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5790 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5791 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5792 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5794 /* Convert to the smaller type before masking out unwanted bits. */
5795 type = lntype;
5796 if (lntype != rntype)
5798 if (lnbitsize > rnbitsize)
5800 lhs = fold_convert (rntype, lhs);
5801 ll_mask = fold_convert (rntype, ll_mask);
5802 type = rntype;
5804 else if (lnbitsize < rnbitsize)
5806 rhs = fold_convert (lntype, rhs);
5807 lr_mask = fold_convert (lntype, lr_mask);
5808 type = lntype;
5812 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5813 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5815 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5816 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5818 return build2 (wanted_code, truth_type, lhs, rhs);
5821 return 0;
5824 /* Handle the case of comparisons with constants. If there is something in
5825 common between the masks, those bits of the constants must be the same.
5826 If not, the condition is always false. Test for this to avoid generating
5827 incorrect code below. */
5828 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5829 if (! integer_zerop (result)
5830 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5831 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5833 if (wanted_code == NE_EXPR)
5835 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5836 return constant_boolean_node (true, truth_type);
5838 else
5840 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5841 return constant_boolean_node (false, truth_type);
5845 /* Construct the expression we will return. First get the component
5846 reference we will make. Unless the mask is all ones for the full
5847 width of that field, perform the mask operation. Then compare with the
5848 merged constant. */
5849 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5850 ll_unsignedp || rl_unsignedp);
5852 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5853 if (! all_ones_mask_p (ll_mask, lnbitsize))
5854 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5856 return build2 (wanted_code, truth_type, result,
5857 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
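/* An expository sketch of the merge above, using a hypothetical
   struct that is not part of this file: given

     struct { unsigned a : 4; unsigned b : 4; } s;

   the test s.a == 2 && s.b == 3 can, when both fields live in one
   addressable unit, become a single load of that unit, an optional
   mask, and one compare against the merged constant (2 | (3 << 4)),
   assuming a little-endian bit layout.  */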
5860 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5861 constant. */
5863 static tree
5864 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5866 tree arg0 = op0;
5867 enum tree_code op_code;
5868 tree comp_const = op1;
5869 tree minmax_const;
5870 int consts_equal, consts_lt;
5871 tree inner;
5873 STRIP_SIGN_NOPS (arg0);
5875 op_code = TREE_CODE (arg0);
5876 minmax_const = TREE_OPERAND (arg0, 1);
5877 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5878 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5879 inner = TREE_OPERAND (arg0, 0);
5881 /* If something does not permit us to optimize, return the original tree. */
5882 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5883 || TREE_CODE (comp_const) != INTEGER_CST
5884 || TREE_OVERFLOW (comp_const)
5885 || TREE_CODE (minmax_const) != INTEGER_CST
5886 || TREE_OVERFLOW (minmax_const))
5887 return NULL_TREE;
5889 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5890 and GT_EXPR, doing the rest with recursive calls using logical
5891 simplifications. */
5892 switch (code)
5894 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5896 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5897 type, op0, op1);
5898 if (tem)
5899 return invert_truthvalue (tem);
5900 return NULL_TREE;
5903 case GE_EXPR:
5904 return
5905 fold_build2 (TRUTH_ORIF_EXPR, type,
5906 optimize_minmax_comparison
5907 (EQ_EXPR, type, arg0, comp_const),
5908 optimize_minmax_comparison
5909 (GT_EXPR, type, arg0, comp_const));
5911 case EQ_EXPR:
5912 if (op_code == MAX_EXPR && consts_equal)
5913 /* MAX (X, 0) == 0 -> X <= 0 */
5914 return fold_build2 (LE_EXPR, type, inner, comp_const);
5916 else if (op_code == MAX_EXPR && consts_lt)
5917 /* MAX (X, 0) == 5 -> X == 5 */
5918 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5920 else if (op_code == MAX_EXPR)
5921 /* MAX (X, 0) == -1 -> false */
5922 return omit_one_operand (type, integer_zero_node, inner);
5924 else if (consts_equal)
5925 /* MIN (X, 0) == 0 -> X >= 0 */
5926 return fold_build2 (GE_EXPR, type, inner, comp_const);
5928 else if (consts_lt)
5929 /* MIN (X, 0) == 5 -> false */
5930 return omit_one_operand (type, integer_zero_node, inner);
5932 else
5933 /* MIN (X, 0) == -1 -> X == -1 */
5934 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5936 case GT_EXPR:
5937 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5938 /* MAX (X, 0) > 0 -> X > 0
5939 MAX (X, 0) > 5 -> X > 5 */
5940 return fold_build2 (GT_EXPR, type, inner, comp_const);
5942 else if (op_code == MAX_EXPR)
5943 /* MAX (X, 0) > -1 -> true */
5944 return omit_one_operand (type, integer_one_node, inner);
5946 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5947 /* MIN (X, 0) > 0 -> false
5948 MIN (X, 0) > 5 -> false */
5949 return omit_one_operand (type, integer_zero_node, inner);
5951 else
5952 /* MIN (X, 0) > -1 -> X > -1 */
5953 return fold_build2 (GT_EXPR, type, inner, comp_const);
5955 default:
5956 return NULL_TREE;
5960 /* T is an integer expression that is being multiplied, divided, or taken a
5961 modulus (CODE says which and what kind of divide or modulus) by a
5962 constant C. See if we can eliminate that operation by folding it with
5963 other operations already in T. WIDE_TYPE, if non-null, is a type that
5964 should be used for the computation if wider than our type.
5966 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5967 (X * 2) + (Y * 4). We must, however, be assured that either the original
5968 expression would not overflow or that overflow is undefined for the type
5969 in the language in question.
5971 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5972 the machine has a multiply-accumulate insn or that this is part of an
5973 addressing calculation.
5975 If we return a non-null expression, it is an equivalent form of the
5976 original computation, but need not be in the original type.
5978 We set *STRICT_OVERFLOW_P to true if the return value depends on
5979 signed overflow being undefined. Otherwise we do not change
5980 *STRICT_OVERFLOW_P. */
5982 static tree
5983 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5984 bool *strict_overflow_p)
5986 /* To avoid exponential search depth, refuse to allow recursion past
5987 three levels. Beyond that (1) it's highly unlikely that we'll find
5988 something interesting and (2) we've probably processed it before
5989 when we built the inner expression. */
5991 static int depth;
5992 tree ret;
5994 if (depth > 3)
5995 return NULL;
5997 depth++;
5998 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5999 depth--;
6001 return ret;
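/* Expository example of the depth guard above: folding the
   documented case ((X * 8) + (Y * 16)) / 4 recurses from the
   PLUS_EXPR into each MULT_EXPR, staying well below the cutoff of
   three levels and yielding (X * 2) + (Y * 4).  */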
6004 static tree
6005 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6006 bool *strict_overflow_p)
6008 tree type = TREE_TYPE (t);
6009 enum tree_code tcode = TREE_CODE (t);
6010 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6011 > GET_MODE_SIZE (TYPE_MODE (type)))
6012 ? wide_type : type);
6013 tree t1, t2;
6014 int same_p = tcode == code;
6015 tree op0 = NULL_TREE, op1 = NULL_TREE;
6016 bool sub_strict_overflow_p;
6018 /* Don't deal with constants of zero here; they confuse the code below. */
6019 if (integer_zerop (c))
6020 return NULL_TREE;
6022 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6023 op0 = TREE_OPERAND (t, 0);
6025 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6026 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6028 /* Note that we need not handle conditional operations here since fold
6029 already handles those cases. So just do arithmetic here. */
6030 switch (tcode)
6032 case INTEGER_CST:
6033 /* For a constant, we can always simplify if we are a multiply
6034 or (for divide and modulus) if it is a multiple of our constant. */
6035 if (code == MULT_EXPR
6036 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6037 return const_binop (code, fold_convert (ctype, t),
6038 fold_convert (ctype, c), 0);
6039 break;
6041 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0)
6044 || UNARY_CLASS_P (op0)
6045 || BINARY_CLASS_P (op0)
6046 || VL_EXP_CLASS_P (op0)
6047 || EXPRESSION_CLASS_P (op0))
6048 /* ... and is unsigned, and its type is smaller than ctype,
6049 then we cannot pass through as widening. */
6050 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
6051 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6052 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6053 && (GET_MODE_SIZE (TYPE_MODE (ctype))
6054 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
6055 /* ... or this is a truncation (t is narrower than op0),
6056 then we cannot pass through this narrowing. */
6057 || (GET_MODE_SIZE (TYPE_MODE (type))
6058 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
6059 /* ... or signedness changes for division or modulus,
6060 then we cannot pass through this conversion. */
6061 || (code != MULT_EXPR
6062 && (TYPE_UNSIGNED (ctype)
6063 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6064 /* ... or has undefined overflow while the type converted to
6065 has not, we cannot do the operation in the inner type
6066 as that would introduce undefined overflow. */
6067 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6068 && !TYPE_OVERFLOW_UNDEFINED (type))))
6069 break;
6071 /* Pass the constant down and see if we can make a simplification. If
6072 we can, replace this expression with the inner simplification for
6073 possible later conversion to our or some other type. */
6074 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6075 && TREE_CODE (t2) == INTEGER_CST
6076 && !TREE_OVERFLOW (t2)
6077 && (0 != (t1 = extract_muldiv (op0, t2, code,
6078 code == MULT_EXPR
6079 ? ctype : NULL_TREE,
6080 strict_overflow_p))))
6081 return t1;
6082 break;
6084 case ABS_EXPR:
6085 /* If widening the type changes it from signed to unsigned, then we
6086 must avoid building ABS_EXPR itself as unsigned. */
6087 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6089 tree cstype = (*signed_type_for) (ctype);
6090 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6091 != 0)
6093 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6094 return fold_convert (ctype, t1);
6096 break;
6098 /* If the constant is negative, we cannot simplify this. */
6099 if (tree_int_cst_sgn (c) == -1)
6100 break;
6101 /* FALLTHROUGH */
6102 case NEGATE_EXPR:
6103 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6104 != 0)
6105 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6106 break;
6108 case MIN_EXPR: case MAX_EXPR:
6109 /* If widening the type changes the signedness, then we can't perform
6110 this optimization as that changes the result. */
6111 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6112 break;
6114 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6115 sub_strict_overflow_p = false;
6116 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6117 &sub_strict_overflow_p)) != 0
6118 && (t2 = extract_muldiv (op1, c, code, wide_type,
6119 &sub_strict_overflow_p)) != 0)
6121 if (tree_int_cst_sgn (c) < 0)
6122 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6123 if (sub_strict_overflow_p)
6124 *strict_overflow_p = true;
6125 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6126 fold_convert (ctype, t2));
6128 break;
6130 case LSHIFT_EXPR: case RSHIFT_EXPR:
6131 /* If the second operand is constant, this is a multiplication
6132 or floor division by a power of two, so we can treat it that
6133 way unless the multiplier or divisor overflows. Signed
6134 left-shift overflow is implementation-defined rather than
6135 undefined in C90, so do not convert signed left shift into
6136 multiplication. */
6137 if (TREE_CODE (op1) == INTEGER_CST
6138 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6139 /* const_binop may not detect overflow correctly,
6140 so check for it explicitly here. */
6141 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6142 && TREE_INT_CST_HIGH (op1) == 0
6143 && 0 != (t1 = fold_convert (ctype,
6144 const_binop (LSHIFT_EXPR,
6145 size_one_node,
6146 op1, 0)))
6147 && !TREE_OVERFLOW (t1))
6148 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6149 ? MULT_EXPR : FLOOR_DIV_EXPR,
6150 ctype, fold_convert (ctype, op0), t1),
6151 c, code, wide_type, strict_overflow_p);
6152 break;
6154 case PLUS_EXPR: case MINUS_EXPR:
6155 /* See if we can eliminate the operation on both sides. If we can, we
6156 can return a new PLUS or MINUS. If we can't, the only remaining
6157 cases where we can do anything are if the second operand is a
6158 constant. */
6159 sub_strict_overflow_p = false;
6160 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6161 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6162 if (t1 != 0 && t2 != 0
6163 && (code == MULT_EXPR
6164 /* If not multiplication, we can only do this if both operands
6165 are divisible by c. */
6166 || (multiple_of_p (ctype, op0, c)
6167 && multiple_of_p (ctype, op1, c))))
6169 if (sub_strict_overflow_p)
6170 *strict_overflow_p = true;
6171 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6172 fold_convert (ctype, t2));
6175 /* If this was a subtraction, negate OP1 and set it to be an addition.
6176 This simplifies the logic below. */
6177 if (tcode == MINUS_EXPR)
6178 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6180 if (TREE_CODE (op1) != INTEGER_CST)
6181 break;
6183 /* If either OP1 or C are negative, this optimization is not safe for
6184 some of the division and remainder types while for others we need
6185 to change the code. */
6186 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6188 if (code == CEIL_DIV_EXPR)
6189 code = FLOOR_DIV_EXPR;
6190 else if (code == FLOOR_DIV_EXPR)
6191 code = CEIL_DIV_EXPR;
6192 else if (code != MULT_EXPR
6193 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6194 break;
6197 /* If it's a multiply or a division/modulus operation of a multiple
6198 of our constant, do the operation and verify it doesn't overflow. */
6199 if (code == MULT_EXPR
6200 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6202 op1 = const_binop (code, fold_convert (ctype, op1),
6203 fold_convert (ctype, c), 0);
6204 /* We allow the constant to overflow with wrapping semantics. */
6205 if (op1 == 0
6206 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6207 break;
6209 else
6210 break;
6212 /* If we have an unsigned type that is not a sizetype, we cannot widen
6213 the operation since it will change the result if the original
6214 computation overflowed. */
6215 if (TYPE_UNSIGNED (ctype)
6216 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6217 && ctype != type)
6218 break;
6220 /* If we were able to eliminate our operation from the first side,
6221 apply our operation to the second side and reform the PLUS. */
6222 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6223 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6225 /* The last case is if we are a multiply. In that case, we can
6226 apply the distributive law to commute the multiply and addition
6227 if the multiplication of the constants doesn't overflow. */
6228 if (code == MULT_EXPR)
6229 return fold_build2 (tcode, ctype,
6230 fold_build2 (code, ctype,
6231 fold_convert (ctype, op0),
6232 fold_convert (ctype, c)),
6233 op1);
6235 break;
6237 case MULT_EXPR:
6238 /* We have a special case here if we are doing something like
6239 (C * 8) % 4 since we know that's zero. */
6240 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6241 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6242 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6243 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6244 return omit_one_operand (type, integer_zero_node, op0);
6246 /* ... fall through ... */
6248 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6249 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6250 /* If we can extract our operation from the LHS, do so and return a
6251 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6252 do something only if the second operand is a constant. */
6253 if (same_p
6254 && (t1 = extract_muldiv (op0, c, code, wide_type,
6255 strict_overflow_p)) != 0)
6256 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6257 fold_convert (ctype, op1));
6258 else if (tcode == MULT_EXPR && code == MULT_EXPR
6259 && (t1 = extract_muldiv (op1, c, code, wide_type,
6260 strict_overflow_p)) != 0)
6261 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6262 fold_convert (ctype, t1));
6263 else if (TREE_CODE (op1) != INTEGER_CST)
6264 return 0;
6266 /* If these are the same operation types, we can associate them
6267 assuming no overflow. */
6268 if (tcode == code
6269 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
6270 fold_convert (ctype, c), 0))
6271 && !TREE_OVERFLOW (t1))
6272 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6274 /* If these operations "cancel" each other, we have the main
6275 optimizations of this pass, which occur when either constant is a
6276 multiple of the other, in which case we replace this with an
6277 operation of either CODE or TCODE.
6279 If we have an unsigned type that is not a sizetype, we cannot do
6280 this since it will change the result if the original computation
6281 overflowed. */
6282 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6283 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6284 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6285 || (tcode == MULT_EXPR
6286 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6287 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6288 && code != MULT_EXPR)))
6290 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6292 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6293 *strict_overflow_p = true;
6294 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6295 fold_convert (ctype,
6296 const_binop (TRUNC_DIV_EXPR,
6297 op1, c, 0)));
6299 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6301 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6302 *strict_overflow_p = true;
6303 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6304 fold_convert (ctype,
6305 const_binop (TRUNC_DIV_EXPR,
6306 c, op1, 0)));
6309 break;
6311 default:
6312 break;
6315 return 0;
6318 /* Return a node which has the indicated constant VALUE (either 0 or
6319 1), and is of the indicated TYPE. */
6321 tree
6322 constant_boolean_node (int value, tree type)
6324 if (type == integer_type_node)
6325 return value ? integer_one_node : integer_zero_node;
6326 else if (type == boolean_type_node)
6327 return value ? boolean_true_node : boolean_false_node;
6328 else
6329 return build_int_cst (type, value);
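/* Usage sketch (expository): constant_boolean_node (1, boolean_type_node)
   yields boolean_true_node, constant_boolean_node (0, integer_type_node)
   yields integer_zero_node, and any other TYPE produces a fresh
   INTEGER_CST of that type.  */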
6333 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6334 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6335 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6336 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6337 COND is the first argument to CODE; otherwise (as in the example
6338 given here), it is the second argument. TYPE is the type of the
6339 original expression. Return NULL_TREE if no simplification is
6340 possible. */
6342 static tree
6343 fold_binary_op_with_conditional_arg (enum tree_code code,
6344 tree type, tree op0, tree op1,
6345 tree cond, tree arg, int cond_first_p)
6347 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6348 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6349 tree test, true_value, false_value;
6350 tree lhs = NULL_TREE;
6351 tree rhs = NULL_TREE;
6353 /* This transformation is only worthwhile if we don't have to wrap
6354 arg in a SAVE_EXPR, and the operation can be simplified on at least
6355 one of the branches once it's pushed inside the COND_EXPR. */
6356 if (!TREE_CONSTANT (arg))
6357 return NULL_TREE;
6359 if (TREE_CODE (cond) == COND_EXPR)
6361 test = TREE_OPERAND (cond, 0);
6362 true_value = TREE_OPERAND (cond, 1);
6363 false_value = TREE_OPERAND (cond, 2);
6364 /* If this operand throws an exception, then it does not make
6365 sense to try to perform a logical or arithmetic operation
6366 involving it. */
6367 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6368 lhs = true_value;
6369 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6370 rhs = false_value;
6372 else
6374 tree testtype = TREE_TYPE (cond);
6375 test = cond;
6376 true_value = constant_boolean_node (true, testtype);
6377 false_value = constant_boolean_node (false, testtype);
6380 arg = fold_convert (arg_type, arg);
6381 if (lhs == 0)
6383 true_value = fold_convert (cond_type, true_value);
6384 if (cond_first_p)
6385 lhs = fold_build2 (code, type, true_value, arg);
6386 else
6387 lhs = fold_build2 (code, type, arg, true_value);
6389 if (rhs == 0)
6391 false_value = fold_convert (cond_type, false_value);
6392 if (cond_first_p)
6393 rhs = fold_build2 (code, type, false_value, arg);
6394 else
6395 rhs = fold_build2 (code, type, arg, false_value);
6398 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6399 return fold_convert (type, test);
6403 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6405 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6406 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6407 ADDEND is the same as X.
6409 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6410 and finite. The problematic cases are when X is zero, and its mode
6411 has signed zeros. In the case of rounding towards -infinity,
6412 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6413 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6415 static bool
6416 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6418 if (!real_zerop (addend))
6419 return false;
6421 /* Don't allow the fold with -fsignaling-nans. */
6422 if (HONOR_SNANS (TYPE_MODE (type)))
6423 return false;
6425 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6426 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6427 return true;
6429 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6430 if (TREE_CODE (addend) == REAL_CST
6431 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6432 negate = !negate;
6434 /* The mode has signed zeros, and we have to honor their sign.
6435 In this situation, there is only one case we can return true for.
6436 X - 0 is the same as X unless rounding towards -infinity is
6437 supported. */
6438 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
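/* Worked example (expository): with signed zeros honored and default
   rounding, x + 0.0 must not fold to x because (-0.0) + 0.0 is +0.0,
   whereas x - 0.0 may fold; only under rounding towards -infinity
   does it break, since there (+0.0) - 0.0 yields -0.0.  */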
6441 /* Subroutine of fold() that checks comparisons of built-in math
6442 functions against real constants.
6444 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6445 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6446 is the type of the result and ARG0 and ARG1 are the operands of the
6447 comparison. ARG1 must be a TREE_REAL_CST.
6449 The function returns the constant folded tree if a simplification
6450 can be made, and NULL_TREE otherwise. */
6452 static tree
6453 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6454 tree type, tree arg0, tree arg1)
6456 REAL_VALUE_TYPE c;
6458 if (BUILTIN_SQRT_P (fcode))
6460 tree arg = CALL_EXPR_ARG (arg0, 0);
6461 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6463 c = TREE_REAL_CST (arg1);
6464 if (REAL_VALUE_NEGATIVE (c))
6466 /* sqrt(x) < y is always false, if y is negative. */
6467 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6468 return omit_one_operand (type, integer_zero_node, arg);
6470 /* sqrt(x) > y is always true, if y is negative and we
6471 don't care about NaNs, i.e. negative values of x. */
6472 if (code == NE_EXPR || !HONOR_NANS (mode))
6473 return omit_one_operand (type, integer_one_node, arg);
6475 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6476 return fold_build2 (GE_EXPR, type, arg,
6477 build_real (TREE_TYPE (arg), dconst0));
6479 else if (code == GT_EXPR || code == GE_EXPR)
6481 REAL_VALUE_TYPE c2;
6483 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6484 real_convert (&c2, mode, &c2);
6486 if (REAL_VALUE_ISINF (c2))
6488 /* sqrt(x) > y is x == +Inf, when y is very large. */
6489 if (HONOR_INFINITIES (mode))
6490 return fold_build2 (EQ_EXPR, type, arg,
6491 build_real (TREE_TYPE (arg), c2));
6493 /* sqrt(x) > y is always false, when y is very large
6494 and we don't care about infinities. */
6495 return omit_one_operand (type, integer_zero_node, arg);
6498 /* sqrt(x) > c is the same as x > c*c. */
6499 return fold_build2 (code, type, arg,
6500 build_real (TREE_TYPE (arg), c2));
6502 else if (code == LT_EXPR || code == LE_EXPR)
6504 REAL_VALUE_TYPE c2;
6506 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6507 real_convert (&c2, mode, &c2);
6509 if (REAL_VALUE_ISINF (c2))
6511 /* sqrt(x) < y is always true, when y is a very large
6512 value and we don't care about NaNs or Infinities. */
6513 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6514 return omit_one_operand (type, integer_one_node, arg);
6516 /* sqrt(x) < y is x != +Inf when y is very large and we
6517 don't care about NaNs. */
6518 if (! HONOR_NANS (mode))
6519 return fold_build2 (NE_EXPR, type, arg,
6520 build_real (TREE_TYPE (arg), c2));
6522 /* sqrt(x) < y is x >= 0 when y is very large and we
6523 don't care about Infinities. */
6524 if (! HONOR_INFINITIES (mode))
6525 return fold_build2 (GE_EXPR, type, arg,
6526 build_real (TREE_TYPE (arg), dconst0));
6528 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6529 if (lang_hooks.decls.global_bindings_p () != 0
6530 || CONTAINS_PLACEHOLDER_P (arg))
6531 return NULL_TREE;
6533 arg = save_expr (arg);
6534 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6535 fold_build2 (GE_EXPR, type, arg,
6536 build_real (TREE_TYPE (arg),
6537 dconst0)),
6538 fold_build2 (NE_EXPR, type, arg,
6539 build_real (TREE_TYPE (arg),
6540 c2)));
6543 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6544 if (! HONOR_NANS (mode))
6545 return fold_build2 (code, type, arg,
6546 build_real (TREE_TYPE (arg), c2));
6548 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6549 if (lang_hooks.decls.global_bindings_p () == 0
6550 && ! CONTAINS_PLACEHOLDER_P (arg))
6552 arg = save_expr (arg);
6553 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6554 fold_build2 (GE_EXPR, type, arg,
6555 build_real (TREE_TYPE (arg),
6556 dconst0)),
6557 fold_build2 (code, type, arg,
6558 build_real (TREE_TYPE (arg),
6559 c2)));
6564 return NULL_TREE;
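/* Expository examples of the folds above: sqrt(x) > 2.0 becomes
   x > 4.0; sqrt(x) < 2.0 becomes x < 4.0 when NaNs need not be
   honored, and otherwise x >= 0 && x < 4.0 so that negative x
   (whose square root is NaN) still compares false.  */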
6567 /* Subroutine of fold() that optimizes comparisons against Infinities,
6568 either +Inf or -Inf.
6570 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6571 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6572 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6574 The function returns the constant folded tree if a simplification
6575 can be made, and NULL_TREE otherwise. */
6577 static tree
6578 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6580 enum machine_mode mode;
6581 REAL_VALUE_TYPE max;
6582 tree temp;
6583 bool neg;
6585 mode = TYPE_MODE (TREE_TYPE (arg0));
6587 /* For negative infinity swap the sense of the comparison. */
6588 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6589 if (neg)
6590 code = swap_tree_comparison (code);
6592 switch (code)
6594 case GT_EXPR:
6595 /* x > +Inf is always false, if we ignore sNaNs. */
6596 if (HONOR_SNANS (mode))
6597 return NULL_TREE;
6598 return omit_one_operand (type, integer_zero_node, arg0);
6600 case LE_EXPR:
6601 /* x <= +Inf is always true, if we don't care about NaNs. */
6602 if (! HONOR_NANS (mode))
6603 return omit_one_operand (type, integer_one_node, arg0);
6605 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6606 if (lang_hooks.decls.global_bindings_p () == 0
6607 && ! CONTAINS_PLACEHOLDER_P (arg0))
6609 arg0 = save_expr (arg0);
6610 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6612 break;
6614 case EQ_EXPR:
6615 case GE_EXPR:
6616 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6617 real_maxval (&max, neg, mode);
6618 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6619 arg0, build_real (TREE_TYPE (arg0), max));
6621 case LT_EXPR:
6622 /* x < +Inf is always equal to x <= DBL_MAX. */
6623 real_maxval (&max, neg, mode);
6624 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6625 arg0, build_real (TREE_TYPE (arg0), max));
6627 case NE_EXPR:
6628 /* x != +Inf is always equal to !(x > DBL_MAX). */
6629 real_maxval (&max, neg, mode);
6630 if (! HONOR_NANS (mode))
6631 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6632 arg0, build_real (TREE_TYPE (arg0), max));
6634 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6635 arg0, build_real (TREE_TYPE (arg0), max));
6636 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6638 default:
6639 break;
6642 return NULL_TREE;
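/* Expository examples: for a double x, x < +Inf folds to
   x <= DBL_MAX and x == +Inf folds to x > DBL_MAX; for negative
   infinity the comparison sense is swapped first, so x > -Inf
   becomes x >= -DBL_MAX.  */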
6645 /* Subroutine of fold() that optimizes comparisons of a division by
6646 a nonzero integer constant against an integer constant, i.e.
6647 X/C1 op C2.
6649 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6650 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6651 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6653 The function returns the constant folded tree if a simplification
6654 can be made, and NULL_TREE otherwise. */
6656 static tree
6657 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6659 tree prod, tmp, hi, lo;
6660 tree arg00 = TREE_OPERAND (arg0, 0);
6661 tree arg01 = TREE_OPERAND (arg0, 1);
6662 unsigned HOST_WIDE_INT lpart;
6663 HOST_WIDE_INT hpart;
6664 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6665 bool neg_overflow;
6666 int overflow;
6668 /* We have to do this the hard way to detect unsigned overflow.
6669 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6670 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6671 TREE_INT_CST_HIGH (arg01),
6672 TREE_INT_CST_LOW (arg1),
6673 TREE_INT_CST_HIGH (arg1),
6674 &lpart, &hpart, unsigned_p);
6675 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6676 -1, overflow);
6677 neg_overflow = false;
6679 if (unsigned_p)
6681 tmp = int_const_binop (MINUS_EXPR, arg01,
6682 build_int_cst (TREE_TYPE (arg01), 1), 0);
6683 lo = prod;
6685 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6686 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6687 TREE_INT_CST_HIGH (prod),
6688 TREE_INT_CST_LOW (tmp),
6689 TREE_INT_CST_HIGH (tmp),
6690 &lpart, &hpart, unsigned_p);
6691 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6692 -1, overflow | TREE_OVERFLOW (prod));
6694 else if (tree_int_cst_sgn (arg01) >= 0)
6696 tmp = int_const_binop (MINUS_EXPR, arg01,
6697 build_int_cst (TREE_TYPE (arg01), 1), 0);
6698 switch (tree_int_cst_sgn (arg1))
6700 case -1:
6701 neg_overflow = true;
6702 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6703 hi = prod;
6704 break;
6706 case 0:
6707 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6708 hi = tmp;
6709 break;
6711 case 1:
6712 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6713 lo = prod;
6714 break;
6716 default:
6717 gcc_unreachable ();
6720 else
6722 /* A negative divisor reverses the relational operators. */
6723 code = swap_tree_comparison (code);
6725 tmp = int_const_binop (PLUS_EXPR, arg01,
6726 build_int_cst (TREE_TYPE (arg01), 1), 0);
6727 switch (tree_int_cst_sgn (arg1))
6729 case -1:
6730 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6731 lo = prod;
6732 break;
6734 case 0:
6735 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6736 lo = tmp;
6737 break;
6739 case 1:
6740 neg_overflow = true;
6741 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6742 hi = prod;
6743 break;
6745 default:
6746 gcc_unreachable ();
6750 switch (code)
6752 case EQ_EXPR:
6753 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6754 return omit_one_operand (type, integer_zero_node, arg00);
6755 if (TREE_OVERFLOW (hi))
6756 return fold_build2 (GE_EXPR, type, arg00, lo);
6757 if (TREE_OVERFLOW (lo))
6758 return fold_build2 (LE_EXPR, type, arg00, hi);
6759 return build_range_check (type, arg00, 1, lo, hi);
6761 case NE_EXPR:
6762 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6763 return omit_one_operand (type, integer_one_node, arg00);
6764 if (TREE_OVERFLOW (hi))
6765 return fold_build2 (LT_EXPR, type, arg00, lo);
6766 if (TREE_OVERFLOW (lo))
6767 return fold_build2 (GT_EXPR, type, arg00, hi);
6768 return build_range_check (type, arg00, 0, lo, hi);
6770 case LT_EXPR:
6771 if (TREE_OVERFLOW (lo))
6773 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6774 return omit_one_operand (type, tmp, arg00);
6776 return fold_build2 (LT_EXPR, type, arg00, lo);
6778 case LE_EXPR:
6779 if (TREE_OVERFLOW (hi))
6781 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6782 return omit_one_operand (type, tmp, arg00);
6784 return fold_build2 (LE_EXPR, type, arg00, hi);
6786 case GT_EXPR:
6787 if (TREE_OVERFLOW (hi))
6789 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6790 return omit_one_operand (type, tmp, arg00);
6792 return fold_build2 (GT_EXPR, type, arg00, hi);
6794 case GE_EXPR:
6795 if (TREE_OVERFLOW (lo))
6797 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6798 return omit_one_operand (type, tmp, arg00);
6800 return fold_build2 (GE_EXPR, type, arg00, lo);
6802 default:
6803 break;
6806 return NULL_TREE;
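/* Worked example (expository): for signed int X, X / 4 == 2 gives
   prod = 8 and tmp = 3, hence lo = 8 and hi = 11, so the comparison
   folds to a check that 8 <= X && X <= 11; similarly X / 4 > 2
   folds to X > 11.  */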
6810 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6811 equality/inequality test, then return a simplified form of the test
6812 using a sign test. Otherwise return NULL_TREE. RESULT_TYPE is the
6813 desired result type. */
6815 static tree
6816 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6817 tree result_type)
6819 /* If this is testing a single bit, we can optimize the test. */
6820 if ((code == NE_EXPR || code == EQ_EXPR)
6821 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6822 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6824 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6825 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6826 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6828 if (arg00 != NULL_TREE
6829 /* This is only a win if casting to a signed type is cheap,
6830 i.e. when arg00's type is not a partial mode. */
6831 && TYPE_PRECISION (TREE_TYPE (arg00))
6832 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6834 tree stype = signed_type_for (TREE_TYPE (arg00));
6835 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6836 result_type, fold_convert (stype, arg00),
6837 build_int_cst (stype, 0));
6841 return NULL_TREE;
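/* Expository example: for a 32-bit int x, (x & 0x80000000) != 0
   tests exactly the sign bit and folds to x < 0, while the
   corresponding == 0 form folds to x >= 0.  */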
6844 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6845 equality/inequality test, then return a simplified form of
6846 the test using shifts and logical operations. Otherwise return
6847 NULL_TREE. RESULT_TYPE is the desired result type. */
6849 tree
6850 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6851 tree result_type)
6853 /* If this is testing a single bit, we can optimize the test. */
6854 if ((code == NE_EXPR || code == EQ_EXPR)
6855 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6856 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6858 tree inner = TREE_OPERAND (arg0, 0);
6859 tree type = TREE_TYPE (arg0);
6860 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6861 enum machine_mode operand_mode = TYPE_MODE (type);
6862 int ops_unsigned;
6863 tree signed_type, unsigned_type, intermediate_type;
6864 tree tem, one;
6866 /* First, see if we can fold the single bit test into a sign-bit
6867 test. */
6868 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6869 result_type);
6870 if (tem)
6871 return tem;
6873 /* Otherwise we have (A & C) != 0 where C is a single bit,
6874 convert that into ((A >> C2) & 1), where C2 = log2(C).
6875 Similarly for (A & C) == 0. */
6877 /* If INNER is a right shift of a constant and it plus BITNUM does
6878 not overflow, adjust BITNUM and INNER. */
6879 if (TREE_CODE (inner) == RSHIFT_EXPR
6880 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6881 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6882 && bitnum < TYPE_PRECISION (type)
6883 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6884 bitnum - TYPE_PRECISION (type)))
6886 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6887 inner = TREE_OPERAND (inner, 0);
6890 /* If we are going to be able to omit the AND below, we must do our
6891 operations as unsigned. If we must use the AND, we have a choice.
6892 Normally unsigned is faster, but for some machines signed is. */
6893 #ifdef LOAD_EXTEND_OP
6894 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6895 && !flag_syntax_only) ? 0 : 1;
6896 #else
6897 ops_unsigned = 1;
6898 #endif
6900 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6901 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6902 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6903 inner = fold_convert (intermediate_type, inner);
6905 if (bitnum != 0)
6906 inner = build2 (RSHIFT_EXPR, intermediate_type,
6907 inner, size_int (bitnum));
6909 one = build_int_cst (intermediate_type, 1);
6911 if (code == EQ_EXPR)
6912 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6914 /* Put the AND last so it can combine with more things. */
6915 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6917 /* Make sure to return the proper type. */
6918 inner = fold_convert (result_type, inner);
6920 return inner;
6922 return NULL_TREE;
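/* Expository examples: (x & 8) != 0 becomes ((x >> 3) & 1), and
   (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1); the AND is emitted
   last so it can combine with whatever consumes the result.  */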
6925 /* Check whether we are allowed to reorder operands arg0 and arg1,
6926 such that the evaluation of arg1 occurs before arg0. */
6928 static bool
6929 reorder_operands_p (const_tree arg0, const_tree arg1)
6931 if (! flag_evaluation_order)
6932 return true;
6933 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6934 return true;
6935 return ! TREE_SIDE_EFFECTS (arg0)
6936 && ! TREE_SIDE_EFFECTS (arg1);
6939 /* Test whether it is preferable to swap two operands, ARG0 and
6940 ARG1, for example because ARG0 is an integer constant and ARG1
6941 isn't. If REORDER is true, only recommend swapping if we can
6942 evaluate the operands in reverse order. */
6944 bool
6945 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6947 STRIP_SIGN_NOPS (arg0);
6948 STRIP_SIGN_NOPS (arg1);
6950 if (TREE_CODE (arg1) == INTEGER_CST)
6951 return 0;
6952 if (TREE_CODE (arg0) == INTEGER_CST)
6953 return 1;
6955 if (TREE_CODE (arg1) == REAL_CST)
6956 return 0;
6957 if (TREE_CODE (arg0) == REAL_CST)
6958 return 1;
6960 if (TREE_CODE (arg1) == FIXED_CST)
6961 return 0;
6962 if (TREE_CODE (arg0) == FIXED_CST)
6963 return 1;
6965 if (TREE_CODE (arg1) == COMPLEX_CST)
6966 return 0;
6967 if (TREE_CODE (arg0) == COMPLEX_CST)
6968 return 1;
6970 if (TREE_CONSTANT (arg1))
6971 return 0;
6972 if (TREE_CONSTANT (arg0))
6973 return 1;
6975 if (optimize_size)
6976 return 0;
6978 if (reorder && flag_evaluation_order
6979 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6980 return 0;
6982 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6983 for commutative and comparison operators. Ensuring a canonical
6984 form allows the optimizers to find additional redundancies without
6985 having to explicitly check for both orderings. */
6986 if (TREE_CODE (arg0) == SSA_NAME
6987 && TREE_CODE (arg1) == SSA_NAME
6988 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6989 return 1;
6991 /* Put SSA_NAMEs last. */
6992 if (TREE_CODE (arg1) == SSA_NAME)
6993 return 0;
6994 if (TREE_CODE (arg0) == SSA_NAME)
6995 return 1;
6997 /* Put variables last. */
6998 if (DECL_P (arg1))
6999 return 0;
7000 if (DECL_P (arg0))
7001 return 1;
7003 return 0;
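/* Expository example: callers use this predicate to canonicalize
   commutative operands, so 5 + x is rewritten as x + 5 and, with
   the comparison code swapped accordingly, 5 < x as x > 5; two
   SSA_NAMEs are ordered by version number for the same reason.  */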
7006 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7007 ARG0 is extended to a wider type. */
7009 static tree
7010 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7012 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7013 tree arg1_unw;
7014 tree shorter_type, outer_type;
7015 tree min, max;
7016 bool above, below;
7018 if (arg0_unw == arg0)
7019 return NULL_TREE;
7020 shorter_type = TREE_TYPE (arg0_unw);
7022 #ifdef HAVE_canonicalize_funcptr_for_compare
7023 /* Disable this optimization if we're casting a function pointer
7024 type on targets that require function pointer canonicalization. */
7025 if (HAVE_canonicalize_funcptr_for_compare
7026 && TREE_CODE (shorter_type) == POINTER_TYPE
7027 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7028 return NULL_TREE;
7029 #endif
7031 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7032 return NULL_TREE;
7034 arg1_unw = get_unwidened (arg1, shorter_type);
7036 /* If possible, express the comparison in the shorter mode. */
7037 if ((code == EQ_EXPR || code == NE_EXPR
7038 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7039 && (TREE_TYPE (arg1_unw) == shorter_type
7040 || (TREE_CODE (arg1_unw) == INTEGER_CST
7041 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7042 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7043 && int_fits_type_p (arg1_unw, shorter_type))))
7044 return fold_build2 (code, type, arg0_unw,
7045 fold_convert (shorter_type, arg1_unw));
7047 if (TREE_CODE (arg1_unw) != INTEGER_CST
7048 || TREE_CODE (shorter_type) != INTEGER_TYPE
7049 || !int_fits_type_p (arg1_unw, shorter_type))
7050 return NULL_TREE;
7052 /* If we are comparing with an integer that does not fit into the range
7053 of the shorter type, the result is known. */
7054 outer_type = TREE_TYPE (arg1_unw);
7055 min = lower_bound_in_type (outer_type, shorter_type);
7056 max = upper_bound_in_type (outer_type, shorter_type);
7058 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7059 max, arg1_unw));
7060 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7061 arg1_unw, min));
7063 switch (code)
7065 case EQ_EXPR:
7066 if (above || below)
7067 return omit_one_operand (type, integer_zero_node, arg0);
7068 break;
7070 case NE_EXPR:
7071 if (above || below)
7072 return omit_one_operand (type, integer_one_node, arg0);
7073 break;
7075 case LT_EXPR:
7076 case LE_EXPR:
7077 if (above)
7078 return omit_one_operand (type, integer_one_node, arg0);
7079 else if (below)
7080 return omit_one_operand (type, integer_zero_node, arg0);
7082 case GT_EXPR:
7083 case GE_EXPR:
7084 if (above)
7085 return omit_one_operand (type, integer_zero_node, arg0);
7086 else if (below)
7087 return omit_one_operand (type, integer_one_node, arg0);
7089 default:
7090 break;
7093 return NULL_TREE;
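/* Expository example: with 16-bit short s and 32-bit int, the
   comparison (int) s == 1000 can be carried out in the narrower
   type as s == (short) 1000, while a constant outside the range
   representable in short makes the result of an equality or
   ordering test known at compile time.  */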
7096 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7097 ARG0 just the signedness is changed. */
7099 static tree
7100 fold_sign_changed_comparison (enum tree_code code, tree type,
7101 tree arg0, tree arg1)
7103 tree arg0_inner;
7104 tree inner_type, outer_type;
7106 if (TREE_CODE (arg0) != NOP_EXPR
7107 && TREE_CODE (arg0) != CONVERT_EXPR)
7108 return NULL_TREE;
7110 outer_type = TREE_TYPE (arg0);
7111 arg0_inner = TREE_OPERAND (arg0, 0);
7112 inner_type = TREE_TYPE (arg0_inner);
7114 #ifdef HAVE_canonicalize_funcptr_for_compare
7115 /* Disable this optimization if we're casting a function pointer
7116 type on targets that require function pointer canonicalization. */
7117 if (HAVE_canonicalize_funcptr_for_compare
7118 && TREE_CODE (inner_type) == POINTER_TYPE
7119 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7120 return NULL_TREE;
7121 #endif
7123 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7124 return NULL_TREE;
7126 if (TREE_CODE (arg1) != INTEGER_CST
7127 && !((TREE_CODE (arg1) == NOP_EXPR
7128 || TREE_CODE (arg1) == CONVERT_EXPR)
7129 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7130 return NULL_TREE;
7132 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7133 && code != NE_EXPR
7134 && code != EQ_EXPR)
7135 return NULL_TREE;
7137 if (TREE_CODE (arg1) == INTEGER_CST)
7138 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7139 TREE_INT_CST_HIGH (arg1), 0,
7140 TREE_OVERFLOW (arg1));
7141 else
7142 arg1 = fold_convert (inner_type, arg1);
7144 return fold_build2 (code, type, arg0_inner, arg1);
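/* Expository example: for int x, (unsigned) x == 5 folds to x == 5,
   since the conversion changes only the signedness and equality
   does not depend on it; an ordering test such as (unsigned) x < 5
   is left alone because its result does depend on the sign.  */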
7147 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7148 the step of the array. Reconstructs s and delta in the case of s * delta
7149 being an integer constant (and thus already folded).
7150 ADDR is the address. OP1 is the multiplicative expression.
7151 If the function succeeds, the new address expression is returned. Otherwise
7152 NULL_TREE is returned. */
7154 static tree
7155 try_move_mult_to_index (tree addr, tree op1)
7157 tree s, delta, step;
7158 tree ref = TREE_OPERAND (addr, 0), pref;
7159 tree ret, pos;
7160 tree itype;
7161 bool mdim = false;
7163 /* Strip the nops that might be added when converting op1 to sizetype. */
7164 STRIP_NOPS (op1);
7166 /* Canonicalize op1 into a possibly non-constant delta
7167 and an INTEGER_CST s. */
7168 if (TREE_CODE (op1) == MULT_EXPR)
7170 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7172 STRIP_NOPS (arg0);
7173 STRIP_NOPS (arg1);
7175 if (TREE_CODE (arg0) == INTEGER_CST)
7177 s = arg0;
7178 delta = arg1;
7180 else if (TREE_CODE (arg1) == INTEGER_CST)
7182 s = arg1;
7183 delta = arg0;
7185 else
7186 return NULL_TREE;
7188 else if (TREE_CODE (op1) == INTEGER_CST)
7190 delta = op1;
7191 s = NULL_TREE;
7193 else
7195 /* Treat op1 as delta * 1. */
7196 delta = op1;
7197 s = integer_one_node;
7200 for (;; ref = TREE_OPERAND (ref, 0))
7202 if (TREE_CODE (ref) == ARRAY_REF)
7204 /* Remember if this was a multi-dimensional array. */
7205 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7206 mdim = true;
7208 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7209 if (! itype)
7210 continue;
7212 step = array_ref_element_size (ref);
7213 if (TREE_CODE (step) != INTEGER_CST)
7214 continue;
7216 if (s)
7218 if (! tree_int_cst_equal (step, s))
7219 continue;
7221 else
7223 /* Check whether delta is a multiple of step. */
7224 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7225 if (! tmp)
7226 continue;
7227 delta = tmp;
7230 /* Only fold here if we can verify we do not overflow one
7231 dimension of a multi-dimensional array. */
7232 if (mdim)
7234 tree tmp;
7236 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7237 || !INTEGRAL_TYPE_P (itype)
7238 || !TYPE_MAX_VALUE (itype)
7239 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7240 continue;
7242 tmp = fold_binary (PLUS_EXPR, itype,
7243 fold_convert (itype,
7244 TREE_OPERAND (ref, 1)),
7245 fold_convert (itype, delta));
7246 if (!tmp
7247 || TREE_CODE (tmp) != INTEGER_CST
7248 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7249 continue;
7252 break;
7254 else
7255 mdim = false;
7257 if (!handled_component_p (ref))
7258 return NULL_TREE;
7261 /* We found a suitable array reference, so copy everything up to it
7262 and replace the index. */
7264 pref = TREE_OPERAND (addr, 0);
7265 ret = copy_node (pref);
7266 pos = ret;
7268 while (pref != ref)
7270 pref = TREE_OPERAND (pref, 0);
7271 TREE_OPERAND (pos, 0) = copy_node (pref);
7272 pos = TREE_OPERAND (pos, 0);
7275 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7276 fold_convert (itype,
7277 TREE_OPERAND (pos, 1)),
7278 fold_convert (itype, delta));
7280 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
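/* Expository examples, assuming int a[100] with a 4-byte element
   size: &a[i] p+ 4 * d becomes &a[i + d] because the constant
   factor matches the array step, and &a[i] p+ 8 becomes &a[i + 2]
   because 8 is an exact multiple of the step.  */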
7284 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7285 means A >= Y && A != MAX, but in this case we know that
7286 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7288 static tree
7289 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7291 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7293 if (TREE_CODE (bound) == LT_EXPR)
7294 a = TREE_OPERAND (bound, 0);
7295 else if (TREE_CODE (bound) == GT_EXPR)
7296 a = TREE_OPERAND (bound, 1);
7297 else
7298 return NULL_TREE;
7300 typea = TREE_TYPE (a);
7301 if (!INTEGRAL_TYPE_P (typea)
7302 && !POINTER_TYPE_P (typea))
7303 return NULL_TREE;
7305 if (TREE_CODE (ineq) == LT_EXPR)
7307 a1 = TREE_OPERAND (ineq, 1);
7308 y = TREE_OPERAND (ineq, 0);
7310 else if (TREE_CODE (ineq) == GT_EXPR)
7312 a1 = TREE_OPERAND (ineq, 0);
7313 y = TREE_OPERAND (ineq, 1);
7315 else
7316 return NULL_TREE;
7318 if (TREE_TYPE (a1) != typea)
7319 return NULL_TREE;
7321 if (POINTER_TYPE_P (typea))
7323 /* Convert the pointers to integers before taking the difference. */
7324 tree ta = fold_convert (ssizetype, a);
7325 tree ta1 = fold_convert (ssizetype, a1);
7326 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7328 else
7329 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7331 if (!diff || !integer_onep (diff))
7332 return NULL_TREE;
7334 return fold_build2 (GE_EXPR, type, a, y);
7337 /* Fold a sum or difference of at least one multiplication.
7338 Returns the folded tree or NULL if no simplification could be made. */
7340 static tree
7341 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7343 tree arg00, arg01, arg10, arg11;
7344 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7346 /* (A * C) +- (B * C) -> (A+-B) * C.
7347 (A * C) +- A -> A * (C+-1).
7348 We are most concerned about the case where C is a constant,
7349 but other combinations show up during loop reduction. Since
7350 it is not difficult, try all four possibilities. */
7352 if (TREE_CODE (arg0) == MULT_EXPR)
7354 arg00 = TREE_OPERAND (arg0, 0);
7355 arg01 = TREE_OPERAND (arg0, 1);
7357 else if (TREE_CODE (arg0) == INTEGER_CST)
7359 arg00 = build_one_cst (type);
7360 arg01 = arg0;
7362 else
7364 /* We cannot generate constant 1 for fract. */
7365 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7366 return NULL_TREE;
7367 arg00 = arg0;
7368 arg01 = build_one_cst (type);
7370 if (TREE_CODE (arg1) == MULT_EXPR)
7372 arg10 = TREE_OPERAND (arg1, 0);
7373 arg11 = TREE_OPERAND (arg1, 1);
7375 else if (TREE_CODE (arg1) == INTEGER_CST)
7377 arg10 = build_one_cst (type);
7378 arg11 = arg1;
7380 else
7382 /* We cannot generate constant 1 for fract. */
7383 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7384 return NULL_TREE;
7385 arg10 = arg1;
7386 arg11 = build_one_cst (type);
7388 same = NULL_TREE;
7390 if (operand_equal_p (arg01, arg11, 0))
7391 same = arg01, alt0 = arg00, alt1 = arg10;
7392 else if (operand_equal_p (arg00, arg10, 0))
7393 same = arg00, alt0 = arg01, alt1 = arg11;
7394 else if (operand_equal_p (arg00, arg11, 0))
7395 same = arg00, alt0 = arg01, alt1 = arg10;
7396 else if (operand_equal_p (arg01, arg10, 0))
7397 same = arg01, alt0 = arg00, alt1 = arg11;
7399 /* No identical multiplicands; see if we can find a common
7400 power-of-two factor in non-power-of-two multiplies. This
7401 can help in multi-dimensional array access. */
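/* For example, "x*12 + y*4" becomes "(x*3 + y)*4". */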
7402 else if (host_integerp (arg01, 0)
7403 && host_integerp (arg11, 0))
7405 HOST_WIDE_INT int01, int11, tmp;
7406 bool swap = false;
7407 tree maybe_same;
7408 int01 = TREE_INT_CST_LOW (arg01);
7409 int11 = TREE_INT_CST_LOW (arg11);
7411 /* Move min of absolute values to int11. */
7412 if ((int01 >= 0 ? int01 : -int01)
7413 < (int11 >= 0 ? int11 : -int11))
7415 tmp = int01, int01 = int11, int11 = tmp;
7416 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7417 maybe_same = arg01;
7418 swap = true;
7420 else
7421 maybe_same = arg11;
7423 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7425 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7426 build_int_cst (TREE_TYPE (arg00),
7427 int01 / int11));
7428 alt1 = arg10;
7429 same = maybe_same;
7430 if (swap)
7431 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7435 if (same)
7436 return fold_build2 (MULT_EXPR, type,
7437 fold_build2 (code, type,
7438 fold_convert (type, alt0),
7439 fold_convert (type, alt1)),
7440 fold_convert (type, same));
7442 return NULL_TREE;
7445 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7446 specified by EXPR into the buffer PTR of length LEN bytes.
7447 Return the number of bytes placed in the buffer, or zero
7448 upon failure. */
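/* For example, on a little-endian target the 32-bit constant
   0x01020304 is encoded as the byte sequence 04 03 02 01. */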
7450 static int
7451 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7453 tree type = TREE_TYPE (expr);
7454 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7455 int byte, offset, word, words;
7456 unsigned char value;
7458 if (total_bytes > len)
7459 return 0;
7460 words = total_bytes / UNITS_PER_WORD;
7462 for (byte = 0; byte < total_bytes; byte++)
7464 int bitpos = byte * BITS_PER_UNIT;
7465 if (bitpos < HOST_BITS_PER_WIDE_INT)
7466 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7467 else
7468 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7469 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7471 if (total_bytes > UNITS_PER_WORD)
7473 word = byte / UNITS_PER_WORD;
7474 if (WORDS_BIG_ENDIAN)
7475 word = (words - 1) - word;
7476 offset = word * UNITS_PER_WORD;
7477 if (BYTES_BIG_ENDIAN)
7478 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7479 else
7480 offset += byte % UNITS_PER_WORD;
7482 else
7483 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7484 ptr[offset] = value;
7486 return total_bytes;
7490 /* Subroutine of native_encode_expr. Encode the REAL_CST
7491 specified by EXPR into the buffer PTR of length LEN bytes.
7492 Return the number of bytes placed in the buffer, or zero
7493 upon failure. */
7495 static int
7496 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7498 tree type = TREE_TYPE (expr);
7499 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7500 int byte, offset, word, words, bitpos;
7501 unsigned char value;
7503 /* There are always 32 bits in each long, no matter the size of
7504 the host's long. We handle floating point representations with
7505 up to 192 bits. */
7506 long tmp[6];
7508 if (total_bytes > len)
7509 return 0;
7510 words = 32 / UNITS_PER_WORD;
7512 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7514 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7515 bitpos += BITS_PER_UNIT)
7517 byte = (bitpos / BITS_PER_UNIT) & 3;
7518 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7520 if (UNITS_PER_WORD < 4)
7522 word = byte / UNITS_PER_WORD;
7523 if (WORDS_BIG_ENDIAN)
7524 word = (words - 1) - word;
7525 offset = word * UNITS_PER_WORD;
7526 if (BYTES_BIG_ENDIAN)
7527 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7528 else
7529 offset += byte % UNITS_PER_WORD;
7531 else
7532 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7533 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7535 return total_bytes;
7538 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7539 specified by EXPR into the buffer PTR of length LEN bytes.
7540 Return the number of bytes placed in the buffer, or zero
7541 upon failure. */
7543 static int
7544 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7546 int rsize, isize;
7547 tree part;
7549 part = TREE_REALPART (expr);
7550 rsize = native_encode_expr (part, ptr, len);
7551 if (rsize == 0)
7552 return 0;
7553 part = TREE_IMAGPART (expr);
7554 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7555 if (isize != rsize)
7556 return 0;
7557 return rsize + isize;
7561 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7562 specified by EXPR into the buffer PTR of length LEN bytes.
7563 Return the number of bytes placed in the buffer, or zero
7564 upon failure. */
7566 static int
7567 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7569 int i, size, offset, count;
7570 tree itype, elem, elements;
7572 offset = 0;
7573 elements = TREE_VECTOR_CST_ELTS (expr);
7574 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7575 itype = TREE_TYPE (TREE_TYPE (expr));
7576 size = GET_MODE_SIZE (TYPE_MODE (itype));
7577 for (i = 0; i < count; i++)
7579 if (elements)
7581 elem = TREE_VALUE (elements);
7582 elements = TREE_CHAIN (elements);
7584 else
7585 elem = NULL_TREE;
7587 if (elem)
7589 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7590 return 0;
7592 else
7594 if (offset + size > len)
7595 return 0;
7596 memset (ptr+offset, 0, size);
7598 offset += size;
7600 return offset;
7604 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7605 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7606 buffer PTR of length LEN bytes. Return the number of bytes
7607 placed in the buffer, or zero upon failure. */
7609 int
7610 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7612 switch (TREE_CODE (expr))
7614 case INTEGER_CST:
7615 return native_encode_int (expr, ptr, len);
7617 case REAL_CST:
7618 return native_encode_real (expr, ptr, len);
7620 case COMPLEX_CST:
7621 return native_encode_complex (expr, ptr, len);
7623 case VECTOR_CST:
7624 return native_encode_vector (expr, ptr, len);
7626 default:
7627 return 0;
7632 /* Subroutine of native_interpret_expr. Interpret the contents of
7633 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7634 If the buffer cannot be interpreted, return NULL_TREE. */
7636 static tree
7637 native_interpret_int (tree type, const unsigned char *ptr, int len)
7639 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7640 int byte, offset, word, words;
7641 unsigned char value;
7642 unsigned HOST_WIDE_INT lo = 0;
7643 HOST_WIDE_INT hi = 0;
7645 if (total_bytes > len)
7646 return NULL_TREE;
7647 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7648 return NULL_TREE;
7649 words = total_bytes / UNITS_PER_WORD;
7651 for (byte = 0; byte < total_bytes; byte++)
7653 int bitpos = byte * BITS_PER_UNIT;
7654 if (total_bytes > UNITS_PER_WORD)
7656 word = byte / UNITS_PER_WORD;
7657 if (WORDS_BIG_ENDIAN)
7658 word = (words - 1) - word;
7659 offset = word * UNITS_PER_WORD;
7660 if (BYTES_BIG_ENDIAN)
7661 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7662 else
7663 offset += byte % UNITS_PER_WORD;
7665 else
7666 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7667 value = ptr[offset];
7669 if (bitpos < HOST_BITS_PER_WIDE_INT)
7670 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7671 else
7672 hi |= (unsigned HOST_WIDE_INT) value
7673 << (bitpos - HOST_BITS_PER_WIDE_INT);
7676 return build_int_cst_wide_type (type, lo, hi);
7680 /* Subroutine of native_interpret_expr. Interpret the contents of
7681 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7682 If the buffer cannot be interpreted, return NULL_TREE. */
7684 static tree
7685 native_interpret_real (tree type, const unsigned char *ptr, int len)
7687 enum machine_mode mode = TYPE_MODE (type);
7688 int total_bytes = GET_MODE_SIZE (mode);
7689 int byte, offset, word, words, bitpos;
7690 unsigned char value;
7691 /* There are always 32 bits in each long, no matter the size of
7692 the host's long. We handle floating point representations with
7693 up to 192 bits. */
7694 REAL_VALUE_TYPE r;
7695 long tmp[6];
7697 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7698 if (total_bytes > len || total_bytes > 24)
7699 return NULL_TREE;
7700 words = 32 / UNITS_PER_WORD;
7702 memset (tmp, 0, sizeof (tmp));
7703 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7704 bitpos += BITS_PER_UNIT)
7706 byte = (bitpos / BITS_PER_UNIT) & 3;
7707 if (UNITS_PER_WORD < 4)
7709 word = byte / UNITS_PER_WORD;
7710 if (WORDS_BIG_ENDIAN)
7711 word = (words - 1) - word;
7712 offset = word * UNITS_PER_WORD;
7713 if (BYTES_BIG_ENDIAN)
7714 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7715 else
7716 offset += byte % UNITS_PER_WORD;
7718 else
7719 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7720 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7722 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7725 real_from_target (&r, tmp, mode);
7726 return build_real (type, r);
7730 /* Subroutine of native_interpret_expr. Interpret the contents of
7731 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7732 If the buffer cannot be interpreted, return NULL_TREE. */
7734 static tree
7735 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7737 tree etype, rpart, ipart;
7738 int size;
7740 etype = TREE_TYPE (type);
7741 size = GET_MODE_SIZE (TYPE_MODE (etype));
7742 if (size * 2 > len)
7743 return NULL_TREE;
7744 rpart = native_interpret_expr (etype, ptr, size);
7745 if (!rpart)
7746 return NULL_TREE;
7747 ipart = native_interpret_expr (etype, ptr+size, size);
7748 if (!ipart)
7749 return NULL_TREE;
7750 return build_complex (type, rpart, ipart);
7754 /* Subroutine of native_interpret_expr. Interpret the contents of
7755 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7756 If the buffer cannot be interpreted, return NULL_TREE. */
7758 static tree
7759 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7761 tree etype, elem, elements;
7762 int i, size, count;
7764 etype = TREE_TYPE (type);
7765 size = GET_MODE_SIZE (TYPE_MODE (etype));
7766 count = TYPE_VECTOR_SUBPARTS (type);
7767 if (size * count > len)
7768 return NULL_TREE;
7770 elements = NULL_TREE;
7771 for (i = count - 1; i >= 0; i--)
7773 elem = native_interpret_expr (etype, ptr+(i*size), size);
7774 if (!elem)
7775 return NULL_TREE;
7776 elements = tree_cons (NULL_TREE, elem, elements);
7778 return build_vector (type, elements);
7782 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7783 the buffer PTR of length LEN as a constant of type TYPE. For
7784 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7785 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7786 return NULL_TREE. */
7788 tree
7789 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7791 switch (TREE_CODE (type))
7793 case INTEGER_TYPE:
7794 case ENUMERAL_TYPE:
7795 case BOOLEAN_TYPE:
7796 return native_interpret_int (type, ptr, len);
7798 case REAL_TYPE:
7799 return native_interpret_real (type, ptr, len);
7801 case COMPLEX_TYPE:
7802 return native_interpret_complex (type, ptr, len);
7804 case VECTOR_TYPE:
7805 return native_interpret_vector (type, ptr, len);
7807 default:
7808 return NULL_TREE;
7813 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7814 TYPE at compile-time. If we're unable to perform the conversion
7815 return NULL_TREE. */
7817 static tree
7818 fold_view_convert_expr (tree type, tree expr)
7820 /* We support up to 512-bit values (for V8DFmode). */
7821 unsigned char buffer[64];
7822 int len;
7824 /* Check that the host and target are sane. */
7825 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7826 return NULL_TREE;
7828 len = native_encode_expr (expr, buffer, sizeof (buffer));
7829 if (len == 0)
7830 return NULL_TREE;
7832 return native_interpret_expr (type, buffer, len);
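/* For example, on a little-endian target with IEEE single-precision
   floats, VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST as the
   bytes 00 00 80 3f and re-interprets them as the INTEGER_CST
   0x3f800000. */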
7835 /* Build an expression for the address of T. Folds away INDIRECT_REF
7836 to avoid confusing the gimplify process. When IN_FOLD is true,
7837 avoid modifying T. */
7839 static tree
7840 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7842 /* The size of the object is not relevant when talking about its address. */
7843 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7844 t = TREE_OPERAND (t, 0);
7846 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7847 if (TREE_CODE (t) == INDIRECT_REF
7848 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7850 t = TREE_OPERAND (t, 0);
7852 if (TREE_TYPE (t) != ptrtype)
7853 t = build1 (NOP_EXPR, ptrtype, t);
7855 else if (!in_fold)
7857 tree base = t;
7859 while (handled_component_p (base))
7860 base = TREE_OPERAND (base, 0);
7862 if (DECL_P (base))
7863 TREE_ADDRESSABLE (base) = 1;
7865 t = build1 (ADDR_EXPR, ptrtype, t);
7867 else
7868 t = build1 (ADDR_EXPR, ptrtype, t);
7870 return t;
7873 /* Build an expression for the address of T with type PTRTYPE. This
7874 function modifies the input parameter 'T' by sometimes setting the
7875 TREE_ADDRESSABLE flag. */
7877 tree
7878 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7880 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7883 /* Build an expression for the address of T. This function modifies
7884 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7885 flag. When called from fold functions, use fold_addr_expr instead. */
7887 tree
7888 build_fold_addr_expr (tree t)
7890 return build_fold_addr_expr_with_type_1 (t,
7891 build_pointer_type (TREE_TYPE (t)),
7892 false);
7895 /* Same as build_fold_addr_expr, builds an expression for the address
7896 of T, but avoids touching the input node 't'. Fold functions
7897 should use this version. */
7899 static tree
7900 fold_addr_expr (tree t)
7902 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7904 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7907 /* Fold a unary expression of code CODE and type TYPE with operand
7908 OP0. Return the folded expression if folding is successful.
7909 Otherwise, return NULL_TREE. */
7911 tree
7912 fold_unary (enum tree_code code, tree type, tree op0)
7914 tree tem;
7915 tree arg0;
7916 enum tree_code_class kind = TREE_CODE_CLASS (code);
7918 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7919 && TREE_CODE_LENGTH (code) == 1);
7921 arg0 = op0;
7922 if (arg0)
7924 if (code == NOP_EXPR || code == CONVERT_EXPR
7925 || code == FLOAT_EXPR || code == ABS_EXPR)
7927 /* Don't use STRIP_NOPS, because signedness of argument type
7928 matters. */
7929 STRIP_SIGN_NOPS (arg0);
7931 else
7933 /* Strip any conversions that don't change the mode. This
7934 is safe for every expression, except for a comparison
7935 expression because its signedness is derived from its
7936 operands.
7938 Note that this is done as an internal manipulation within
7939 the constant folder, in order to find the simplest
7940 representation of the arguments so that their form can be
7941 studied. In any case, the appropriate type conversions
7942 should be put back in the tree that will get out of the
7943 constant folder. */
7944 STRIP_NOPS (arg0);
7948 if (TREE_CODE_CLASS (code) == tcc_unary)
7950 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7951 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7952 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7953 else if (TREE_CODE (arg0) == COND_EXPR)
7955 tree arg01 = TREE_OPERAND (arg0, 1);
7956 tree arg02 = TREE_OPERAND (arg0, 2);
7957 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7958 arg01 = fold_build1 (code, type, arg01);
7959 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7960 arg02 = fold_build1 (code, type, arg02);
7961 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7962 arg01, arg02);
7964 /* If this was a conversion, and all we did was to move it
7965 inside the COND_EXPR, bring it back out. But leave it if
7966 it is a conversion from integer to integer and the
7967 result precision is no wider than a word since such a
7968 conversion is cheap and may be optimized away by combine,
7969 while it couldn't if it were outside the COND_EXPR. Then return
7970 so we don't get into an infinite recursion loop taking the
7971 conversion out and then back in. */
7973 if ((code == NOP_EXPR || code == CONVERT_EXPR
7974 || code == NON_LVALUE_EXPR)
7975 && TREE_CODE (tem) == COND_EXPR
7976 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7977 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7978 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7979 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7980 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7981 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7982 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7983 && (INTEGRAL_TYPE_P
7984 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7985 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7986 || flag_syntax_only))
7987 tem = build1 (code, type,
7988 build3 (COND_EXPR,
7989 TREE_TYPE (TREE_OPERAND
7990 (TREE_OPERAND (tem, 1), 0)),
7991 TREE_OPERAND (tem, 0),
7992 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7993 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7994 return tem;
7996 else if (COMPARISON_CLASS_P (arg0))
7998 if (TREE_CODE (type) == BOOLEAN_TYPE)
8000 arg0 = copy_node (arg0);
8001 TREE_TYPE (arg0) = type;
8002 return arg0;
8004 else if (TREE_CODE (type) != INTEGER_TYPE)
8005 return fold_build3 (COND_EXPR, type, arg0,
8006 fold_build1 (code, type,
8007 integer_one_node),
8008 fold_build1 (code, type,
8009 integer_zero_node));
8013 switch (code)
8015 case NOP_EXPR:
8016 case FLOAT_EXPR:
8017 case CONVERT_EXPR:
8018 case FIX_TRUNC_EXPR:
8019 if (TREE_TYPE (op0) == type)
8020 return op0;
8022 /* If we have (type) (a CMP b) and type is an integral type, return
8023 new expression involving the new type. */
8024 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8025 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8026 TREE_OPERAND (op0, 1));
8028 /* Handle cases of two conversions in a row. */
8029 if (TREE_CODE (op0) == NOP_EXPR
8030 || TREE_CODE (op0) == CONVERT_EXPR)
8032 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8033 tree inter_type = TREE_TYPE (op0);
8034 int inside_int = INTEGRAL_TYPE_P (inside_type);
8035 int inside_ptr = POINTER_TYPE_P (inside_type);
8036 int inside_float = FLOAT_TYPE_P (inside_type);
8037 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8038 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8039 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8040 int inter_int = INTEGRAL_TYPE_P (inter_type);
8041 int inter_ptr = POINTER_TYPE_P (inter_type);
8042 int inter_float = FLOAT_TYPE_P (inter_type);
8043 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8044 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8045 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8046 int final_int = INTEGRAL_TYPE_P (type);
8047 int final_ptr = POINTER_TYPE_P (type);
8048 int final_float = FLOAT_TYPE_P (type);
8049 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8050 unsigned int final_prec = TYPE_PRECISION (type);
8051 int final_unsignedp = TYPE_UNSIGNED (type);
8053 /* In addition to the cases of two conversions in a row
8054 handled below, if we are converting something to its own
8055 type via an object of identical or wider precision, neither
8056 conversion is needed. */
8057 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8058 && (((inter_int || inter_ptr) && final_int)
8059 || (inter_float && final_float))
8060 && inter_prec >= final_prec)
8061 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8063 /* Likewise, if the intermediate and final types are either both
8064 float or both integer, we don't need the middle conversion if
8065 it is wider than the final type and doesn't change the signedness
8066 (for integers). Avoid this if the final type is a pointer
8067 since then we sometimes need the inner conversion. Likewise if
8068 the outer has a precision not equal to the size of its mode. */
8069 if (((inter_int && inside_int)
8070 || (inter_float && inside_float)
8071 || (inter_vec && inside_vec))
8072 && inter_prec >= inside_prec
8073 && (inter_float || inter_vec
8074 || inter_unsignedp == inside_unsignedp)
8075 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8076 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8077 && ! final_ptr
8078 && (! final_vec || inter_prec == inside_prec))
8079 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8081 /* If we have a sign-extension of a zero-extended value, we can
8082 replace that by a single zero-extension. */
8083 if (inside_int && inter_int && final_int
8084 && inside_prec < inter_prec && inter_prec < final_prec
8085 && inside_unsignedp && !inter_unsignedp)
8086 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8088 /* Two conversions in a row are not needed unless:
8089 - some conversion is floating-point (overstrict for now), or
8090 - some conversion is a vector (overstrict for now), or
8091 - the intermediate type is narrower than both initial and
8092 final, or
8093 - the intermediate type and innermost type differ in signedness,
8094 and the outermost type is wider than the intermediate, or
8095 - the initial type is a pointer type and the precisions of the
8096 intermediate and final types differ, or
8097 - the final type is a pointer type and the precisions of the
8098 initial and intermediate types differ, or
8099 - the initial type is a pointer to an array and the final type
8100 is not. */
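/* For example, with 32-bit int and 64-bit long, "(int)(long)x"
   for an int x folds to plain "x": both conversions are integral,
   the intermediate type is wider than the initial and final types,
   and no pointers or signedness changes are involved. */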
8101 if (! inside_float && ! inter_float && ! final_float
8102 && ! inside_vec && ! inter_vec && ! final_vec
8103 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8104 && ! (inside_int && inter_int
8105 && inter_unsignedp != inside_unsignedp
8106 && inter_prec < final_prec)
8107 && ((inter_unsignedp && inter_prec > inside_prec)
8108 == (final_unsignedp && final_prec > inter_prec))
8109 && ! (inside_ptr && inter_prec != final_prec)
8110 && ! (final_ptr && inside_prec != inter_prec)
8111 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8112 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8113 && ! (inside_ptr && final_ptr
8114 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
8115 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8116 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8119 /* Handle (T *)&A.B.C for A being of type T and B and C
8120 living at offset zero. This occurs frequently in
8121 C++ upcasting and then accessing the base. */
8122 if (TREE_CODE (op0) == ADDR_EXPR
8123 && POINTER_TYPE_P (type)
8124 && handled_component_p (TREE_OPERAND (op0, 0)))
8126 HOST_WIDE_INT bitsize, bitpos;
8127 tree offset;
8128 enum machine_mode mode;
8129 int unsignedp, volatilep;
8130 tree base = TREE_OPERAND (op0, 0);
8131 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8132 &mode, &unsignedp, &volatilep, false);
8133 /* If the reference was to a (constant) zero offset, we can use
8134 the address of the base if it has the same base type
8135 as the result type. */
8136 if (! offset && bitpos == 0
8137 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8138 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8139 return fold_convert (type, fold_addr_expr (base));
8142 if ((TREE_CODE (op0) == MODIFY_EXPR
8143 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
8144 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
8145 /* Detect assigning a bitfield. */
8146 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
8147 && DECL_BIT_FIELD
8148 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
8150 /* Don't leave an assignment inside a conversion
8151 unless assigning a bitfield. */
8152 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
8153 /* First do the assignment, then return converted constant. */
8154 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8155 TREE_NO_WARNING (tem) = 1;
8156 TREE_USED (tem) = 1;
8157 return tem;
8160 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8161 constant (if x has signed type, the sign bit cannot be set
8162 in c). This folds extension into the BIT_AND_EXPR. */
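/* For example, "(unsigned long)(x & 0xff)" with an unsigned int x
   becomes "(unsigned long)x & 0xff", so the extension is applied
   to the bare variable. */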
8163 if (INTEGRAL_TYPE_P (type)
8164 && TREE_CODE (type) != BOOLEAN_TYPE
8165 && TREE_CODE (op0) == BIT_AND_EXPR
8166 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8168 tree and = op0;
8169 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8170 int change = 0;
8172 if (TYPE_UNSIGNED (TREE_TYPE (and))
8173 || (TYPE_PRECISION (type)
8174 <= TYPE_PRECISION (TREE_TYPE (and))))
8175 change = 1;
8176 else if (TYPE_PRECISION (TREE_TYPE (and1))
8177 <= HOST_BITS_PER_WIDE_INT
8178 && host_integerp (and1, 1))
8180 unsigned HOST_WIDE_INT cst;
8182 cst = tree_low_cst (and1, 1);
8183 cst &= (HOST_WIDE_INT) -1
8184 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8185 change = (cst == 0);
8186 #ifdef LOAD_EXTEND_OP
8187 if (change
8188 && !flag_syntax_only
8189 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8190 == ZERO_EXTEND))
8192 tree uns = unsigned_type_for (TREE_TYPE (and0));
8193 and0 = fold_convert (uns, and0);
8194 and1 = fold_convert (uns, and1);
8196 #endif
8198 if (change)
8200 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8201 TREE_INT_CST_HIGH (and1), 0,
8202 TREE_OVERFLOW (and1));
8203 return fold_build2 (BIT_AND_EXPR, type,
8204 fold_convert (type, and0), tem);
8208 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8209 when one of the new casts will fold away. Conservatively we assume
8210 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8211 if (POINTER_TYPE_P (type)
8212 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8213 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8214 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8215 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8217 tree arg00 = TREE_OPERAND (arg0, 0);
8218 tree arg01 = TREE_OPERAND (arg0, 1);
8220 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8221 fold_convert (sizetype, arg01));
8224 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8225 of the same precision, and X has an integer type not narrower than
8226 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
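/* For example, "(int)~(unsigned int)x" for an int x becomes "~x",
   since int and unsigned int have the same precision and the inner
   cast is not an extension. */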
8227 if (INTEGRAL_TYPE_P (type)
8228 && TREE_CODE (op0) == BIT_NOT_EXPR
8229 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8230 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8231 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8232 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8234 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8235 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8236 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8237 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8240 tem = fold_convert_const (code, type, op0);
8241 return tem ? tem : NULL_TREE;
8243 case FIXED_CONVERT_EXPR:
8244 tem = fold_convert_const (code, type, arg0);
8245 return tem ? tem : NULL_TREE;
8247 case VIEW_CONVERT_EXPR:
8248 if (TREE_TYPE (op0) == type)
8249 return op0;
8250 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8251 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8252 return fold_view_convert_expr (type, op0);
8254 case NEGATE_EXPR:
8255 tem = fold_negate_expr (arg0);
8256 if (tem)
8257 return fold_convert (type, tem);
8258 return NULL_TREE;
8260 case ABS_EXPR:
8261 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8262 return fold_abs_const (arg0, type);
8263 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8264 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8265 /* Convert fabs((double)float) into (double)fabsf(float). */
8266 else if (TREE_CODE (arg0) == NOP_EXPR
8267 && TREE_CODE (type) == REAL_TYPE)
8269 tree targ0 = strip_float_extensions (arg0);
8270 if (targ0 != arg0)
8271 return fold_convert (type, fold_build1 (ABS_EXPR,
8272 TREE_TYPE (targ0),
8273 targ0));
8275 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8276 else if (TREE_CODE (arg0) == ABS_EXPR)
8277 return arg0;
8278 else if (tree_expr_nonnegative_p (arg0))
8279 return arg0;
8281 /* Strip sign ops from argument. */
8282 if (TREE_CODE (type) == REAL_TYPE)
8284 tem = fold_strip_sign_ops (arg0);
8285 if (tem)
8286 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8288 return NULL_TREE;
8290 case CONJ_EXPR:
8291 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8292 return fold_convert (type, arg0);
8293 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8295 tree itype = TREE_TYPE (type);
8296 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8297 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8298 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8300 if (TREE_CODE (arg0) == COMPLEX_CST)
8302 tree itype = TREE_TYPE (type);
8303 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8304 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8305 return build_complex (type, rpart, negate_expr (ipart));
8307 if (TREE_CODE (arg0) == CONJ_EXPR)
8308 return fold_convert (type, TREE_OPERAND (arg0, 0));
8309 return NULL_TREE;
8311 case BIT_NOT_EXPR:
8312 if (TREE_CODE (arg0) == INTEGER_CST)
8313 return fold_not_const (arg0, type);
8314 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8315 return fold_convert (type, TREE_OPERAND (arg0, 0));
8316 /* Convert ~ (-A) to A - 1. */
8317 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8318 return fold_build2 (MINUS_EXPR, type,
8319 fold_convert (type, TREE_OPERAND (arg0, 0)),
8320 build_int_cst (type, 1));
8321 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8322 else if (INTEGRAL_TYPE_P (type)
8323 && ((TREE_CODE (arg0) == MINUS_EXPR
8324 && integer_onep (TREE_OPERAND (arg0, 1)))
8325 || (TREE_CODE (arg0) == PLUS_EXPR
8326 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8327 return fold_build1 (NEGATE_EXPR, type,
8328 fold_convert (type, TREE_OPERAND (arg0, 0)));
8329 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8330 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8331 && (tem = fold_unary (BIT_NOT_EXPR, type,
8332 fold_convert (type,
8333 TREE_OPERAND (arg0, 0)))))
8334 return fold_build2 (BIT_XOR_EXPR, type, tem,
8335 fold_convert (type, TREE_OPERAND (arg0, 1)));
8336 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8337 && (tem = fold_unary (BIT_NOT_EXPR, type,
8338 fold_convert (type,
8339 TREE_OPERAND (arg0, 1)))))
8340 return fold_build2 (BIT_XOR_EXPR, type,
8341 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8342 /* Perform BIT_NOT_EXPR on each element individually. */
8343 else if (TREE_CODE (arg0) == VECTOR_CST)
8345 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8346 int count = TYPE_VECTOR_SUBPARTS (type), i;
8348 for (i = 0; i < count; i++)
8350 if (elements)
8352 elem = TREE_VALUE (elements);
8353 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8354 if (elem == NULL_TREE)
8355 break;
8356 elements = TREE_CHAIN (elements);
8358 else
8359 elem = build_int_cst (TREE_TYPE (type), -1);
8360 list = tree_cons (NULL_TREE, elem, list);
8362 if (i == count)
8363 return build_vector (type, nreverse (list));
8366 return NULL_TREE;
8368 case TRUTH_NOT_EXPR:
8369 /* The argument to invert_truthvalue must have Boolean type. */
8370 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8371 arg0 = fold_convert (boolean_type_node, arg0);
8373 /* Note that the operand of this must be an int
8374 and its values must be 0 or 1.
8375 ("true" is a fixed value perhaps depending on the language,
8376 but we don't handle values other than 1 correctly yet.) */
8377 tem = fold_truth_not_expr (arg0);
8378 if (!tem)
8379 return NULL_TREE;
8380 return fold_convert (type, tem);
8382 case REALPART_EXPR:
8383 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8384 return fold_convert (type, arg0);
8385 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8386 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8387 TREE_OPERAND (arg0, 1));
8388 if (TREE_CODE (arg0) == COMPLEX_CST)
8389 return fold_convert (type, TREE_REALPART (arg0));
8390 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8392 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8393 tem = fold_build2 (TREE_CODE (arg0), itype,
8394 fold_build1 (REALPART_EXPR, itype,
8395 TREE_OPERAND (arg0, 0)),
8396 fold_build1 (REALPART_EXPR, itype,
8397 TREE_OPERAND (arg0, 1)));
8398 return fold_convert (type, tem);
8400 if (TREE_CODE (arg0) == CONJ_EXPR)
8402 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8403 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8404 return fold_convert (type, tem);
8406 if (TREE_CODE (arg0) == CALL_EXPR)
8408 tree fn = get_callee_fndecl (arg0);
8409 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8410 switch (DECL_FUNCTION_CODE (fn))
8412 CASE_FLT_FN (BUILT_IN_CEXPI):
8413 fn = mathfn_built_in (type, BUILT_IN_COS);
8414 if (fn)
8415 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8416 break;
8418 default:
8419 break;
8422 return NULL_TREE;
8424 case IMAGPART_EXPR:
8425 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8426 return fold_convert (type, integer_zero_node);
8427 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8428 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8429 TREE_OPERAND (arg0, 0));
8430 if (TREE_CODE (arg0) == COMPLEX_CST)
8431 return fold_convert (type, TREE_IMAGPART (arg0));
8432 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8434 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8435 tem = fold_build2 (TREE_CODE (arg0), itype,
8436 fold_build1 (IMAGPART_EXPR, itype,
8437 TREE_OPERAND (arg0, 0)),
8438 fold_build1 (IMAGPART_EXPR, itype,
8439 TREE_OPERAND (arg0, 1)));
8440 return fold_convert (type, tem);
8442 if (TREE_CODE (arg0) == CONJ_EXPR)
8444 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8445 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8446 return fold_convert (type, negate_expr (tem));
8448 if (TREE_CODE (arg0) == CALL_EXPR)
8450 tree fn = get_callee_fndecl (arg0);
8451 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8452 switch (DECL_FUNCTION_CODE (fn))
8454 CASE_FLT_FN (BUILT_IN_CEXPI):
8455 fn = mathfn_built_in (type, BUILT_IN_SIN);
8456 if (fn)
8457 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8458 break;
8460 default:
8461 break;
8464 return NULL_TREE;
8466 default:
8467 return NULL_TREE;
8468 } /* switch (code) */
8471 /* Fold a binary expression of code CODE and type TYPE with operands
8472 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8473 Return the folded expression if folding is successful. Otherwise,
8474 return NULL_TREE. */
8476 static tree
8477 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8479 enum tree_code compl_code;
8481 if (code == MIN_EXPR)
8482 compl_code = MAX_EXPR;
8483 else if (code == MAX_EXPR)
8484 compl_code = MIN_EXPR;
8485 else
8486 gcc_unreachable ();
8488 /* MIN (MAX (a, b), b) == b. */
8489 if (TREE_CODE (op0) == compl_code
8490 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8491 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8493 /* MIN (MAX (b, a), b) == b. */
8494 if (TREE_CODE (op0) == compl_code
8495 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8496 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8497 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8499 /* MIN (a, MAX (a, b)) == a. */
8500 if (TREE_CODE (op1) == compl_code
8501 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8502 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8503 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8505 /* MIN (a, MAX (b, a)) == a. */
8506 if (TREE_CODE (op1) == compl_code
8507 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8508 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8509 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8511 return NULL_TREE;
8514 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8515 by changing CODE to reduce the magnitude of constants involved in
8516 ARG0 of the comparison.
8517 Returns a canonicalized comparison tree if a simplification was
8518 possible, otherwise returns NULL_TREE.
8519 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8520 valid if signed overflow is undefined. */
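/* For example, "a - 1 < b" becomes "a <= b", and "2 <= b" becomes
   "b > 1" with the constant moved to the second position. */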
8522 static tree
8523 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8524 tree arg0, tree arg1,
8525 bool *strict_overflow_p)
8527 enum tree_code code0 = TREE_CODE (arg0);
8528 tree t, cst0 = NULL_TREE;
8529 int sgn0;
8530 bool swap = false;
8532 /* Match A +- CST code arg1 and CST code arg1. */
8533 if (!(((code0 == MINUS_EXPR
8534 || code0 == PLUS_EXPR)
8535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8536 || code0 == INTEGER_CST))
8537 return NULL_TREE;
8539 /* Identify the constant in arg0 and its sign. */
8540 if (code0 == INTEGER_CST)
8541 cst0 = arg0;
8542 else
8543 cst0 = TREE_OPERAND (arg0, 1);
8544 sgn0 = tree_int_cst_sgn (cst0);
8546 /* Overflowed constants and zero will cause problems. */
8547 if (integer_zerop (cst0)
8548 || TREE_OVERFLOW (cst0))
8549 return NULL_TREE;
8551 /* See if we can reduce the magnitude of the constant in
8552 arg0 by changing the comparison code. */
8553 if (code0 == INTEGER_CST)
8555 /* CST <= arg1 -> CST-1 < arg1. */
8556 if (code == LE_EXPR && sgn0 == 1)
8557 code = LT_EXPR;
8558 /* -CST < arg1 -> -CST-1 <= arg1. */
8559 else if (code == LT_EXPR && sgn0 == -1)
8560 code = LE_EXPR;
8561 /* CST > arg1 -> CST-1 >= arg1. */
8562 else if (code == GT_EXPR && sgn0 == 1)
8563 code = GE_EXPR;
8564 /* -CST >= arg1 -> -CST-1 > arg1. */
8565 else if (code == GE_EXPR && sgn0 == -1)
8566 code = GT_EXPR;
8567 else
8568 return NULL_TREE;
8569 /* arg1 code' CST' might be more canonical. */
8570 swap = true;
8572 else
8574 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8575 if (code == LT_EXPR
8576 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8577 code = LE_EXPR;
8578 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8579 else if (code == GT_EXPR
8580 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8581 code = GE_EXPR;
8582 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8583 else if (code == LE_EXPR
8584 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8585 code = LT_EXPR;
8586 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8587 else if (code == GE_EXPR
8588 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8589 code = GT_EXPR;
8590 else
8591 return NULL_TREE;
8592 *strict_overflow_p = true;
8595 /* Now build the constant reduced in magnitude. */
8596 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8597 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8598 if (code0 != INTEGER_CST)
8599 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8601 /* If swapping might yield a more canonical form, do so. */
8602 if (swap)
8603 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8604 else
8605 return fold_build2 (code, type, t, arg1);
8608 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8609 overflow further. Try to decrease the magnitude of constants involved
8610 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8611 and put sole constants at the second argument position.
8612 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8614 static tree
8615 maybe_canonicalize_comparison (enum tree_code code, tree type,
8616 tree arg0, tree arg1)
8618 tree t;
8619 bool strict_overflow_p;
8620 const char * const warnmsg = G_("assuming signed overflow does not occur "
8621 "when reducing constant in comparison");
8623 /* In principle pointers also have undefined overflow behavior,
8624 but that causes problems elsewhere. */
8625 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8626 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8627 return NULL_TREE;
8629 /* Try canonicalization by simplifying arg0. */
8630 strict_overflow_p = false;
8631 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8632 &strict_overflow_p);
8633 if (t)
8635 if (strict_overflow_p)
8636 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8637 return t;
8640 /* Try canonicalization by simplifying arg1 using the swapped
8641 comparison. */
8642 code = swap_tree_comparison (code);
8643 strict_overflow_p = false;
8644 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8645 &strict_overflow_p);
8646 if (t && strict_overflow_p)
8647 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8648 return t;
8651 /* Subroutine of fold_binary. This routine performs all of the
8652 transformations that are common to the equality/inequality
8653 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8654 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8655 fold_binary itself should call fold_binary instead. Fold a comparison with
8656 tree code CODE and type TYPE with operands OP0 and OP1. Return
8657 the folded comparison or NULL_TREE. */
8659 static tree
8660 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8662 tree arg0, arg1, tem;
8664 arg0 = op0;
8665 arg1 = op1;
8667 STRIP_SIGN_NOPS (arg0);
8668 STRIP_SIGN_NOPS (arg1);
8670 tem = fold_relational_const (code, type, arg0, arg1);
8671 if (tem != NULL_TREE)
8672 return tem;
8674 /* If one arg is a real or integer constant, put it last. */
8675 if (tree_swap_operands_p (arg0, arg1, true))
8676 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8678 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
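/* For example, "x + 2 < 5" becomes "x < 3" for signed x; if
   computing C2 -+ C1 overflows, the comparison folds to a constant
   result as if compared against INT_MIN/INT_MAX. */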
8679 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8680 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8681 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8682 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8683 && (TREE_CODE (arg1) == INTEGER_CST
8684 && !TREE_OVERFLOW (arg1)))
8686 tree const1 = TREE_OPERAND (arg0, 1);
8687 tree const2 = arg1;
8688 tree variable = TREE_OPERAND (arg0, 0);
8689 tree lhs;
8690 int lhs_add;
8691 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8693 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8694 TREE_TYPE (arg1), const2, const1);
8696 /* If the constant operation overflowed this can be
8697 simplified as a comparison against INT_MAX/INT_MIN. */
8698 if (TREE_CODE (lhs) == INTEGER_CST
8699 && TREE_OVERFLOW (lhs))
8701 int const1_sgn = tree_int_cst_sgn (const1);
8702 enum tree_code code2 = code;
8704 /* Get the sign of the constant on the lhs as if the
8705 operation were VARIABLE + CONST1. */
8706 if (TREE_CODE (arg0) == MINUS_EXPR)
8707 const1_sgn = -const1_sgn;
8709 /* The sign of the constant determines if we overflowed
8710 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8711 Canonicalize to the INT_MIN overflow by swapping the comparison
8712 if necessary. */
8713 if (const1_sgn == -1)
8714 code2 = swap_tree_comparison (code);
8716 /* We now can look at the canonicalized case
8717 VARIABLE + 1 CODE2 INT_MIN
8718 and decide on the result. */
8719 if (code2 == LT_EXPR
8720 || code2 == LE_EXPR
8721 || code2 == EQ_EXPR)
8722 return omit_one_operand (type, boolean_false_node, variable);
8723 else if (code2 == NE_EXPR
8724 || code2 == GE_EXPR
8725 || code2 == GT_EXPR)
8726 return omit_one_operand (type, boolean_true_node, variable);
8729 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8730 && (TREE_CODE (lhs) != INTEGER_CST
8731 || !TREE_OVERFLOW (lhs)))
8733 fold_overflow_warning (("assuming signed overflow does not occur "
8734 "when changing X +- C1 cmp C2 to "
8735 "X cmp C1 +- C2"),
8736 WARN_STRICT_OVERFLOW_COMPARISON);
8737 return fold_build2 (code, type, variable, lhs);
8741 /* For comparisons of pointers we can decompose it to a compile time
8742 comparison of the base objects and the offsets into the object.
8743 This requires at least one operand being an ADDR_EXPR or a
8744 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
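/* For example, "&a[1] == &a[0]" decomposes to the common base "a"
   with differing constant bit positions and folds to false. */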
8745 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8746 && (TREE_CODE (arg0) == ADDR_EXPR
8747 || TREE_CODE (arg1) == ADDR_EXPR
8748 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8749 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8751 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8752 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8753 enum machine_mode mode;
8754 int volatilep, unsignedp;
8755 bool indirect_base0 = false;
8757 /* Get base and offset for the access. Strip ADDR_EXPR for
8758 get_inner_reference, but put it back by stripping INDIRECT_REF
8759 off the base object if possible. */
8760 base0 = arg0;
8761 if (TREE_CODE (arg0) == ADDR_EXPR)
8763 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8764 &bitsize, &bitpos0, &offset0, &mode,
8765 &unsignedp, &volatilep, false);
8766 if (TREE_CODE (base0) == INDIRECT_REF)
8767 base0 = TREE_OPERAND (base0, 0);
8768 else
8769 indirect_base0 = true;
8771 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8773 base0 = TREE_OPERAND (arg0, 0);
8774 offset0 = TREE_OPERAND (arg0, 1);
8777 base1 = arg1;
8778 if (TREE_CODE (arg1) == ADDR_EXPR)
8780 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8781 &bitsize, &bitpos1, &offset1, &mode,
8782 &unsignedp, &volatilep, false);
8783 /* Make base1 indirect/non-indirect to match what we did
8784 for base0. */
8785 if (TREE_CODE (base1) == INDIRECT_REF
8786 && !indirect_base0)
8787 base1 = TREE_OPERAND (base1, 0);
8788 else if (!indirect_base0)
8789 base1 = NULL_TREE;
8791 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8793 base1 = TREE_OPERAND (arg1, 0);
8794 offset1 = TREE_OPERAND (arg1, 1);
8796 else if (indirect_base0)
8797 base1 = NULL_TREE;
8799 /* If we have equivalent bases we might be able to simplify. */
8800 if (base0 && base1
8801 && operand_equal_p (base0, base1, 0))
8803 /* We can fold this expression to a constant if the non-constant
8804 offset parts are equal. */
8805 if (offset0 == offset1
8806 || (offset0 && offset1
8807 && operand_equal_p (offset0, offset1, 0)))
8809 switch (code)
8811 case EQ_EXPR:
8812 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8813 case NE_EXPR:
8814 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8815 case LT_EXPR:
8816 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8817 case LE_EXPR:
8818 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8819 case GE_EXPR:
8820 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8821 case GT_EXPR:
8822 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8823 default:;
8826 /* We can simplify the comparison to a comparison of the variable
8827 offset parts if the constant offset parts are equal.
8828 Be careful to use signed size type here because otherwise we
8829 mess with array offsets in the wrong way. This is possible
8830 because pointer arithmetic is restricted to remain within an
8831 object and overflow on pointer differences is undefined as of
8832 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8833 else if (bitpos0 == bitpos1)
8835 tree signed_size_type_node;
8836 signed_size_type_node = signed_type_for (size_type_node);
8838 /* By converting to signed size type we cover middle-end pointer
8839 arithmetic, which operates on unsigned types of the same
8840 width as size type, and ARRAY_REF offsets, which are properly sign or
8841 zero extended from their type in case it is narrower than
8842 size type. */
8843 if (offset0 == NULL_TREE)
8844 offset0 = build_int_cst (signed_size_type_node, 0);
8845 else
8846 offset0 = fold_convert (signed_size_type_node, offset0);
8847 if (offset1 == NULL_TREE)
8848 offset1 = build_int_cst (signed_size_type_node, 0);
8849 else
8850 offset1 = fold_convert (signed_size_type_node, offset1);
8852 return fold_build2 (code, type, offset0, offset1);
8857 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8858 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8859 the resulting offset is smaller in absolute value than the
8860 original one. */
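/* For example, "x + 1 < y + 3" becomes "x < y + 2"; the combined
   constant 2 is smaller in absolute value than 3 and does not
   overflow. */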
8861 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8862 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8863 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8864 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8865 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8866 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8867 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8869 tree const1 = TREE_OPERAND (arg0, 1);
8870 tree const2 = TREE_OPERAND (arg1, 1);
8871 tree variable1 = TREE_OPERAND (arg0, 0);
8872 tree variable2 = TREE_OPERAND (arg1, 0);
8873 tree cst;
8874 const char * const warnmsg = G_("assuming signed overflow does not "
8875 "occur when combining constants around "
8876 "a comparison");
8878 /* Put the constant on the side where it doesn't overflow and is
8879 of lower absolute value than before. */
8880 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8881 ? MINUS_EXPR : PLUS_EXPR,
8882 const2, const1, 0);
8883 if (!TREE_OVERFLOW (cst)
8884 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8886 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8887 return fold_build2 (code, type,
8888 variable1,
8889 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8890 variable2, cst));
8893 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8894 ? MINUS_EXPR : PLUS_EXPR,
8895 const1, const2, 0);
8896 if (!TREE_OVERFLOW (cst)
8897 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8899 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8900 return fold_build2 (code, type,
8901 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8902 variable1, cst),
8903 variable2);
8907 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8908 signed arithmetic case. That form is created by the compiler
8909 often enough for folding it to be of value. One example is in
8910 computing loop trip counts after Operator Strength Reduction. */
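/* For example, "x * 4 > 0" becomes "x > 0", and "x * -4 > 0"
   becomes "x < 0" since the negative multiplier swaps the sense
   of the comparison. */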
8911 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8912 && TREE_CODE (arg0) == MULT_EXPR
8913 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8914 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8915 && integer_zerop (arg1))
8917 tree const1 = TREE_OPERAND (arg0, 1);
8918 tree const2 = arg1; /* zero */
8919 tree variable1 = TREE_OPERAND (arg0, 0);
8920 enum tree_code cmp_code = code;
8922 gcc_assert (!integer_zerop (const1));
8924 fold_overflow_warning (("assuming signed overflow does not occur when "
8925 "eliminating multiplication in comparison "
8926 "with zero"),
8927 WARN_STRICT_OVERFLOW_COMPARISON);
8929 /* If const1 is negative we swap the sense of the comparison. */
8930 if (tree_int_cst_sgn (const1) < 0)
8931 cmp_code = swap_tree_comparison (cmp_code);
8933 return fold_build2 (cmp_code, type, variable1, const2);
8936 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8937 if (tem)
8938 return tem;
8940 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8942 tree targ0 = strip_float_extensions (arg0);
8943 tree targ1 = strip_float_extensions (arg1);
8944 tree newtype = TREE_TYPE (targ0);
8946 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8947 newtype = TREE_TYPE (targ1);
8949 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8950 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8951 return fold_build2 (code, type, fold_convert (newtype, targ0),
8952 fold_convert (newtype, targ1));
8954 /* (-a) CMP (-b) -> b CMP a */
8955 if (TREE_CODE (arg0) == NEGATE_EXPR
8956 && TREE_CODE (arg1) == NEGATE_EXPR)
8957 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8958 TREE_OPERAND (arg0, 0));
8960 if (TREE_CODE (arg1) == REAL_CST)
8962 REAL_VALUE_TYPE cst;
8963 cst = TREE_REAL_CST (arg1);
8965 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8966 if (TREE_CODE (arg0) == NEGATE_EXPR)
8967 return fold_build2 (swap_tree_comparison (code), type,
8968 TREE_OPERAND (arg0, 0),
8969 build_real (TREE_TYPE (arg1),
8970 REAL_VALUE_NEGATE (cst)));
8972 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8973 /* a CMP (-0) -> a CMP 0 */
8974 if (REAL_VALUE_MINUS_ZERO (cst))
8975 return fold_build2 (code, type, arg0,
8976 build_real (TREE_TYPE (arg1), dconst0));
8978 /* x != NaN is always true, other ops are always false. */
8979 if (REAL_VALUE_ISNAN (cst)
8980 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8982 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8983 return omit_one_operand (type, tem, arg0);
8986 /* Fold comparisons against infinity. */
8987 if (REAL_VALUE_ISINF (cst))
8989 tem = fold_inf_compare (code, type, arg0, arg1);
8990 if (tem != NULL_TREE)
8991 return tem;
8995 /* If this is a comparison of a real constant with a PLUS_EXPR
8996 or a MINUS_EXPR of a real constant, we can convert it into a
8997 comparison with a revised real constant as long as no overflow
8998 occurs when unsafe_math_optimizations are enabled. */
8999 if (flag_unsafe_math_optimizations
9000 && TREE_CODE (arg1) == REAL_CST
9001 && (TREE_CODE (arg0) == PLUS_EXPR
9002 || TREE_CODE (arg0) == MINUS_EXPR)
9003 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9004 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9005 ? MINUS_EXPR : PLUS_EXPR,
9006 arg1, TREE_OPERAND (arg0, 1), 0))
9007 && !TREE_OVERFLOW (tem))
9008 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9010 /* Likewise, we can simplify a comparison of a real constant with
9011 a MINUS_EXPR whose first operand is also a real constant, i.e.
9012 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9013 floating-point types only if -fassociative-math is set. */
9014 if (flag_associative_math
9015 && TREE_CODE (arg1) == REAL_CST
9016 && TREE_CODE (arg0) == MINUS_EXPR
9017 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9018 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9019 arg1, 0))
9020 && !TREE_OVERFLOW (tem))
9021 return fold_build2 (swap_tree_comparison (code), type,
9022 TREE_OPERAND (arg0, 1), tem);
9024 /* Fold comparisons against built-in math functions. */
9025 if (TREE_CODE (arg1) == REAL_CST
9026 && flag_unsafe_math_optimizations
9027 && ! flag_errno_math)
9029 enum built_in_function fcode = builtin_mathfn_code (arg0);
9031 if (fcode != END_BUILTINS)
9033 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9034 if (tem != NULL_TREE)
9035 return tem;
9040 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9041 && (TREE_CODE (arg0) == NOP_EXPR
9042 || TREE_CODE (arg0) == CONVERT_EXPR))
9044 /* If we are widening one operand of an integer comparison,
9045 see if the other operand is similarly being widened. Perhaps we
9046 can do the comparison in the narrower type. */
9047 tem = fold_widened_comparison (code, type, arg0, arg1);
9048 if (tem)
9049 return tem;
9051 /* Or if we are changing signedness. */
9052 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9053 if (tem)
9054 return tem;
9057 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9058 constant, we can simplify it. */
9059 if (TREE_CODE (arg1) == INTEGER_CST
9060 && (TREE_CODE (arg0) == MIN_EXPR
9061 || TREE_CODE (arg0) == MAX_EXPR)
9062 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9064 tem = optimize_minmax_comparison (code, type, op0, op1);
9065 if (tem)
9066 return tem;
9069 /* Simplify comparison of something with itself. (For IEEE
9070 floating-point, we can only do some of these simplifications.) */
9071 if (operand_equal_p (arg0, arg1, 0))
9073 switch (code)
9075 case EQ_EXPR:
9076 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9077 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9078 return constant_boolean_node (1, type);
9079 break;
9081 case GE_EXPR:
9082 case LE_EXPR:
9083 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9084 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9085 return constant_boolean_node (1, type);
9086 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9088 case NE_EXPR:
9089 /* For NE, we can only do this simplification if the type is
9090 integer or we don't honor IEEE floating point NaNs. */
9091 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9092 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9093 break;
9094 /* ... fall through ... */
9095 case GT_EXPR:
9096 case LT_EXPR:
9097 return constant_boolean_node (0, type);
9098 default:
9099 gcc_unreachable ();
9103 /* If we are comparing an expression that just has comparisons
9104 of two integer values, arithmetic expressions of those comparisons,
9105 and constants, we can simplify it. There are only three cases
9106 to check: the two values can either be equal, the first can be
9107 greater, or the second can be greater. Fold the expression for
9108 those three values. Since each value must be 0 or 1, we have
9109 eight possibilities, each of which corresponds to the constant 0
9110 or 1 or one of the six possible comparisons.
9112 This handles common cases like (a > b) == 0 but also handles
9113 expressions like ((x > y) - (y > x)) > 0, which supposedly
9114 occur in macroized code. */
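/* For example, "(a > b) == 0" folds to "a <= b": substituting the
   three possible orderings of a and b yields the 3-bit mask 3,
   which selects LE_EXPR below. */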
9116 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9118 tree cval1 = 0, cval2 = 0;
9119 int save_p = 0;
9121 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9122 /* Don't handle degenerate cases here; they should already
9123 have been handled anyway. */
9124 && cval1 != 0 && cval2 != 0
9125 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9126 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9127 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9128 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9129 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9130 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9131 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9133 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9134 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9136 /* We can't just pass T to eval_subst in case cval1 or cval2
9137 was the same as ARG1. */
9139 tree high_result
9140 = fold_build2 (code, type,
9141 eval_subst (arg0, cval1, maxval,
9142 cval2, minval),
9143 arg1);
9144 tree equal_result
9145 = fold_build2 (code, type,
9146 eval_subst (arg0, cval1, maxval,
9147 cval2, maxval),
9148 arg1);
9149 tree low_result
9150 = fold_build2 (code, type,
9151 eval_subst (arg0, cval1, minval,
9152 cval2, maxval),
9153 arg1);
9155 /* All three of these results should be 0 or 1. Confirm they are.
9156 Then use those values to select the proper code to use. */
9158 if (TREE_CODE (high_result) == INTEGER_CST
9159 && TREE_CODE (equal_result) == INTEGER_CST
9160 && TREE_CODE (low_result) == INTEGER_CST)
9162 /* Make a 3-bit mask with the high-order bit being the
9163 value for `>', the next for `=', and the low for `<'. */
9164 switch ((integer_onep (high_result) * 4)
9165 + (integer_onep (equal_result) * 2)
9166 + integer_onep (low_result))
9168 case 0:
9169 /* Always false. */
9170 return omit_one_operand (type, integer_zero_node, arg0);
9171 case 1:
9172 code = LT_EXPR;
9173 break;
9174 case 2:
9175 code = EQ_EXPR;
9176 break;
9177 case 3:
9178 code = LE_EXPR;
9179 break;
9180 case 4:
9181 code = GT_EXPR;
9182 break;
9183 case 5:
9184 code = NE_EXPR;
9185 break;
9186 case 6:
9187 code = GE_EXPR;
9188 break;
9189 case 7:
9190 /* Always true. */
9191 return omit_one_operand (type, integer_one_node, arg0);
9194 if (save_p)
9195 return save_expr (build2 (code, type, cval1, cval2));
9196 return fold_build2 (code, type, cval1, cval2);
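/* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the three
   substitutions give high_result = (1 - 0) > 0 = 1, equal_result =
   (0 - 0) > 0 = 0 and low_result = (0 - 1) > 0 = 0, so the 3-bit mask
   is 4 and the whole expression folds to x > y.  */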
9201 /* Fold a comparison of the address of COMPONENT_REFs with the same
9202 type and component to a comparison of the address of the base
9203 object. In short, &x->a OP &y->a becomes x OP y and
9204 &x->a OP &y.a becomes x OP &y. */
9205 if (TREE_CODE (arg0) == ADDR_EXPR
9206 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9207 && TREE_CODE (arg1) == ADDR_EXPR
9208 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9210 tree cref0 = TREE_OPERAND (arg0, 0);
9211 tree cref1 = TREE_OPERAND (arg1, 0);
9212 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9214 tree op0 = TREE_OPERAND (cref0, 0);
9215 tree op1 = TREE_OPERAND (cref1, 0);
9216 return fold_build2 (code, type,
9217 fold_addr_expr (op0),
9218 fold_addr_expr (op1));
9222 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9223 into a single range test. */
9224 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9225 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9226 && TREE_CODE (arg1) == INTEGER_CST
9227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9228 && !integer_zerop (TREE_OPERAND (arg0, 1))
9229 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9230 && !TREE_OVERFLOW (arg1))
9232 tem = fold_div_compare (code, type, arg0, arg1);
9233 if (tem != NULL_TREE)
9234 return tem;
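/* Example (illustrative): for unsigned X, X/4 == 2 holds exactly when
   8 <= X && X <= 11, so fold_div_compare can replace the division by a
   single range test.  */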
9237 /* Fold ~X op ~Y as Y op X. */
9238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9239 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9241 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9242 return fold_build2 (code, type,
9243 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9244 TREE_OPERAND (arg0, 0));
9247 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9248 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9249 && TREE_CODE (arg1) == INTEGER_CST)
9251 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9252 return fold_build2 (swap_tree_comparison (code), type,
9253 TREE_OPERAND (arg0, 0),
9254 fold_build1 (BIT_NOT_EXPR, cmp_type,
9255 fold_convert (cmp_type, arg1)));
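/* Example (illustrative): since ~x == -x - 1 in two's complement, the
   comparison ~x < 5 is equivalent to x > ~5 (i.e. x > -6), which is
   what the swapped comparison built above produces.  */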
9258 return NULL_TREE;
9262 /* Subroutine of fold_binary. Optimize complex multiplications of the
9263 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9264 argument EXPR represents the expression "z" of type TYPE. */
9266 static tree
9267 fold_mult_zconjz (tree type, tree expr)
9269 tree itype = TREE_TYPE (type);
9270 tree rpart, ipart, tem;
9272 if (TREE_CODE (expr) == COMPLEX_EXPR)
9274 rpart = TREE_OPERAND (expr, 0);
9275 ipart = TREE_OPERAND (expr, 1);
9277 else if (TREE_CODE (expr) == COMPLEX_CST)
9279 rpart = TREE_REALPART (expr);
9280 ipart = TREE_IMAGPART (expr);
9282 else
9284 expr = save_expr (expr);
9285 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9286 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9289 rpart = save_expr (rpart);
9290 ipart = save_expr (ipart);
9291 tem = fold_build2 (PLUS_EXPR, itype,
9292 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9293 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9294 return fold_build2 (COMPLEX_EXPR, type, tem,
9295 fold_convert (itype, integer_zero_node));
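/* The identity used above: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part -- exactly the COMPLEX_EXPR constructed here.  */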
9299 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9300 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9301 guarantees that P and N have the same least significant log2(M) bits.
9302 N is not otherwise constrained. In particular, N is not normalized to
9303 0 <= N < M as is common. In general, the precise value of P is unknown.
9304 M is chosen as large as possible such that constant N can be determined.
9306 Returns M and sets *RESIDUE to N. */
9308 static unsigned HOST_WIDE_INT
9309 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9311 enum tree_code code;
9313 *residue = 0;
9315 code = TREE_CODE (expr);
9316 if (code == ADDR_EXPR)
9318 expr = TREE_OPERAND (expr, 0);
9319 if (handled_component_p (expr))
9321 HOST_WIDE_INT bitsize, bitpos;
9322 tree offset;
9323 enum machine_mode mode;
9324 int unsignedp, volatilep;
9326 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9327 &mode, &unsignedp, &volatilep, false);
9328 *residue = bitpos / BITS_PER_UNIT;
9329 if (offset)
9331 if (TREE_CODE (offset) == INTEGER_CST)
9332 *residue += TREE_INT_CST_LOW (offset);
9333 else
9334 /* We don't handle more complicated offset expressions. */
9335 return 1;
9339 if (DECL_P (expr))
9340 return DECL_ALIGN_UNIT (expr);
9342 else if (code == POINTER_PLUS_EXPR)
9344 tree op0, op1;
9345 unsigned HOST_WIDE_INT modulus;
9346 enum tree_code inner_code;
9348 op0 = TREE_OPERAND (expr, 0);
9349 STRIP_NOPS (op0);
9350 modulus = get_pointer_modulus_and_residue (op0, residue);
9352 op1 = TREE_OPERAND (expr, 1);
9353 STRIP_NOPS (op1);
9354 inner_code = TREE_CODE (op1);
9355 if (inner_code == INTEGER_CST)
9357 *residue += TREE_INT_CST_LOW (op1);
9358 return modulus;
9360 else if (inner_code == MULT_EXPR)
9362 op1 = TREE_OPERAND (op1, 1);
9363 if (TREE_CODE (op1) == INTEGER_CST)
9365 unsigned HOST_WIDE_INT align;
9367 /* Compute the greatest power-of-2 divisor of op1. */
9368 align = TREE_INT_CST_LOW (op1);
9369 align &= -align;
9371 /* If align is non-zero and less than modulus, replace
9372 modulus with align. If align is 0, then either op1 is 0
9373 or the greatest power-of-2 divisor of op1 doesn't fit in an
9374 unsigned HOST_WIDE_INT. In either case, no additional
9375 constraint is imposed. */
9376 if (align)
9377 modulus = MIN (modulus, align);
9379 return modulus;
9384 /* If we get here, we were unable to determine anything useful about the
9385 expression. */
9386 return 1;
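/* Example for the function above (illustrative): for &v p+ 4*i, where
   v is a variable with 8-byte alignment, the recursion yields modulus
   8 and residue 0 for &v, and the multiplication contributes alignment
   4, so the result is modulus 4, residue 0 -- i.e. the pointer value
   is known to be 4-byte aligned.  */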
9390 /* Fold a binary expression of code CODE and type TYPE with operands
9391 OP0 and OP1. Return the folded expression if folding is
9392 successful. Otherwise, return NULL_TREE. */
9394 tree
9395 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9397 enum tree_code_class kind = TREE_CODE_CLASS (code);
9398 tree arg0, arg1, tem;
9399 tree t1 = NULL_TREE;
9400 bool strict_overflow_p;
9402 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9403 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9404 && TREE_CODE_LENGTH (code) == 2
9405 && op0 != NULL_TREE
9406 && op1 != NULL_TREE);
9408 arg0 = op0;
9409 arg1 = op1;
9411 /* Strip any conversions that don't change the mode. This is
9412 safe for every expression, except for a comparison expression
9413 because its signedness is derived from its operands. So, in
9414 the latter case, only strip conversions that don't change the
9415 signedness.
9417 Note that this is done as an internal manipulation within the
9418 constant folder, in order to find the simplest representation
9419 of the arguments so that their form can be studied. In any
9420 case, the appropriate type conversions should be put back in
9421 the tree that will get out of the constant folder. */
9423 if (kind == tcc_comparison)
9425 STRIP_SIGN_NOPS (arg0);
9426 STRIP_SIGN_NOPS (arg1);
9428 else
9430 STRIP_NOPS (arg0);
9431 STRIP_NOPS (arg1);
9434 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9435 constant but we can't do arithmetic on them. */
9436 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9437 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9438 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9439 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9440 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9441 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9443 if (kind == tcc_binary)
9445 /* Make sure type and arg0 have the same saturating flag. */
9446 gcc_assert (TYPE_SATURATING (type)
9447 == TYPE_SATURATING (TREE_TYPE (arg0)));
9448 tem = const_binop (code, arg0, arg1, 0);
9450 else if (kind == tcc_comparison)
9451 tem = fold_relational_const (code, type, arg0, arg1);
9452 else
9453 tem = NULL_TREE;
9455 if (tem != NULL_TREE)
9457 if (TREE_TYPE (tem) != type)
9458 tem = fold_convert (type, tem);
9459 return tem;
9463 /* If this is a commutative operation, and ARG0 is a constant, move it
9464 to ARG1 to reduce the number of tests below. */
9465 if (commutative_tree_code (code)
9466 && tree_swap_operands_p (arg0, arg1, true))
9467 return fold_build2 (code, type, op1, op0);
9469 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9471 First check for cases where an arithmetic operation is applied to a
9472 compound, conditional, or comparison operation. Push the arithmetic
9473 operation inside the compound or conditional to see if any folding
9474 can then be done. Convert comparison to conditional for this purpose.
9475 This also optimizes non-constant cases that used to be done in
9476 expand_expr.
9478 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9479 one of the operands is a comparison and the other is a comparison, a
9480 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9481 code below would make the expression more complex. Change it to a
9482 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9483 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9485 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9486 || code == EQ_EXPR || code == NE_EXPR)
9487 && ((truth_value_p (TREE_CODE (arg0))
9488 && (truth_value_p (TREE_CODE (arg1))
9489 || (TREE_CODE (arg1) == BIT_AND_EXPR
9490 && integer_onep (TREE_OPERAND (arg1, 1)))))
9491 || (truth_value_p (TREE_CODE (arg1))
9492 && (truth_value_p (TREE_CODE (arg0))
9493 || (TREE_CODE (arg0) == BIT_AND_EXPR
9494 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9496 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9497 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9498 : TRUTH_XOR_EXPR,
9499 boolean_type_node,
9500 fold_convert (boolean_type_node, arg0),
9501 fold_convert (boolean_type_node, arg1));
9503 if (code == EQ_EXPR)
9504 tem = invert_truthvalue (tem);
9506 return fold_convert (type, tem);
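/* Example (illustrative): with p = (x > 0) and q = (y > 0),
   p & q becomes TRUTH_AND_EXPR (p, q), p | q becomes
   TRUTH_OR_EXPR (p, q), p != q becomes TRUTH_XOR_EXPR (p, q), and
   p == q becomes the inversion of TRUTH_XOR_EXPR (p, q).  */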
9509 if (TREE_CODE_CLASS (code) == tcc_binary
9510 || TREE_CODE_CLASS (code) == tcc_comparison)
9512 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9513 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9514 fold_build2 (code, type,
9515 fold_convert (TREE_TYPE (op0),
9516 TREE_OPERAND (arg0, 1)),
9517 op1));
9518 if (TREE_CODE (arg1) == COMPOUND_EXPR
9519 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9520 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9521 fold_build2 (code, type, op0,
9522 fold_convert (TREE_TYPE (op1),
9523 TREE_OPERAND (arg1, 1))));
9525 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9527 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9528 arg0, arg1,
9529 /*cond_first_p=*/1);
9530 if (tem != NULL_TREE)
9531 return tem;
9534 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9536 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9537 arg1, arg0,
9538 /*cond_first_p=*/0);
9539 if (tem != NULL_TREE)
9540 return tem;
9544 switch (code)
9546 case POINTER_PLUS_EXPR:
9547 /* 0 +p index -> (type)index */
9548 if (integer_zerop (arg0))
9549 return non_lvalue (fold_convert (type, arg1));
9551 /* PTR +p 0 -> PTR */
9552 if (integer_zerop (arg1))
9553 return non_lvalue (fold_convert (type, arg0));
9555 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9556 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9557 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9558 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9559 fold_convert (sizetype, arg1),
9560 fold_convert (sizetype, arg0)));
9562 /* index +p PTR -> PTR +p index */
9563 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9564 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9565 return fold_build2 (POINTER_PLUS_EXPR, type,
9566 fold_convert (type, arg1),
9567 fold_convert (sizetype, arg0));
9569 /* (PTR +p B) +p A -> PTR +p (B + A) */
9570 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9572 tree inner;
9573 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9574 tree arg00 = TREE_OPERAND (arg0, 0);
9575 inner = fold_build2 (PLUS_EXPR, sizetype,
9576 arg01, fold_convert (sizetype, arg1));
9577 return fold_convert (type,
9578 fold_build2 (POINTER_PLUS_EXPR,
9579 TREE_TYPE (arg00), arg00, inner));
9582 /* PTR_CST +p CST -> CST1 */
9583 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9584 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9586 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9587 of the array. The loop optimizer sometimes produces this kind of
9588 expression. */
9589 if (TREE_CODE (arg0) == ADDR_EXPR)
9591 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9592 if (tem)
9593 return fold_convert (type, tem);
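/* Example (illustrative, assuming 4-byte int): for int a[N], the
   address &a[1] p+ 4*i is rewritten by try_move_mult_to_index as
   &a[1 + i], since 4 is the step of the array.  */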
9596 return NULL_TREE;
9598 case PLUS_EXPR:
9599 /* PTR + INT -> (INT)(PTR p+ INT) */
9600 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9601 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9602 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9603 TREE_TYPE (arg0),
9604 arg0,
9605 fold_convert (sizetype, arg1)));
9606 /* INT + PTR -> (INT)(PTR p+ INT) */
9607 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9608 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9609 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9610 TREE_TYPE (arg1),
9611 arg1,
9612 fold_convert (sizetype, arg0)));
9613 /* A + (-B) -> A - B */
9614 if (TREE_CODE (arg1) == NEGATE_EXPR)
9615 return fold_build2 (MINUS_EXPR, type,
9616 fold_convert (type, arg0),
9617 fold_convert (type, TREE_OPERAND (arg1, 0)));
9618 /* (-A) + B -> B - A */
9619 if (TREE_CODE (arg0) == NEGATE_EXPR
9620 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9621 return fold_build2 (MINUS_EXPR, type,
9622 fold_convert (type, arg1),
9623 fold_convert (type, TREE_OPERAND (arg0, 0)));
9625 if (INTEGRAL_TYPE_P (type))
9627 /* Convert ~A + 1 to -A. */
9628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9629 && integer_onep (arg1))
9630 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9632 /* ~X + X is -1. */
9633 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9634 && !TYPE_OVERFLOW_TRAPS (type))
9636 tree tem = TREE_OPERAND (arg0, 0);
9638 STRIP_NOPS (tem);
9639 if (operand_equal_p (tem, arg1, 0))
9641 t1 = build_int_cst_type (type, -1);
9642 return omit_one_operand (type, t1, arg1);
9646 /* X + ~X is -1. */
9647 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9648 && !TYPE_OVERFLOW_TRAPS (type))
9650 tree tem = TREE_OPERAND (arg1, 0);
9652 STRIP_NOPS (tem);
9653 if (operand_equal_p (arg0, tem, 0))
9655 t1 = build_int_cst_type (type, -1);
9656 return omit_one_operand (type, t1, arg0);
9660 /* X + (X / CST) * -CST is X % CST. */
9661 if (TREE_CODE (arg1) == MULT_EXPR
9662 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9663 && operand_equal_p (arg0,
9664 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9666 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9667 tree cst1 = TREE_OPERAND (arg1, 1);
9668 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9669 if (sum && integer_zerop (sum))
9670 return fold_convert (type,
9671 fold_build2 (TRUNC_MOD_EXPR,
9672 TREE_TYPE (arg0), arg0, cst0));
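/* Example (illustrative): x + (x/16)*-16 folds to x % 16, by the
   truncating-division identity x == (x/16)*16 + x%16.  */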
9676 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9677 same or one. Make sure type is not saturating.
9678 fold_plusminus_mult_expr will re-associate. */
9679 if ((TREE_CODE (arg0) == MULT_EXPR
9680 || TREE_CODE (arg1) == MULT_EXPR)
9681 && !TYPE_SATURATING (type)
9682 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9684 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9685 if (tem)
9686 return tem;
9689 if (! FLOAT_TYPE_P (type))
9691 if (integer_zerop (arg1))
9692 return non_lvalue (fold_convert (type, arg0));
9694 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9695 with a constant, and the two constants have no bits in common,
9696 we should treat this as a BIT_IOR_EXPR since this may produce more
9697 simplifications. */
9698 if (TREE_CODE (arg0) == BIT_AND_EXPR
9699 && TREE_CODE (arg1) == BIT_AND_EXPR
9700 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9701 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9702 && integer_zerop (const_binop (BIT_AND_EXPR,
9703 TREE_OPERAND (arg0, 1),
9704 TREE_OPERAND (arg1, 1), 0)))
9706 code = BIT_IOR_EXPR;
9707 goto bit_ior;
9710 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9711 (plus (plus (mult) (mult)) (foo)) so that we can
9712 take advantage of the factoring cases below. */
9713 if (((TREE_CODE (arg0) == PLUS_EXPR
9714 || TREE_CODE (arg0) == MINUS_EXPR)
9715 && TREE_CODE (arg1) == MULT_EXPR)
9716 || ((TREE_CODE (arg1) == PLUS_EXPR
9717 || TREE_CODE (arg1) == MINUS_EXPR)
9718 && TREE_CODE (arg0) == MULT_EXPR))
9720 tree parg0, parg1, parg, marg;
9721 enum tree_code pcode;
9723 if (TREE_CODE (arg1) == MULT_EXPR)
9724 parg = arg0, marg = arg1;
9725 else
9726 parg = arg1, marg = arg0;
9727 pcode = TREE_CODE (parg);
9728 parg0 = TREE_OPERAND (parg, 0);
9729 parg1 = TREE_OPERAND (parg, 1);
9730 STRIP_NOPS (parg0);
9731 STRIP_NOPS (parg1);
9733 if (TREE_CODE (parg0) == MULT_EXPR
9734 && TREE_CODE (parg1) != MULT_EXPR)
9735 return fold_build2 (pcode, type,
9736 fold_build2 (PLUS_EXPR, type,
9737 fold_convert (type, parg0),
9738 fold_convert (type, marg)),
9739 fold_convert (type, parg1));
9740 if (TREE_CODE (parg0) != MULT_EXPR
9741 && TREE_CODE (parg1) == MULT_EXPR)
9742 return fold_build2 (PLUS_EXPR, type,
9743 fold_convert (type, parg0),
9744 fold_build2 (pcode, type,
9745 fold_convert (type, marg),
9746 fold_convert (type,
9747 parg1)));
9750 else
9752 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9753 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9754 return non_lvalue (fold_convert (type, arg0));
9756 /* Likewise if the operands are reversed. */
9757 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9758 return non_lvalue (fold_convert (type, arg1));
9760 /* Convert X + -C into X - C. */
9761 if (TREE_CODE (arg1) == REAL_CST
9762 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9764 tem = fold_negate_const (arg1, type);
9765 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9766 return fold_build2 (MINUS_EXPR, type,
9767 fold_convert (type, arg0),
9768 fold_convert (type, tem));
9771 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9772 to __complex__ ( x, y ). This is not the same for SNaNs or
9773 if signed zeros are involved. */
9774 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9775 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9776 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9778 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9779 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9780 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9781 bool arg0rz = false, arg0iz = false;
9782 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9783 || (arg0i && (arg0iz = real_zerop (arg0i))))
9785 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9786 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9787 if (arg0rz && arg1i && real_zerop (arg1i))
9789 tree rp = arg1r ? arg1r
9790 : build1 (REALPART_EXPR, rtype, arg1);
9791 tree ip = arg0i ? arg0i
9792 : build1 (IMAGPART_EXPR, rtype, arg0);
9793 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9795 else if (arg0iz && arg1r && real_zerop (arg1r))
9797 tree rp = arg0r ? arg0r
9798 : build1 (REALPART_EXPR, rtype, arg0);
9799 tree ip = arg1i ? arg1i
9800 : build1 (IMAGPART_EXPR, rtype, arg1);
9801 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9806 if (flag_unsafe_math_optimizations
9807 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9808 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9809 && (tem = distribute_real_division (code, type, arg0, arg1)))
9810 return tem;
9812 /* Convert x+x into x*2.0. */
9813 if (operand_equal_p (arg0, arg1, 0)
9814 && SCALAR_FLOAT_TYPE_P (type))
9815 return fold_build2 (MULT_EXPR, type, arg0,
9816 build_real (type, dconst2));
9818 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9819 We associate floats only if the user has specified
9820 -fassociative-math. */
9821 if (flag_associative_math
9822 && TREE_CODE (arg1) == PLUS_EXPR
9823 && TREE_CODE (arg0) != MULT_EXPR)
9825 tree tree10 = TREE_OPERAND (arg1, 0);
9826 tree tree11 = TREE_OPERAND (arg1, 1);
9827 if (TREE_CODE (tree11) == MULT_EXPR
9828 && TREE_CODE (tree10) == MULT_EXPR)
9830 tree tree0;
9831 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9832 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9835 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9836 We associate floats only if the user has specified
9837 -fassociative-math. */
9838 if (flag_associative_math
9839 && TREE_CODE (arg0) == PLUS_EXPR
9840 && TREE_CODE (arg1) != MULT_EXPR)
9842 tree tree00 = TREE_OPERAND (arg0, 0);
9843 tree tree01 = TREE_OPERAND (arg0, 1);
9844 if (TREE_CODE (tree01) == MULT_EXPR
9845 && TREE_CODE (tree00) == MULT_EXPR)
9847 tree tree0;
9848 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9849 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9854 bit_rotate:
9855 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9856 is a rotate of A by C1 bits. */
9857 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9858 is a rotate of A by B bits. */
9860 enum tree_code code0, code1;
9861 code0 = TREE_CODE (arg0);
9862 code1 = TREE_CODE (arg1);
9863 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9864 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9865 && operand_equal_p (TREE_OPERAND (arg0, 0),
9866 TREE_OPERAND (arg1, 0), 0)
9867 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9869 tree tree01, tree11;
9870 enum tree_code code01, code11;
9872 tree01 = TREE_OPERAND (arg0, 1);
9873 tree11 = TREE_OPERAND (arg1, 1);
9874 STRIP_NOPS (tree01);
9875 STRIP_NOPS (tree11);
9876 code01 = TREE_CODE (tree01);
9877 code11 = TREE_CODE (tree11);
9878 if (code01 == INTEGER_CST
9879 && code11 == INTEGER_CST
9880 && TREE_INT_CST_HIGH (tree01) == 0
9881 && TREE_INT_CST_HIGH (tree11) == 0
9882 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9883 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9884 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9885 code0 == LSHIFT_EXPR ? tree01 : tree11);
9886 else if (code11 == MINUS_EXPR)
9888 tree tree110, tree111;
9889 tree110 = TREE_OPERAND (tree11, 0);
9890 tree111 = TREE_OPERAND (tree11, 1);
9891 STRIP_NOPS (tree110);
9892 STRIP_NOPS (tree111);
9893 if (TREE_CODE (tree110) == INTEGER_CST
9894 && 0 == compare_tree_int (tree110,
9895 TYPE_PRECISION
9896 (TREE_TYPE (TREE_OPERAND
9897 (arg0, 0))))
9898 && operand_equal_p (tree01, tree111, 0))
9899 return build2 ((code0 == LSHIFT_EXPR
9900 ? LROTATE_EXPR
9901 : RROTATE_EXPR),
9902 type, TREE_OPERAND (arg0, 0), tree01);
9904 else if (code01 == MINUS_EXPR)
9906 tree tree010, tree011;
9907 tree010 = TREE_OPERAND (tree01, 0);
9908 tree011 = TREE_OPERAND (tree01, 1);
9909 STRIP_NOPS (tree010);
9910 STRIP_NOPS (tree011);
9911 if (TREE_CODE (tree010) == INTEGER_CST
9912 && 0 == compare_tree_int (tree010,
9913 TYPE_PRECISION
9914 (TREE_TYPE (TREE_OPERAND
9915 (arg0, 0))))
9916 && operand_equal_p (tree11, tree011, 0))
9917 return build2 ((code0 != LSHIFT_EXPR
9918 ? LROTATE_EXPR
9919 : RROTATE_EXPR),
9920 type, TREE_OPERAND (arg0, 0), tree11);
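/* Example (illustrative, assuming 32-bit unsigned x):
   (x << 3) + (x >> 29) matches the constant form (3 + 29 == 32) and
   becomes a left-rotate of x by 3; likewise
   (x << n) + (x >> (32 - n)) matches the MINUS_EXPR form.  */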
9925 associate:
9926 /* In most languages, we can't reassociate operations on floats across
9927 parentheses. Rather than remember where the parentheses were, we
9928 don't associate floats at all, unless the user has specified
9929 -fassociative-math.
9930 And, we need to make sure type is not saturating. */
9932 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9933 && !TYPE_SATURATING (type))
9935 tree var0, con0, lit0, minus_lit0;
9936 tree var1, con1, lit1, minus_lit1;
9937 bool ok = true;
9939 /* Split both trees into variables, constants, and literals. Then
9940 associate each group together, the constants with literals,
9941 then the result with variables. This increases the chances of
9942 literals being recombined later and of generating relocatable
9943 expressions for the sum of a constant and literal. */
9944 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9945 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9946 code == MINUS_EXPR);
9948 /* With undefined overflow we can only associate constants
9949 with one variable. */
9950 if ((POINTER_TYPE_P (type)
9951 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9952 && var0 && var1)
9954 tree tmp0 = var0;
9955 tree tmp1 = var1;
9957 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9958 tmp0 = TREE_OPERAND (tmp0, 0);
9959 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9960 tmp1 = TREE_OPERAND (tmp1, 0);
9961 /* The only case we can still associate with two variables
9962 is if they are the same, modulo negation. */
9963 if (!operand_equal_p (tmp0, tmp1, 0))
9964 ok = false;
9967 /* Only do something if we found more than two objects. Otherwise,
9968 nothing has changed and we risk infinite recursion. */
9969 if (ok
9970 && (2 < ((var0 != 0) + (var1 != 0)
9971 + (con0 != 0) + (con1 != 0)
9972 + (lit0 != 0) + (lit1 != 0)
9973 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9975 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9976 if (code == MINUS_EXPR)
9977 code = PLUS_EXPR;
9979 var0 = associate_trees (var0, var1, code, type);
9980 con0 = associate_trees (con0, con1, code, type);
9981 lit0 = associate_trees (lit0, lit1, code, type);
9982 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9984 /* Preserve the MINUS_EXPR if the negative part of the literal is
9985 greater than the positive part. Otherwise, the multiplicative
9986 folding code (i.e. extract_muldiv) may be fooled in case
9987 unsigned constants are subtracted, like in the following
9988 example: ((X*2 + 4) - 8U)/2. */
9989 if (minus_lit0 && lit0)
9991 if (TREE_CODE (lit0) == INTEGER_CST
9992 && TREE_CODE (minus_lit0) == INTEGER_CST
9993 && tree_int_cst_lt (lit0, minus_lit0))
9995 minus_lit0 = associate_trees (minus_lit0, lit0,
9996 MINUS_EXPR, type);
9997 lit0 = 0;
9999 else
10001 lit0 = associate_trees (lit0, minus_lit0,
10002 MINUS_EXPR, type);
10003 minus_lit0 = 0;
10006 if (minus_lit0)
10008 if (con0 == 0)
10009 return fold_convert (type,
10010 associate_trees (var0, minus_lit0,
10011 MINUS_EXPR, type));
10012 else
10014 con0 = associate_trees (con0, minus_lit0,
10015 MINUS_EXPR, type);
10016 return fold_convert (type,
10017 associate_trees (var0, con0,
10018 PLUS_EXPR, type));
10022 con0 = associate_trees (con0, lit0, code, type);
10023 return fold_convert (type, associate_trees (var0, con0,
10024 code, type));
10028 return NULL_TREE;
10030 case MINUS_EXPR:
10031 /* Pointer simplifications for subtraction, simple reassociations. */
10032 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10034 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10035 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10036 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10038 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10039 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10040 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10041 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10042 return fold_build2 (PLUS_EXPR, type,
10043 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10044 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10046 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10047 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10049 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10050 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10051 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10052 if (tmp)
10053 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10056 /* A - (-B) -> A + B */
10057 if (TREE_CODE (arg1) == NEGATE_EXPR)
10058 return fold_build2 (PLUS_EXPR, type, op0,
10059 fold_convert (type, TREE_OPERAND (arg1, 0)));
10060 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10061 if (TREE_CODE (arg0) == NEGATE_EXPR
10062 && (FLOAT_TYPE_P (type)
10063 || INTEGRAL_TYPE_P (type))
10064 && negate_expr_p (arg1)
10065 && reorder_operands_p (arg0, arg1))
10066 return fold_build2 (MINUS_EXPR, type,
10067 fold_convert (type, negate_expr (arg1)),
10068 fold_convert (type, TREE_OPERAND (arg0, 0)));
10069 /* Convert -A - 1 to ~A. */
10070 if (INTEGRAL_TYPE_P (type)
10071 && TREE_CODE (arg0) == NEGATE_EXPR
10072 && integer_onep (arg1)
10073 && !TYPE_OVERFLOW_TRAPS (type))
10074 return fold_build1 (BIT_NOT_EXPR, type,
10075 fold_convert (type, TREE_OPERAND (arg0, 0)));
10077 /* Convert -1 - A to ~A. */
10078 if (INTEGRAL_TYPE_P (type)
10079 && integer_all_onesp (arg0))
10080 return fold_build1 (BIT_NOT_EXPR, type, op1);
10083 /* X - (X / CST) * CST is X % CST. */
10084 if (INTEGRAL_TYPE_P (type)
10085 && TREE_CODE (arg1) == MULT_EXPR
10086 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10087 && operand_equal_p (arg0,
10088 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10089 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10090 TREE_OPERAND (arg1, 1), 0))
10091 return fold_convert (type,
10092 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10093 arg0, TREE_OPERAND (arg1, 1)));
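/* Example (illustrative): x - (x/8)*8 folds to x % 8; this is just
   C's definition of the remainder under truncating division.  */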
10095 if (! FLOAT_TYPE_P (type))
10097 if (integer_zerop (arg0))
10098 return negate_expr (fold_convert (type, arg1));
10099 if (integer_zerop (arg1))
10100 return non_lvalue (fold_convert (type, arg0));
10102 /* Fold A - (A & B) into ~B & A. */
10103 if (!TREE_SIDE_EFFECTS (arg0)
10104 && TREE_CODE (arg1) == BIT_AND_EXPR)
10106 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10108 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10109 return fold_build2 (BIT_AND_EXPR, type,
10110 fold_build1 (BIT_NOT_EXPR, type, arg10),
10111 fold_convert (type, arg0));
10113 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10115 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10116 return fold_build2 (BIT_AND_EXPR, type,
10117 fold_build1 (BIT_NOT_EXPR, type, arg11),
10118 fold_convert (type, arg0));
10122 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10123 any power of 2 minus 1. */
10124 if (TREE_CODE (arg0) == BIT_AND_EXPR
10125 && TREE_CODE (arg1) == BIT_AND_EXPR
10126 && operand_equal_p (TREE_OPERAND (arg0, 0),
10127 TREE_OPERAND (arg1, 0), 0))
10129 tree mask0 = TREE_OPERAND (arg0, 1);
10130 tree mask1 = TREE_OPERAND (arg1, 1);
10131 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10133 if (operand_equal_p (tem, mask1, 0))
10135 tem = fold_build2 (BIT_XOR_EXPR, type,
10136 TREE_OPERAND (arg0, 0), mask1);
10137 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10142 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10143 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10144 return non_lvalue (fold_convert (type, arg0));
10146 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10147 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10148 (-ARG1 + ARG0) reduces to -ARG1. */
10149 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10150 return negate_expr (fold_convert (type, arg1));
10152 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10153 __complex__ ( x, -y ). This is not the same for SNaNs or if
10154 signed zeros are involved. */
10155 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10156 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10157 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10159 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10160 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10161 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10162 bool arg0rz = false, arg0iz = false;
10163 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10164 || (arg0i && (arg0iz = real_zerop (arg0i))))
10166 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10167 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10168 if (arg0rz && arg1i && real_zerop (arg1i))
10170 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10171 arg1r ? arg1r
10172 : build1 (REALPART_EXPR, rtype, arg1));
10173 tree ip = arg0i ? arg0i
10174 : build1 (IMAGPART_EXPR, rtype, arg0);
10175 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10177 else if (arg0iz && arg1r && real_zerop (arg1r))
10179 tree rp = arg0r ? arg0r
10180 : build1 (REALPART_EXPR, rtype, arg0);
10181 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10182 arg1i ? arg1i
10183 : build1 (IMAGPART_EXPR, rtype, arg1));
10184 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10189 /* Fold &x - &x. This can happen from &x.foo - &x.
10190 This is unsafe for certain floats even in non-IEEE formats.
10191 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10192 Also note that operand_equal_p is always false if an operand
10193 is volatile. */
10195 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10196 && operand_equal_p (arg0, arg1, 0))
10197 return fold_convert (type, integer_zero_node);
10199 /* A - B -> A + (-B) if B is easily negatable. */
10200 if (negate_expr_p (arg1)
10201 && ((FLOAT_TYPE_P (type)
10202 /* Avoid this transformation if B is a positive REAL_CST. */
10203 && (TREE_CODE (arg1) != REAL_CST
10204 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10205 || INTEGRAL_TYPE_P (type)))
10206 return fold_build2 (PLUS_EXPR, type,
10207 fold_convert (type, arg0),
10208 fold_convert (type, negate_expr (arg1)));
10210 /* Try folding difference of addresses. */
10212 HOST_WIDE_INT diff;
10214 if ((TREE_CODE (arg0) == ADDR_EXPR
10215 || TREE_CODE (arg1) == ADDR_EXPR)
10216 && ptr_difference_const (arg0, arg1, &diff))
10217 return build_int_cst_type (type, diff);
10220 /* Fold &a[i] - &a[j] to i-j. */
10221 if (TREE_CODE (arg0) == ADDR_EXPR
10222 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10223 && TREE_CODE (arg1) == ADDR_EXPR
10224 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10226 tree aref0 = TREE_OPERAND (arg0, 0);
10227 tree aref1 = TREE_OPERAND (arg1, 0);
10228 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10229 TREE_OPERAND (aref1, 0), 0))
10231 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10232 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10233 tree esz = array_ref_element_size (aref0);
10234 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10235 return fold_build2 (MULT_EXPR, type, diff,
10236 fold_convert (type, esz));
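/* Example (illustrative, assuming 4-byte int): for int a[N],
   &a[7] - &a[2] folds here to (7 - 2) * 4 == 20 -- at this point the
   subtraction is still in bytes; the front end's later division by
   the element size yields the C-level result 5.  */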
10241 if (flag_unsafe_math_optimizations
10242 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10243 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10244 && (tem = distribute_real_division (code, type, arg0, arg1)))
10245 return tem;
10247 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10248 same or one. Make sure type is not saturating.
10249 fold_plusminus_mult_expr will re-associate. */
10250 if ((TREE_CODE (arg0) == MULT_EXPR
10251 || TREE_CODE (arg1) == MULT_EXPR)
10252 && !TYPE_SATURATING (type)
10253 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10255 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10256 if (tem)
10257 return tem;
10260 goto associate;
10262 case MULT_EXPR:
10263 /* (-A) * (-B) -> A * B */
10264 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10265 return fold_build2 (MULT_EXPR, type,
10266 fold_convert (type, TREE_OPERAND (arg0, 0)),
10267 fold_convert (type, negate_expr (arg1)));
10268 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10269 return fold_build2 (MULT_EXPR, type,
10270 fold_convert (type, negate_expr (arg0)),
10271 fold_convert (type, TREE_OPERAND (arg1, 0)));
10273 if (! FLOAT_TYPE_P (type))
10275 if (integer_zerop (arg1))
10276 return omit_one_operand (type, arg1, arg0);
10277 if (integer_onep (arg1))
10278 return non_lvalue (fold_convert (type, arg0));
10279 /* Transform x * -1 into -x. Make sure to do the negation
10280 on the original operand with conversions not stripped
10281 because we can only strip non-sign-changing conversions. */
10282 if (integer_all_onesp (arg1))
10283 return fold_convert (type, negate_expr (op0));
10284 /* Transform x * -C into -x * C if x is easily negatable. */
10285 if (TREE_CODE (arg1) == INTEGER_CST
10286 && tree_int_cst_sgn (arg1) == -1
10287 && negate_expr_p (arg0)
10288 && (tem = negate_expr (arg1)) != arg1
10289 && !TREE_OVERFLOW (tem))
10290 return fold_build2 (MULT_EXPR, type,
10291 fold_convert (type, negate_expr (arg0)), tem);
10293 /* (a * (1 << b)) is (a << b) */
10294 if (TREE_CODE (arg1) == LSHIFT_EXPR
10295 && integer_onep (TREE_OPERAND (arg1, 0)))
10296 return fold_build2 (LSHIFT_EXPR, type, op0,
10297 TREE_OPERAND (arg1, 1));
10298 if (TREE_CODE (arg0) == LSHIFT_EXPR
10299 && integer_onep (TREE_OPERAND (arg0, 0)))
10300 return fold_build2 (LSHIFT_EXPR, type, op1,
10301 TREE_OPERAND (arg0, 1));
10303 strict_overflow_p = false;
10304 if (TREE_CODE (arg1) == INTEGER_CST
10305 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10306 &strict_overflow_p)))
10308 if (strict_overflow_p)
10309 fold_overflow_warning (("assuming signed overflow does not "
10310 "occur when simplifying "
10311 "multiplication"),
10312 WARN_STRICT_OVERFLOW_MISC);
10313 return fold_convert (type, tem);
10316 /* Optimize z * conj(z) for integer complex numbers. */
10317 if (TREE_CODE (arg0) == CONJ_EXPR
10318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10319 return fold_mult_zconjz (type, arg1);
10320 if (TREE_CODE (arg1) == CONJ_EXPR
10321 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10322 return fold_mult_zconjz (type, arg0);
10324 else
10326 /* Maybe fold x * 0 to 0. The expressions aren't the same
10327 when x is NaN, since x * 0 is also NaN. Nor are they the
10328 same in modes with signed zeros, since multiplying a
10329 negative value by 0 gives -0, not +0. */
10330 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10331 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10332 && real_zerop (arg1))
10333 return omit_one_operand (type, arg1, arg0);
10334 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10335 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10336 && real_onep (arg1))
10337 return non_lvalue (fold_convert (type, arg0));
10339 /* Transform x * -1.0 into -x. */
10340 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10341 && real_minus_onep (arg1))
10342 return fold_convert (type, negate_expr (arg0));
10344 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10345 the result for floating point types due to rounding, so it is applied
10346 only if -fassociative-math was specified. */
10347 if (flag_associative_math
10348 && TREE_CODE (arg0) == RDIV_EXPR
10349 && TREE_CODE (arg1) == REAL_CST
10350 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10352 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10353 arg1, 0);
10354 if (tem)
10355 return fold_build2 (RDIV_EXPR, type, tem,
10356 TREE_OPERAND (arg0, 1));
10359 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10360 if (operand_equal_p (arg0, arg1, 0))
10362 tree tem = fold_strip_sign_ops (arg0);
10363 if (tem != NULL_TREE)
10365 tem = fold_convert (type, tem);
10366 return fold_build2 (MULT_EXPR, type, tem, tem);
10370 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10371 This is not the same for NaNs or if signed zeros are
10372 involved. */
10373 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10374 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10375 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10376 && TREE_CODE (arg1) == COMPLEX_CST
10377 && real_zerop (TREE_REALPART (arg1)))
10379 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10380 if (real_onep (TREE_IMAGPART (arg1)))
10381 return fold_build2 (COMPLEX_EXPR, type,
10382 negate_expr (fold_build1 (IMAGPART_EXPR,
10383 rtype, arg0)),
10384 fold_build1 (REALPART_EXPR, rtype, arg0));
10385 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10386 return fold_build2 (COMPLEX_EXPR, type,
10387 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10388 negate_expr (fold_build1 (REALPART_EXPR,
10389 rtype, arg0)));
10392 /* Optimize z * conj(z) for floating point complex numbers.
10393 Guarded by flag_unsafe_math_optimizations as non-finite
10394 imaginary components don't produce scalar results. */
10395 if (flag_unsafe_math_optimizations
10396 && TREE_CODE (arg0) == CONJ_EXPR
10397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10398 return fold_mult_zconjz (type, arg1);
10399 if (flag_unsafe_math_optimizations
10400 && TREE_CODE (arg1) == CONJ_EXPR
10401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10402 return fold_mult_zconjz (type, arg0);
10404 if (flag_unsafe_math_optimizations)
10406 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10407 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10409 /* Optimizations of root(...)*root(...). */
10410 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10412 tree rootfn, arg;
10413 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10414 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10416 /* Optimize sqrt(x)*sqrt(x) as x. */
10417 if (BUILTIN_SQRT_P (fcode0)
10418 && operand_equal_p (arg00, arg10, 0)
10419 && ! HONOR_SNANS (TYPE_MODE (type)))
10420 return arg00;
10422 /* Optimize root(x)*root(y) as root(x*y). */
10423 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10424 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10425 return build_call_expr (rootfn, 1, arg);
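/* Example (illustrative, unsafe-math only): sqrt (2.0) * sqrt (8.0)
   becomes sqrt (16.0), i.e. 4.0. The rewrite is only valid when both
   arguments are non-negative, which is why this whole block is
   guarded by flag_unsafe_math_optimizations.  */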
10428 /* Optimize expN(x)*expN(y) as expN(x+y). */
10429 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10431 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10432 tree arg = fold_build2 (PLUS_EXPR, type,
10433 CALL_EXPR_ARG (arg0, 0),
10434 CALL_EXPR_ARG (arg1, 0));
10435 return build_call_expr (expfn, 1, arg);
10438 /* Optimizations of pow(...)*pow(...). */
10439 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10440 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10441 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10443 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10444 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10445 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10446 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10448 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10449 if (operand_equal_p (arg01, arg11, 0))
10451 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10452 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10453 return build_call_expr (powfn, 2, arg, arg01);
10456 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10457 if (operand_equal_p (arg00, arg10, 0))
10459 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10460 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10461 return build_call_expr (powfn, 2, arg00, arg);
10465 /* Optimize tan(x)*cos(x) as sin(x). */
10466 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10467 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10468 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10469 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10470 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10471 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10472 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10473 CALL_EXPR_ARG (arg1, 0), 0))
10475 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10477 if (sinfn != NULL_TREE)
10478 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10481 /* Optimize x*pow(x,c) as pow(x,c+1). */
10482 if (fcode1 == BUILT_IN_POW
10483 || fcode1 == BUILT_IN_POWF
10484 || fcode1 == BUILT_IN_POWL)
10486 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10487 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10488 if (TREE_CODE (arg11) == REAL_CST
10489 && !TREE_OVERFLOW (arg11)
10490 && operand_equal_p (arg0, arg10, 0))
10492 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10493 REAL_VALUE_TYPE c;
10494 tree arg;
10496 c = TREE_REAL_CST (arg11);
10497 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10498 arg = build_real (type, c);
10499 return build_call_expr (powfn, 2, arg0, arg);
10503 /* Optimize pow(x,c)*x as pow(x,c+1). */
10504 if (fcode0 == BUILT_IN_POW
10505 || fcode0 == BUILT_IN_POWF
10506 || fcode0 == BUILT_IN_POWL)
10508 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10509 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10510 if (TREE_CODE (arg01) == REAL_CST
10511 && !TREE_OVERFLOW (arg01)
10512 && operand_equal_p (arg1, arg00, 0))
10514 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10515 REAL_VALUE_TYPE c;
10516 tree arg;
10518 c = TREE_REAL_CST (arg01);
10519 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10520 arg = build_real (type, c);
10521 return build_call_expr (powfn, 2, arg1, arg);
10525 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10526 if (! optimize_size
10527 && operand_equal_p (arg0, arg1, 0))
10529 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10531 if (powfn)
10533 tree arg = build_real (type, dconst2);
10534 return build_call_expr (powfn, 2, arg0, arg);
10539 goto associate;
10541 case BIT_IOR_EXPR:
10542 bit_ior:
10543 if (integer_all_onesp (arg1))
10544 return omit_one_operand (type, arg1, arg0);
10545 if (integer_zerop (arg1))
10546 return non_lvalue (fold_convert (type, arg0));
10547 if (operand_equal_p (arg0, arg1, 0))
10548 return non_lvalue (fold_convert (type, arg0));
10550 /* ~X | X is -1. */
10551 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10552 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10554 t1 = fold_convert (type, integer_zero_node);
10555 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10556 return omit_one_operand (type, t1, arg1);
10559 /* X | ~X is -1. */
10560 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10561 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10563 t1 = fold_convert (type, integer_zero_node);
10564 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10565 return omit_one_operand (type, t1, arg0);
10568 /* Canonicalize (X & C1) | C2. */
10569 if (TREE_CODE (arg0) == BIT_AND_EXPR
10570 && TREE_CODE (arg1) == INTEGER_CST
10571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10573 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10574 int width = TYPE_PRECISION (type), w;
10575 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10576 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10577 hi2 = TREE_INT_CST_HIGH (arg1);
10578 lo2 = TREE_INT_CST_LOW (arg1);
10580 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10581 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10582 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10584 if (width > HOST_BITS_PER_WIDE_INT)
10586 mhi = (unsigned HOST_WIDE_INT) -1
10587 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10588 mlo = -1;
10590 else
10592 mhi = 0;
10593 mlo = (unsigned HOST_WIDE_INT) -1
10594 >> (HOST_BITS_PER_WIDE_INT - width);
10597 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10598 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10599 return fold_build2 (BIT_IOR_EXPR, type,
10600 TREE_OPERAND (arg0, 0), arg1);
10602 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10603 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10604 mode which allows further optimizations. */
10605 hi1 &= mhi;
10606 lo1 &= mlo;
10607 hi2 &= mhi;
10608 lo2 &= mlo;
10609 hi3 = hi1 & ~hi2;
10610 lo3 = lo1 & ~lo2;
10611 for (w = BITS_PER_UNIT;
10612 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10613 w <<= 1)
10615 unsigned HOST_WIDE_INT mask
10616 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10617 if (((lo1 | lo2) & mask) == mask
10618 && (lo1 & ~mask) == 0 && hi1 == 0)
10620 hi3 = 0;
10621 lo3 = mask;
10622 break;
10625 if (hi3 != hi1 || lo3 != lo1)
10626 return fold_build2 (BIT_IOR_EXPR, type,
10627 fold_build2 (BIT_AND_EXPR, type,
10628 TREE_OPERAND (arg0, 0),
10629 build_int_cst_wide (type,
10630 lo3, hi3)),
10631 arg1);
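/* Example (illustrative): (x & 0x3C) | 0x0F has C1 = 0x3C and
   C2 = 0x0F; bits 0-3 are forced to 1 by C2, so C1 can be shrunk to
   C1 & ~C2 = 0x30 and the result becomes (x & 0x30) | 0x0F.  */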
10634 /* (X & Y) | Y is (X, Y). */
10635 if (TREE_CODE (arg0) == BIT_AND_EXPR
10636 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10637 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10638 /* (X & Y) | X is (Y, X). */
10639 if (TREE_CODE (arg0) == BIT_AND_EXPR
10640 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10641 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10642 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10643 /* X | (X & Y) is (Y, X). */
10644 if (TREE_CODE (arg1) == BIT_AND_EXPR
10645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10646 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10647 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10648 /* X | (Y & X) is (Y, X). */
10649 if (TREE_CODE (arg1) == BIT_AND_EXPR
10650 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10651 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10652 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10654 t1 = distribute_bit_expr (code, type, arg0, arg1);
10655 if (t1 != NULL_TREE)
10656 return t1;
10658 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10660 This results in more efficient code for machines without a NAND
10661 instruction. Combine will canonicalize to the first form
10662 which will allow use of NAND instructions provided by the
10663 backend if they exist. */
10664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10665 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10667 return fold_build1 (BIT_NOT_EXPR, type,
10668 build2 (BIT_AND_EXPR, type,
10669 TREE_OPERAND (arg0, 0),
10670 TREE_OPERAND (arg1, 0)));
10673 /* See if this can be simplified into a rotate first. If that
10674 is unsuccessful continue in the association code. */
10675 goto bit_rotate;
10677 case BIT_XOR_EXPR:
10678 if (integer_zerop (arg1))
10679 return non_lvalue (fold_convert (type, arg0));
10680 if (integer_all_onesp (arg1))
10681 return fold_build1 (BIT_NOT_EXPR, type, op0);
10682 if (operand_equal_p (arg0, arg1, 0))
10683 return omit_one_operand (type, integer_zero_node, arg0);
10685 /* ~X ^ X is -1. */
10686 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10689 t1 = fold_convert (type, integer_zero_node);
10690 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10691 return omit_one_operand (type, t1, arg1);
10694 /* X ^ ~X is -1. */
10695 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10696 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10698 t1 = fold_convert (type, integer_zero_node);
10699 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10700 return omit_one_operand (type, t1, arg0);
10703 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10704 with a constant, and the two constants have no bits in common,
10705 we should treat this as a BIT_IOR_EXPR since this may produce more
10706 simplifications. */
10707 if (TREE_CODE (arg0) == BIT_AND_EXPR
10708 && TREE_CODE (arg1) == BIT_AND_EXPR
10709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10710 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10711 && integer_zerop (const_binop (BIT_AND_EXPR,
10712 TREE_OPERAND (arg0, 1),
10713 TREE_OPERAND (arg1, 1), 0)))
10715 code = BIT_IOR_EXPR;
10716 goto bit_ior;
10719 /* (X | Y) ^ X -> Y & ~X */
10720 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10723 tree t2 = TREE_OPERAND (arg0, 1);
10724 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10725 arg1);
10726 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10727 fold_convert (type, t1));
10728 return t1;
10731 /* (Y | X) ^ X -> Y & ~X */
10732 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10733 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10735 tree t2 = TREE_OPERAND (arg0, 0);
10736 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10737 arg1);
10738 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10739 fold_convert (type, t1));
10740 return t1;
10743 /* X ^ (X | Y) -> Y & ~X */
10744 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10745 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10747 tree t2 = TREE_OPERAND (arg1, 1);
10748 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10749 arg0);
10750 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10751 fold_convert (type, t1));
10752 return t1;
10755 /* X ^ (Y | X) -> Y & ~X */
10756 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10757 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10759 tree t2 = TREE_OPERAND (arg1, 0);
10760 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10761 arg0);
10762 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10763 fold_convert (type, t1));
10764 return t1;
10767 /* Convert ~X ^ ~Y to X ^ Y. */
10768 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10769 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10770 return fold_build2 (code, type,
10771 fold_convert (type, TREE_OPERAND (arg0, 0)),
10772 fold_convert (type, TREE_OPERAND (arg1, 0)));
10774 /* Convert ~X ^ C to X ^ ~C. */
10775 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10776 && TREE_CODE (arg1) == INTEGER_CST)
10777 return fold_build2 (code, type,
10778 fold_convert (type, TREE_OPERAND (arg0, 0)),
10779 fold_build1 (BIT_NOT_EXPR, type, arg1));
10781 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10782 if (TREE_CODE (arg0) == BIT_AND_EXPR
10783 && integer_onep (TREE_OPERAND (arg0, 1))
10784 && integer_onep (arg1))
10785 return fold_build2 (EQ_EXPR, type, arg0,
10786 build_int_cst (TREE_TYPE (arg0), 0));
10788 /* Fold (X & Y) ^ Y as ~X & Y. */
10789 if (TREE_CODE (arg0) == BIT_AND_EXPR
10790 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10792 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10793 return fold_build2 (BIT_AND_EXPR, type,
10794 fold_build1 (BIT_NOT_EXPR, type, tem),
10795 fold_convert (type, arg1));
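/* Bitwise check of the identity above (illustrative): where a bit of
   Y is 0, (X & Y) ^ Y gives 0 and so does ~X & Y; where a bit of Y is
   1, it gives X ^ 1 == ~X, matching ~X & Y bit for bit.  */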
10797 /* Fold (X & Y) ^ X as ~Y & X. */
10798 if (TREE_CODE (arg0) == BIT_AND_EXPR
10799 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10800 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10802 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10803 return fold_build2 (BIT_AND_EXPR, type,
10804 fold_build1 (BIT_NOT_EXPR, type, tem),
10805 fold_convert (type, arg1));
10807 /* Fold X ^ (X & Y) as X & ~Y. */
10808 if (TREE_CODE (arg1) == BIT_AND_EXPR
10809 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10811 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10812 return fold_build2 (BIT_AND_EXPR, type,
10813 fold_convert (type, arg0),
10814 fold_build1 (BIT_NOT_EXPR, type, tem));
10816 /* Fold X ^ (Y & X) as ~Y & X. */
10817 if (TREE_CODE (arg1) == BIT_AND_EXPR
10818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10819 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10821 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10822 return fold_build2 (BIT_AND_EXPR, type,
10823 fold_build1 (BIT_NOT_EXPR, type, tem),
10824 fold_convert (type, arg0));
10827 /* See if this can be simplified into a rotate first. If that
10828 is unsuccessful continue in the association code. */
10829 goto bit_rotate;
10831 case BIT_AND_EXPR:
10832 if (integer_all_onesp (arg1))
10833 return non_lvalue (fold_convert (type, arg0));
10834 if (integer_zerop (arg1))
10835 return omit_one_operand (type, arg1, arg0);
10836 if (operand_equal_p (arg0, arg1, 0))
10837 return non_lvalue (fold_convert (type, arg0));
10839 /* ~X & X is always zero. */
10840 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10841 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10842 return omit_one_operand (type, integer_zero_node, arg1);
10844 /* X & ~X is always zero. */
10845 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10846 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10847 return omit_one_operand (type, integer_zero_node, arg0);
10849 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10850 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10851 && TREE_CODE (arg1) == INTEGER_CST
10852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10854 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10855 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10856 TREE_OPERAND (arg0, 0), tmp1);
10857 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10858 TREE_OPERAND (arg0, 1), tmp1);
10859 return fold_convert (type,
10860 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10861 tmp2, tmp3));
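/* Example (added note, not from the original source): for unsigned x,
   (x | 0x0f) & 0xfc is canonicalized to (x & 0xfc) | 0x0c, since
   0x0f & 0xfc == 0x0c; the constant IOR term is then exposed to
   further folding.  */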
10864 /* (X | Y) & Y is (X, Y). */
10865 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10866 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10867 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10868 /* (X | Y) & X is (Y, X). */
10869 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10870 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10871 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10872 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10873 /* X & (X | Y) is (Y, X). */
10874 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10876 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10877 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10878 /* X & (Y | X) is (Y, X). */
10879 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10880 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10881 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10882 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10884 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10885 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10886 && integer_onep (TREE_OPERAND (arg0, 1))
10887 && integer_onep (arg1))
10889 tem = TREE_OPERAND (arg0, 0);
10890 return fold_build2 (EQ_EXPR, type,
10891 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10892 build_int_cst (TREE_TYPE (tem), 1)),
10893 build_int_cst (TREE_TYPE (tem), 0));
10895 /* Fold ~X & 1 as (X & 1) == 0. */
10896 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10897 && integer_onep (arg1))
10899 tem = TREE_OPERAND (arg0, 0);
10900 return fold_build2 (EQ_EXPR, type,
10901 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10902 build_int_cst (TREE_TYPE (tem), 1)),
10903 build_int_cst (TREE_TYPE (tem), 0));
10906 /* Fold (X ^ Y) & Y as ~X & Y. */
10907 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10908 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10910 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10911 return fold_build2 (BIT_AND_EXPR, type,
10912 fold_build1 (BIT_NOT_EXPR, type, tem),
10913 fold_convert (type, arg1));
10915 /* Fold (X ^ Y) & X as ~Y & X. */
10916 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10917 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10918 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10920 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10921 return fold_build2 (BIT_AND_EXPR, type,
10922 fold_build1 (BIT_NOT_EXPR, type, tem),
10923 fold_convert (type, arg1));
10925 /* Fold X & (X ^ Y) as X & ~Y. */
10926 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10927 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10929 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10930 return fold_build2 (BIT_AND_EXPR, type,
10931 fold_convert (type, arg0),
10932 fold_build1 (BIT_NOT_EXPR, type, tem));
10934 /* Fold X & (Y ^ X) as ~Y & X. */
10935 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10936 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10937 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10939 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10940 return fold_build2 (BIT_AND_EXPR, type,
10941 fold_build1 (BIT_NOT_EXPR, type, tem),
10942 fold_convert (type, arg0));
10945 t1 = distribute_bit_expr (code, type, arg0, arg1);
10946 if (t1 != NULL_TREE)
10947 return t1;
10948 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10949 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10950 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10952 unsigned int prec
10953 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10955 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10956 && (~TREE_INT_CST_LOW (arg1)
10957 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10958 return fold_convert (type, TREE_OPERAND (arg0, 0));
10961 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10963 This results in more efficient code for machines without a NOR
10964 instruction. Combine will canonicalize to the first form
10965 which will allow use of NOR instructions provided by the
10966 backend if they exist. */
10967 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10968 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10970 return fold_build1 (BIT_NOT_EXPR, type,
10971 build2 (BIT_IOR_EXPR, type,
10972 fold_convert (type,
10973 TREE_OPERAND (arg0, 0)),
10974 fold_convert (type,
10975 TREE_OPERAND (arg1, 0))));
10978 /* If arg0 is derived from the address of an object or function, we may
10979 be able to fold this expression using the object or function's
10980 alignment. */
10981 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10983 unsigned HOST_WIDE_INT modulus, residue;
10984 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10986 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10988 /* This works because modulus is a power of 2. If this weren't the
10989 case, we'd have to replace it by its greatest power-of-2
10990 divisor: modulus & -modulus. */
10991 if (low < modulus)
10992 return build_int_cst (type, residue & low);
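/* Sketch of the effect (added note; the alignment figure is an
   assumption): if arg0 is the address of a declaration known to be
   8-byte aligned, modulus is 8 and residue is 0, so masking that
   address with any constant below 8 (e.g. 7) folds to the constant
   0.  */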
10995 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10996 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10997 if the new mask might be further optimized. */
10998 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10999 || TREE_CODE (arg0) == RSHIFT_EXPR)
11000 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11001 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11002 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11003 < TYPE_PRECISION (TREE_TYPE (arg0))
11004 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11005 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11007 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11008 unsigned HOST_WIDE_INT mask
11009 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11010 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11011 tree shift_type = TREE_TYPE (arg0);
11013 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11014 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11015 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11016 && TYPE_PRECISION (TREE_TYPE (arg0))
11017 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11019 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11020 tree arg00 = TREE_OPERAND (arg0, 0);
11021 /* See if more bits can be proven to be zero because of
11022 zero extension. */
11023 if (TREE_CODE (arg00) == NOP_EXPR
11024 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11026 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11027 if (TYPE_PRECISION (inner_type)
11028 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11029 && TYPE_PRECISION (inner_type) < prec)
11031 prec = TYPE_PRECISION (inner_type);
11032 /* See if we can shorten the right shift. */
11033 if (shiftc < prec)
11034 shift_type = inner_type;
11037 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11038 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11039 zerobits <<= prec - shiftc;
11040 /* For an arithmetic shift, if the sign bit could be set, zerobits
11041 can actually contain sign bits, so no transformation is
11042 possible, unless MASK masks them all away. In that
11043 case the shift needs to be converted into a logical shift. */
11044 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11045 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11047 if ((mask & zerobits) == 0)
11048 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11049 else
11050 zerobits = 0;
11054 /* ((X << 16) & 0xff00) is (X, 0). */
11055 if ((mask & zerobits) == mask)
11056 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11058 newmask = mask | zerobits;
11059 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11061 unsigned int prec;
11063 /* Only do the transformation if NEWMASK is some integer
11064 mode's mask. */
11065 for (prec = BITS_PER_UNIT;
11066 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11067 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11068 break;
11069 if (prec < HOST_BITS_PER_WIDE_INT
11070 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11072 if (shift_type != TREE_TYPE (arg0))
11074 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11075 fold_convert (shift_type,
11076 TREE_OPERAND (arg0, 0)),
11077 TREE_OPERAND (arg0, 1));
11078 tem = fold_convert (type, tem);
11080 else
11081 tem = op0;
11082 return fold_build2 (BIT_AND_EXPR, type, tem,
11083 build_int_cst_type (TREE_TYPE (op1),
11084 newmask));
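/* Worked example (added note, assuming a 32-bit int and 64-bit
   HOST_WIDE_INT): in (x >> 16) & 0x1ffff with unsigned x, the upper
   16 bits of the shifted value are known zero, so zerobits is
   0xffff0000 and newmask is 0xffffffff.  That is the mask of a
   32-bit mode, so the BIT_AND is rebuilt with it and immediately
   folds away, leaving just x >> 16.  */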
11089 goto associate;
11091 case RDIV_EXPR:
11092 /* Don't touch a floating-point divide by zero unless the mode
11093 of the constant can represent infinity. */
11094 if (TREE_CODE (arg1) == REAL_CST
11095 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11096 && real_zerop (arg1))
11097 return NULL_TREE;
11099 /* Optimize A / A to 1.0 if we don't care about
11100 NaNs or Infinities. Skip the transformation
11101 for non-real operands. */
11102 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11103 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11104 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11105 && operand_equal_p (arg0, arg1, 0))
11107 tree r = build_real (TREE_TYPE (arg0), dconst1);
11109 return omit_two_operands (type, r, arg0, arg1);
11112 /* The complex version of the above A / A optimization. */
11113 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11114 && operand_equal_p (arg0, arg1, 0))
11116 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11117 if (! HONOR_NANS (TYPE_MODE (elem_type))
11118 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11120 tree r = build_real (elem_type, dconst1);
11121 /* omit_two_operands will call fold_convert for us. */
11122 return omit_two_operands (type, r, arg0, arg1);
11126 /* (-A) / (-B) -> A / B */
11127 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11128 return fold_build2 (RDIV_EXPR, type,
11129 TREE_OPERAND (arg0, 0),
11130 negate_expr (arg1));
11131 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11132 return fold_build2 (RDIV_EXPR, type,
11133 negate_expr (arg0),
11134 TREE_OPERAND (arg1, 0));
11136 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11137 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11138 && real_onep (arg1))
11139 return non_lvalue (fold_convert (type, arg0));
11141 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11142 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11143 && real_minus_onep (arg1))
11144 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11146 /* If ARG1 is a constant, we can convert this to a multiply by the
11147 reciprocal. This does not have the same rounding properties,
11148 so only do this if -freciprocal-math. We can actually
11149 always safely do it if ARG1 is a power of two, but it's hard to
11150 tell if it is or not in a portable manner. */
11151 if (TREE_CODE (arg1) == REAL_CST)
11153 if (flag_reciprocal_math
11154 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11155 arg1, 0)))
11156 return fold_build2 (MULT_EXPR, type, arg0, tem);
11157 /* Find the reciprocal if optimizing and the result is exact. */
11158 if (optimize)
11160 REAL_VALUE_TYPE r;
11161 r = TREE_REAL_CST (arg1);
11162 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11164 tem = build_real (type, r);
11165 return fold_build2 (MULT_EXPR, type,
11166 fold_convert (type, arg0), tem);
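/* For instance (added note): x / 2.0 becomes x * 0.5 whenever
   optimizing, because 0.5 is an exact reciprocal, while x / 3.0
   becomes x * (1.0/3.0) only under -freciprocal-math, since that
   reciprocal rounds.  */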
11170 /* Convert A/B/C to A/(B*C). */
11171 if (flag_reciprocal_math
11172 && TREE_CODE (arg0) == RDIV_EXPR)
11173 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11174 fold_build2 (MULT_EXPR, type,
11175 TREE_OPERAND (arg0, 1), arg1));
11177 /* Convert A/(B/C) to (A/B)*C. */
11178 if (flag_reciprocal_math
11179 && TREE_CODE (arg1) == RDIV_EXPR)
11180 return fold_build2 (MULT_EXPR, type,
11181 fold_build2 (RDIV_EXPR, type, arg0,
11182 TREE_OPERAND (arg1, 0)),
11183 TREE_OPERAND (arg1, 1));
11185 /* Convert C1/(X*C2) into (C1/C2)/X. */
11186 if (flag_reciprocal_math
11187 && TREE_CODE (arg1) == MULT_EXPR
11188 && TREE_CODE (arg0) == REAL_CST
11189 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11191 tree tem = const_binop (RDIV_EXPR, arg0,
11192 TREE_OPERAND (arg1, 1), 0);
11193 if (tem)
11194 return fold_build2 (RDIV_EXPR, type, tem,
11195 TREE_OPERAND (arg1, 0));
11198 if (flag_unsafe_math_optimizations)
11200 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11201 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11203 /* Optimize sin(x)/cos(x) as tan(x). */
11204 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11205 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11206 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11207 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11208 CALL_EXPR_ARG (arg1, 0), 0))
11210 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11212 if (tanfn != NULL_TREE)
11213 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11216 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11217 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11218 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11219 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11220 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11221 CALL_EXPR_ARG (arg1, 0), 0))
11223 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11225 if (tanfn != NULL_TREE)
11227 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11228 return fold_build2 (RDIV_EXPR, type,
11229 build_real (type, dconst1), tmp);
11233 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11234 NaNs or Infinities. */
11235 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11236 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11237 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11239 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11240 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11242 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11243 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11244 && operand_equal_p (arg00, arg01, 0))
11246 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11248 if (cosfn != NULL_TREE)
11249 return build_call_expr (cosfn, 1, arg00);
11253 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11254 NaNs or Infinities. */
11255 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11256 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11257 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11259 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11260 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11262 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11263 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11264 && operand_equal_p (arg00, arg01, 0))
11266 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11268 if (cosfn != NULL_TREE)
11270 tree tmp = build_call_expr (cosfn, 1, arg00);
11271 return fold_build2 (RDIV_EXPR, type,
11272 build_real (type, dconst1),
11273 tmp);
11278 /* Optimize pow(x,c)/x as pow(x,c-1). */
11279 if (fcode0 == BUILT_IN_POW
11280 || fcode0 == BUILT_IN_POWF
11281 || fcode0 == BUILT_IN_POWL)
11283 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11284 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11285 if (TREE_CODE (arg01) == REAL_CST
11286 && !TREE_OVERFLOW (arg01)
11287 && operand_equal_p (arg1, arg00, 0))
11289 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11290 REAL_VALUE_TYPE c;
11291 tree arg;
11293 c = TREE_REAL_CST (arg01);
11294 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11295 arg = build_real (type, c);
11296 return build_call_expr (powfn, 2, arg1, arg);
11300 /* Optimize a/root(b/c) into a*root(c/b). */
11301 if (BUILTIN_ROOT_P (fcode1))
11303 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11305 if (TREE_CODE (rootarg) == RDIV_EXPR)
11307 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11308 tree b = TREE_OPERAND (rootarg, 0);
11309 tree c = TREE_OPERAND (rootarg, 1);
11311 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11313 tmp = build_call_expr (rootfn, 1, tmp);
11314 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11318 /* Optimize x/expN(y) into x*expN(-y). */
11319 if (BUILTIN_EXPONENT_P (fcode1))
11321 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11322 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11323 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11324 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11327 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11328 if (fcode1 == BUILT_IN_POW
11329 || fcode1 == BUILT_IN_POWF
11330 || fcode1 == BUILT_IN_POWL)
11332 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11333 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11334 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11335 tree neg11 = fold_convert (type, negate_expr (arg11));
11336 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11337 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11340 return NULL_TREE;
11342 case TRUNC_DIV_EXPR:
11343 case FLOOR_DIV_EXPR:
11344 /* Simplify A / (B << N) where A and B are positive and B is
11345 a power of 2, to A >> (N + log2(B)). */
11346 strict_overflow_p = false;
11347 if (TREE_CODE (arg1) == LSHIFT_EXPR
11348 && (TYPE_UNSIGNED (type)
11349 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11351 tree sval = TREE_OPERAND (arg1, 0);
11352 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11354 tree sh_cnt = TREE_OPERAND (arg1, 1);
11355 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11357 if (strict_overflow_p)
11358 fold_overflow_warning (("assuming signed overflow does not "
11359 "occur when simplifying A / (B << N)"),
11360 WARN_STRICT_OVERFLOW_MISC);
11362 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11363 sh_cnt, build_int_cst (NULL_TREE, pow2));
11364 return fold_build2 (RSHIFT_EXPR, type,
11365 fold_convert (type, arg0), sh_cnt);
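/* Example (added note): for unsigned a, a / (4 << n) becomes
   a >> (n + 2), since log2 (4) == 2.  */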
11369 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11370 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11371 if (INTEGRAL_TYPE_P (type)
11372 && TYPE_UNSIGNED (type)
11373 && code == FLOOR_DIV_EXPR)
11374 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11376 /* Fall through. */
11378 case ROUND_DIV_EXPR:
11379 case CEIL_DIV_EXPR:
11380 case EXACT_DIV_EXPR:
11381 if (integer_onep (arg1))
11382 return non_lvalue (fold_convert (type, arg0));
11383 if (integer_zerop (arg1))
11384 return NULL_TREE;
11385 /* X / -1 is -X. */
11386 if (!TYPE_UNSIGNED (type)
11387 && TREE_CODE (arg1) == INTEGER_CST
11388 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11389 && TREE_INT_CST_HIGH (arg1) == -1)
11390 return fold_convert (type, negate_expr (arg0));
11392 /* Convert -A / -B to A / B when the type is signed and overflow is
11393 undefined. */
11394 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11395 && TREE_CODE (arg0) == NEGATE_EXPR
11396 && negate_expr_p (arg1))
11398 if (INTEGRAL_TYPE_P (type))
11399 fold_overflow_warning (("assuming signed overflow does not occur "
11400 "when distributing negation across "
11401 "division"),
11402 WARN_STRICT_OVERFLOW_MISC);
11403 return fold_build2 (code, type,
11404 fold_convert (type, TREE_OPERAND (arg0, 0)),
11405 negate_expr (arg1));
11407 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11408 && TREE_CODE (arg1) == NEGATE_EXPR
11409 && negate_expr_p (arg0))
11411 if (INTEGRAL_TYPE_P (type))
11412 fold_overflow_warning (("assuming signed overflow does not occur "
11413 "when distributing negation across "
11414 "division"),
11415 WARN_STRICT_OVERFLOW_MISC);
11416 return fold_build2 (code, type, negate_expr (arg0),
11417 TREE_OPERAND (arg1, 0));
11420 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11421 operation, EXACT_DIV_EXPR.
11423 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11424 At one time the others generated faster code; it's not clear if they do
11425 after the last round of changes to the DIV code in expmed.c. */
11426 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11427 && multiple_of_p (type, arg0, arg1))
11428 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11430 strict_overflow_p = false;
11431 if (TREE_CODE (arg1) == INTEGER_CST
11432 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11433 &strict_overflow_p)))
11435 if (strict_overflow_p)
11436 fold_overflow_warning (("assuming signed overflow does not occur "
11437 "when simplifying division"),
11438 WARN_STRICT_OVERFLOW_MISC);
11439 return fold_convert (type, tem);
11442 return NULL_TREE;
11444 case CEIL_MOD_EXPR:
11445 case FLOOR_MOD_EXPR:
11446 case ROUND_MOD_EXPR:
11447 case TRUNC_MOD_EXPR:
11448 /* X % 1 is always zero, but be sure to preserve any side
11449 effects in X. */
11450 if (integer_onep (arg1))
11451 return omit_one_operand (type, integer_zero_node, arg0);
11453 /* For X % 0, return X % 0 unchanged so that we can get the
11454 proper warnings and errors. */
11455 if (integer_zerop (arg1))
11456 return NULL_TREE;
11458 /* 0 % X is always zero, but be sure to preserve any side
11459 effects in X. Place this after checking for X == 0. */
11460 if (integer_zerop (arg0))
11461 return omit_one_operand (type, integer_zero_node, arg1);
11463 /* X % -1 is zero. */
11464 if (!TYPE_UNSIGNED (type)
11465 && TREE_CODE (arg1) == INTEGER_CST
11466 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11467 && TREE_INT_CST_HIGH (arg1) == -1)
11468 return omit_one_operand (type, integer_zero_node, arg0);
11470 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11471 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11472 strict_overflow_p = false;
11473 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11474 && (TYPE_UNSIGNED (type)
11475 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11477 tree c = arg1;
11478 /* Also optimize A % (C << N) where C is a power of 2,
11479 to A & ((C << N) - 1). */
11480 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11481 c = TREE_OPERAND (arg1, 0);
11483 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11485 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11486 build_int_cst (TREE_TYPE (arg1), 1));
11487 if (strict_overflow_p)
11488 fold_overflow_warning (("assuming signed overflow does not "
11489 "occur when simplifying "
11490 "X % (power of two)"),
11491 WARN_STRICT_OVERFLOW_MISC);
11492 return fold_build2 (BIT_AND_EXPR, type,
11493 fold_convert (type, arg0),
11494 fold_convert (type, mask));
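/* Examples (added note): for unsigned x, x % 16 becomes x & 15,
   and x % (4 << n) becomes x & ((4 << n) - 1).  */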
11498 /* X % -C is the same as X % C. */
11499 if (code == TRUNC_MOD_EXPR
11500 && !TYPE_UNSIGNED (type)
11501 && TREE_CODE (arg1) == INTEGER_CST
11502 && !TREE_OVERFLOW (arg1)
11503 && TREE_INT_CST_HIGH (arg1) < 0
11504 && !TYPE_OVERFLOW_TRAPS (type)
11505 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11506 && !sign_bit_p (arg1, arg1))
11507 return fold_build2 (code, type, fold_convert (type, arg0),
11508 fold_convert (type, negate_expr (arg1)));
11510 /* X % -Y is the same as X % Y. */
11511 if (code == TRUNC_MOD_EXPR
11512 && !TYPE_UNSIGNED (type)
11513 && TREE_CODE (arg1) == NEGATE_EXPR
11514 && !TYPE_OVERFLOW_TRAPS (type))
11515 return fold_build2 (code, type, fold_convert (type, arg0),
11516 fold_convert (type, TREE_OPERAND (arg1, 0)));
11518 if (TREE_CODE (arg1) == INTEGER_CST
11519 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11520 &strict_overflow_p)))
11522 if (strict_overflow_p)
11523 fold_overflow_warning (("assuming signed overflow does not occur "
11524 "when simplifying modulos"),
11525 WARN_STRICT_OVERFLOW_MISC);
11526 return fold_convert (type, tem);
11529 return NULL_TREE;
11531 case LROTATE_EXPR:
11532 case RROTATE_EXPR:
11533 if (integer_all_onesp (arg0))
11534 return omit_one_operand (type, arg0, arg1);
11535 goto shift;
11537 case RSHIFT_EXPR:
11538 /* Optimize -1 >> x for arithmetic right shifts. */
11539 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11540 return omit_one_operand (type, arg0, arg1);
11541 /* ... fall through ... */
11543 case LSHIFT_EXPR:
11544 shift:
11545 if (integer_zerop (arg1))
11546 return non_lvalue (fold_convert (type, arg0));
11547 if (integer_zerop (arg0))
11548 return omit_one_operand (type, arg0, arg1);
11550 /* Since a negative shift count is not well-defined,
11551 don't try to compute it in the compiler. */
11552 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11553 return NULL_TREE;
11555 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11556 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11557 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11558 && host_integerp (TREE_OPERAND (arg0, 1), false)
11559 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11561 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11562 + TREE_INT_CST_LOW (arg1));
11564 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11565 being well defined. */
11566 if (low >= TYPE_PRECISION (type))
11568 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11569 low = low % TYPE_PRECISION (type);
11570 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11571 return build_int_cst (type, 0);
11572 else
11573 low = TYPE_PRECISION (type) - 1;
11576 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11577 build_int_cst (type, low));
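/* For instance (added note, assuming 32-bit types): (x << 3) << 5
   becomes x << 8, while for unsigned x the combined count in
   (x >> 20) >> 20 reaches the precision, so the result folds to 0.  */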
11580 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11581 into x & ((unsigned)-1 >> c) for unsigned types. */
11582 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11583 || (TYPE_UNSIGNED (type)
11584 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11585 && host_integerp (arg1, false)
11586 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11587 && host_integerp (TREE_OPERAND (arg0, 1), false)
11588 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11590 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11591 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11592 tree lshift;
11593 tree arg00;
11595 if (low0 == low1)
11597 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11599 lshift = build_int_cst (type, -1);
11600 lshift = int_const_binop (code, lshift, arg1, 0);
11602 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
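/* Example (added note): for 32-bit unsigned x, (x >> 4) << 4
   becomes x & 0xfffffff0, i.e. x with its low four bits cleared.  */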
11606 /* Rewrite an LROTATE_EXPR by a constant into an
11607 RROTATE_EXPR by a new constant. */
11608 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11610 tree tem = build_int_cst (TREE_TYPE (arg1),
11611 GET_MODE_BITSIZE (TYPE_MODE (type)));
11612 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11613 return fold_build2 (RROTATE_EXPR, type, op0, tem);
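/* E.g. (added note) a 32-bit rotate-left by 8 becomes a
   rotate-right by 24.  */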
11616 /* If we have a rotate of a bit operation with the rotate count and
11617 the second operand of the bit operation both constant,
11618 permute the two operations. */
11619 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11620 && (TREE_CODE (arg0) == BIT_AND_EXPR
11621 || TREE_CODE (arg0) == BIT_IOR_EXPR
11622 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11623 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11624 return fold_build2 (TREE_CODE (arg0), type,
11625 fold_build2 (code, type,
11626 TREE_OPERAND (arg0, 0), arg1),
11627 fold_build2 (code, type,
11628 TREE_OPERAND (arg0, 1), arg1));
11630 /* Two consecutive rotates adding up to the width of the mode can
11631 be ignored. */
11632 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11633 && TREE_CODE (arg0) == RROTATE_EXPR
11634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11635 && TREE_INT_CST_HIGH (arg1) == 0
11636 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11637 && ((TREE_INT_CST_LOW (arg1)
11638 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11639 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11640 return TREE_OPERAND (arg0, 0);
11642 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11643 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11644 if the latter can be further optimized. */
11645 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11646 && TREE_CODE (arg0) == BIT_AND_EXPR
11647 && TREE_CODE (arg1) == INTEGER_CST
11648 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11650 tree mask = fold_build2 (code, type,
11651 fold_convert (type, TREE_OPERAND (arg0, 1)),
11652 arg1);
11653 tree shift = fold_build2 (code, type,
11654 fold_convert (type, TREE_OPERAND (arg0, 0)),
11655 arg1);
11656 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11657 if (tem)
11658 return tem;
11661 return NULL_TREE;
11663 case MIN_EXPR:
11664 if (operand_equal_p (arg0, arg1, 0))
11665 return omit_one_operand (type, arg0, arg1);
11666 if (INTEGRAL_TYPE_P (type)
11667 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11668 return omit_one_operand (type, arg1, arg0);
11669 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11670 if (tem)
11671 return tem;
11672 goto associate;
11674 case MAX_EXPR:
11675 if (operand_equal_p (arg0, arg1, 0))
11676 return omit_one_operand (type, arg0, arg1);
11677 if (INTEGRAL_TYPE_P (type)
11678 && TYPE_MAX_VALUE (type)
11679 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11680 return omit_one_operand (type, arg1, arg0);
11681 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11682 if (tem)
11683 return tem;
11684 goto associate;
11686 case TRUTH_ANDIF_EXPR:
11687 /* Note that the operands of this must be ints
11688 and their values must be 0 or 1.
11689 ("true" is a fixed value perhaps depending on the language.) */
11690 /* If first arg is constant zero, return it. */
11691 if (integer_zerop (arg0))
11692 return fold_convert (type, arg0);
11693 case TRUTH_AND_EXPR:
11694 /* If either arg is constant true, drop it. */
11695 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11696 return non_lvalue (fold_convert (type, arg1));
11697 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11698 /* Preserve sequence points. */
11699 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11700 return non_lvalue (fold_convert (type, arg0));
11701 /* If second arg is constant zero, result is zero, but first arg
11702 must be evaluated. */
11703 if (integer_zerop (arg1))
11704 return omit_one_operand (type, arg1, arg0);
11705 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11706 case will be handled here. */
11707 if (integer_zerop (arg0))
11708 return omit_one_operand (type, arg0, arg1);
11710 /* !X && X is always false. */
11711 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11712 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11713 return omit_one_operand (type, integer_zero_node, arg1);
11714 /* X && !X is always false. */
11715 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11716 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11717 return omit_one_operand (type, integer_zero_node, arg0);
11719 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11720 means A >= Y && A != MAX, but in this case we know that
11721 A < X <= MAX. */
11723 if (!TREE_SIDE_EFFECTS (arg0)
11724 && !TREE_SIDE_EFFECTS (arg1))
11726 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11727 if (tem && !operand_equal_p (tem, arg0, 0))
11728 return fold_build2 (code, type, tem, arg1);
11730 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11731 if (tem && !operand_equal_p (tem, arg1, 0))
11732 return fold_build2 (code, type, arg0, tem);
11735 truth_andor:
11736 /* We only do these simplifications if we are optimizing. */
11737 if (!optimize)
11738 return NULL_TREE;
11740 /* Check for things like (A || B) && (A || C). We can convert this
11741 to A || (B && C). Note that either operator can be any of the four
11742 truth and/or operations and the transformation will still be
11743 valid. Also note that we only care about order for the
11744 ANDIF and ORIF operators. If B contains side effects, this
11745 might change the truth-value of A. */
11746 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11747 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11748 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11749 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11750 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11751 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11753 tree a00 = TREE_OPERAND (arg0, 0);
11754 tree a01 = TREE_OPERAND (arg0, 1);
11755 tree a10 = TREE_OPERAND (arg1, 0);
11756 tree a11 = TREE_OPERAND (arg1, 1);
11757 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11758 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11759 && (code == TRUTH_AND_EXPR
11760 || code == TRUTH_OR_EXPR));
11762 if (operand_equal_p (a00, a10, 0))
11763 return fold_build2 (TREE_CODE (arg0), type, a00,
11764 fold_build2 (code, type, a01, a11));
11765 else if (commutative && operand_equal_p (a00, a11, 0))
11766 return fold_build2 (TREE_CODE (arg0), type, a00,
11767 fold_build2 (code, type, a01, a10));
11768 else if (commutative && operand_equal_p (a01, a10, 0))
11769 return fold_build2 (TREE_CODE (arg0), type, a01,
11770 fold_build2 (code, type, a00, a11));
11772 /* This case is tricky because we must either have commutative
11773 operators or else A10 must not have side-effects. */
11775 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11776 && operand_equal_p (a01, a11, 0))
11777 return fold_build2 (TREE_CODE (arg0), type,
11778 fold_build2 (code, type, a00, a10),
11779 a01);
11782 /* See if we can build a range comparison. */
11783 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11784 return tem;
11786 /* Check for the possibility of merging component references. If our
11787 lhs is another similar operation, try to merge its rhs with our
11788 rhs. Then try to merge our lhs and rhs. */
11789 if (TREE_CODE (arg0) == code
11790 && 0 != (tem = fold_truthop (code, type,
11791 TREE_OPERAND (arg0, 1), arg1)))
11792 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11794 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11795 return tem;
11797 return NULL_TREE;
11799 case TRUTH_ORIF_EXPR:
11800 /* Note that the operands of this must be ints
11801 and their values must be 0 or true.
11802 ("true" is a fixed value perhaps depending on the language.) */
11803 /* If first arg is constant true, return it. */
11804 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11805 return fold_convert (type, arg0);
11806 case TRUTH_OR_EXPR:
11807 /* If either arg is constant zero, drop it. */
11808 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11809 return non_lvalue (fold_convert (type, arg1));
11810 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11811 /* Preserve sequence points. */
11812 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11813 return non_lvalue (fold_convert (type, arg0));
11814 /* If second arg is constant true, result is true, but we must
11815 evaluate first arg. */
11816 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11817 return omit_one_operand (type, arg1, arg0);
11818 /* Likewise for first arg, but note this only occurs here for
11819 TRUTH_OR_EXPR. */
11820 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11821 return omit_one_operand (type, arg0, arg1);
11823 /* !X || X is always true. */
11824 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11826 return omit_one_operand (type, integer_one_node, arg1);
11827 /* X || !X is always true. */
11828 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11829 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11830 return omit_one_operand (type, integer_one_node, arg0);
11832 goto truth_andor;
11834 case TRUTH_XOR_EXPR:
11835 /* If the second arg is constant zero, drop it. */
11836 if (integer_zerop (arg1))
11837 return non_lvalue (fold_convert (type, arg0));
11838 /* If the second arg is constant true, this is a logical inversion. */
11839 if (integer_onep (arg1))
11841 /* Only call invert_truthvalue if operand is a truth value. */
11842 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11843 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11844 else
11845 tem = invert_truthvalue (arg0);
11846 return non_lvalue (fold_convert (type, tem));
11848 /* Identical arguments cancel to zero. */
11849 if (operand_equal_p (arg0, arg1, 0))
11850 return omit_one_operand (type, integer_zero_node, arg0);
11852 /* !X ^ X is always true. */
11853 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11854 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11855 return omit_one_operand (type, integer_one_node, arg1);
11857 /* X ^ !X is always true. */
11858 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11859 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11860 return omit_one_operand (type, integer_one_node, arg0);
11862 return NULL_TREE;
11864 case EQ_EXPR:
11865 case NE_EXPR:
11866 tem = fold_comparison (code, type, op0, op1);
11867 if (tem != NULL_TREE)
11868 return tem;
11870 /* bool_var != 0 becomes bool_var. */
11871 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11872 && code == NE_EXPR)
11873 return non_lvalue (fold_convert (type, arg0));
11875 /* bool_var == 1 becomes bool_var. */
11876 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11877 && code == EQ_EXPR)
11878 return non_lvalue (fold_convert (type, arg0));
11880 /* bool_var != 1 becomes !bool_var. */
11881 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11882 && code == NE_EXPR)
11883 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11885 /* bool_var == 0 becomes !bool_var. */
11886 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11887 && code == EQ_EXPR)
11888 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11890 /* If this is an equality comparison of the address of two non-weak,
11891 unaliased symbols neither of which are extern (since we do not
11892 have access to attributes for externs), then we know the result. */
11893 if (TREE_CODE (arg0) == ADDR_EXPR
11894 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11895 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11896 && ! lookup_attribute ("alias",
11897 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11898 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11899 && TREE_CODE (arg1) == ADDR_EXPR
11900 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11901 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11902 && ! lookup_attribute ("alias",
11903 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11904 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11906 /* We know that we're looking at the address of two
11907 non-weak, unaliased, static _DECL nodes.
11909 It is both wasteful and incorrect to call operand_equal_p
11910 to compare the two ADDR_EXPR nodes. It is wasteful in that
11911 all we need to do is test pointer equality for the arguments
11912 to the two ADDR_EXPR nodes. It is incorrect to use
11913 operand_equal_p as that function is NOT equivalent to a
11914 C equality test. It can in fact return false for two
11915 objects which would test as equal using the C equality
11916 operator. */
11917 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11918 return constant_boolean_node (equal
11919 ? code == EQ_EXPR : code != EQ_EXPR,
11920 type);
11923 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11924 a MINUS_EXPR of a constant, we can convert it into a comparison with
11925 a revised constant as long as no overflow occurs. */
11926 if (TREE_CODE (arg1) == INTEGER_CST
11927 && (TREE_CODE (arg0) == PLUS_EXPR
11928 || TREE_CODE (arg0) == MINUS_EXPR)
11929 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11930 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11931 ? MINUS_EXPR : PLUS_EXPR,
11932 fold_convert (TREE_TYPE (arg0), arg1),
11933 TREE_OPERAND (arg0, 1), 0))
11934 && !TREE_OVERFLOW (tem))
11935 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11937 /* Similarly for a NEGATE_EXPR. */
11938 if (TREE_CODE (arg0) == NEGATE_EXPR
11939 && TREE_CODE (arg1) == INTEGER_CST
11940 && 0 != (tem = negate_expr (arg1))
11941 && TREE_CODE (tem) == INTEGER_CST
11942 && !TREE_OVERFLOW (tem))
11943 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11945 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11946 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11947 && TREE_CODE (arg1) == INTEGER_CST
11948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11949 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11950 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11951 fold_convert (TREE_TYPE (arg0), arg1),
11952 TREE_OPERAND (arg0, 1)));
11954 /* Transform comparisons of the form X +- C CMP X. */
11955 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11956 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11957 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11958 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11959 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11961 tree cst = TREE_OPERAND (arg0, 1);
11963 if (code == EQ_EXPR
11964 && !integer_zerop (cst))
11965 return omit_two_operands (type, boolean_false_node,
11966 TREE_OPERAND (arg0, 0), arg1);
11967 else
11968 return omit_two_operands (type, boolean_true_node,
11969 TREE_OPERAND (arg0, 0), arg1);
11972 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11973 for !=. Don't do this for ordered comparisons due to overflow. */
11974 if (TREE_CODE (arg0) == MINUS_EXPR
11975 && integer_zerop (arg1))
11976 return fold_build2 (code, type,
11977 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11979 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11980 if (TREE_CODE (arg0) == ABS_EXPR
11981 && (integer_zerop (arg1) || real_zerop (arg1)))
11982 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11984 /* If this is an EQ or NE comparison with zero and ARG0 is
11985 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11986 two operations, but the latter can be done in one less insn
11987 on machines that have only two-operand insns or on which a
11988 constant cannot be the first operand. */
11989 if (TREE_CODE (arg0) == BIT_AND_EXPR
11990 && integer_zerop (arg1))
11992 tree arg00 = TREE_OPERAND (arg0, 0);
11993 tree arg01 = TREE_OPERAND (arg0, 1);
11994 if (TREE_CODE (arg00) == LSHIFT_EXPR
11995 && integer_onep (TREE_OPERAND (arg00, 0)))
11997 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11998 arg01, TREE_OPERAND (arg00, 1));
11999 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12000 build_int_cst (TREE_TYPE (arg0), 1));
12001 return fold_build2 (code, type,
12002 fold_convert (TREE_TYPE (arg1), tem), arg1);
12004 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12005 && integer_onep (TREE_OPERAND (arg01, 0)))
12007 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12008 arg00, TREE_OPERAND (arg01, 1));
12009 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12010 build_int_cst (TREE_TYPE (arg0), 1));
12011 return fold_build2 (code, type,
12012 fold_convert (TREE_TYPE (arg1), tem), arg1);
12016 /* If this is an NE or EQ comparison of zero against the result of a
12017 signed MOD operation whose second operand is a power of 2, make
12018 the MOD operation unsigned since it is simpler and equivalent. */
12019 if (integer_zerop (arg1)
12020 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12021 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12022 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12023 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12024 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12025 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12027 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12028 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12029 fold_convert (newtype,
12030 TREE_OPERAND (arg0, 0)),
12031 fold_convert (newtype,
12032 TREE_OPERAND (arg0, 1)));
12034 return fold_build2 (code, type, newmod,
12035 fold_convert (newtype, arg1));
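/* Example (added note): for signed x, x % 4 == 0 becomes
   (unsigned) x % 4 == 0; the two tests agree for a power-of-two
   divisor, and the unsigned MOD is simpler to expand.  */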
12038 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12039 C1 is a valid shift constant, and C2 is a power of two, i.e.
12040 a single bit. */
12041 if (TREE_CODE (arg0) == BIT_AND_EXPR
12042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12043 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12044 == INTEGER_CST
12045 && integer_pow2p (TREE_OPERAND (arg0, 1))
12046 && integer_zerop (arg1))
12048 tree itype = TREE_TYPE (arg0);
12049 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12050 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12052 /* Check for a valid shift count. */
12053 if (TREE_INT_CST_HIGH (arg001) == 0
12054 && TREE_INT_CST_LOW (arg001) < prec)
12056 tree arg01 = TREE_OPERAND (arg0, 1);
12057 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12058 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12059 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12060 can be rewritten as (X & (C2 << C1)) != 0. */
12061 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12063 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12064 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12065 return fold_build2 (code, type, tem, arg1);
12067 /* Otherwise, for signed (arithmetic) shifts,
12068 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12069 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12070 else if (!TYPE_UNSIGNED (itype))
12071 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12072 arg000, build_int_cst (itype, 0));
12073 /* Otherwise, for unsigned (logical) shifts,
12074 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12075 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12076 else
12077 return omit_one_operand (type,
12078 code == EQ_EXPR ? integer_one_node
12079 : integer_zero_node,
12080 arg000);
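/* Examples (added note, assuming 32-bit int): ((x >> 3) & 4) != 0
   becomes (x & 32) != 0, while for signed x the mask in
   ((x >> 31) & 2) != 0 would overflow when shifted back, so the
   test becomes x < 0.  */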
12084 /* If this is an NE comparison of zero with an AND of one, remove the
12085 comparison since the AND will give the correct value. */
12086 if (code == NE_EXPR
12087 && integer_zerop (arg1)
12088 && TREE_CODE (arg0) == BIT_AND_EXPR
12089 && integer_onep (TREE_OPERAND (arg0, 1)))
12090 return fold_convert (type, arg0);
12092 /* If we have (A & C) == C where C is a power of 2, convert this into
12093 (A & C) != 0. Similarly for NE_EXPR. */
12094 if (TREE_CODE (arg0) == BIT_AND_EXPR
12095 && integer_pow2p (TREE_OPERAND (arg0, 1))
12096 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12097 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12098 arg0, fold_convert (TREE_TYPE (arg0),
12099 integer_zero_node));
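/* E.g. (added note) (flags & 8) == 8 is rewritten as
   (flags & 8) != 0.  */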
12101 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12102 bit, then fold the expression into A < 0 or A >= 0. */
12103 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12104 if (tem)
12105 return tem;
12107 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12108 Similarly for NE_EXPR. */
12109 if (TREE_CODE (arg0) == BIT_AND_EXPR
12110 && TREE_CODE (arg1) == INTEGER_CST
12111 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12113 tree notc = fold_build1 (BIT_NOT_EXPR,
12114 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12115 TREE_OPERAND (arg0, 1));
12116 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12117 arg1, notc);
12118 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12119 if (integer_nonzerop (dandnotc))
12120 return omit_one_operand (type, rslt, arg0);
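/* Example (added note): (x & 3) == 4 can never hold because
   4 & ~3 != 0, so it folds to 0; likewise (x & 3) != 4 folds
   to 1.  */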
12123 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12124 Similarly for NE_EXPR. */
12125 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12126 && TREE_CODE (arg1) == INTEGER_CST
12127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12129 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12130 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12131 TREE_OPERAND (arg0, 1), notd);
12132 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12133 if (integer_nonzerop (candnotd))
12134 return omit_one_operand (type, rslt, arg0);
12137 /* If this is a comparison of a field, we may be able to simplify it. */
12138 if ((TREE_CODE (arg0) == COMPONENT_REF
12139 || TREE_CODE (arg0) == BIT_FIELD_REF)
12140 /* Handle the constant case even without -O
12141 to make sure the warnings are given. */
12142 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12144 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12145 if (t1)
12146 return t1;
12149 /* Optimize comparisons of strlen vs zero to a compare of the
12150 first character of the string vs zero. To wit,
12151 strlen(ptr) == 0 => *ptr == 0
12152 strlen(ptr) != 0 => *ptr != 0
12153 Other cases should reduce to one of these two (or a constant)
12154 due to the return value of strlen being unsigned. */
12155 if (TREE_CODE (arg0) == CALL_EXPR
12156 && integer_zerop (arg1))
12158 tree fndecl = get_callee_fndecl (arg0);
12160 if (fndecl
12161 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12162 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12163 && call_expr_nargs (arg0) == 1
12164 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12166 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12167 return fold_build2 (code, type, iref,
12168 build_int_cst (TREE_TYPE (iref), 0));
12172 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12173 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12174 if (TREE_CODE (arg0) == RSHIFT_EXPR
12175 && integer_zerop (arg1)
12176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12178 tree arg00 = TREE_OPERAND (arg0, 0);
12179 tree arg01 = TREE_OPERAND (arg0, 1);
12180 tree itype = TREE_TYPE (arg00);
12181 if (TREE_INT_CST_HIGH (arg01) == 0
12182 && TREE_INT_CST_LOW (arg01)
12183 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12185 if (TYPE_UNSIGNED (itype))
12187 itype = signed_type_for (itype);
12188 arg00 = fold_convert (itype, arg00);
12190 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12191 type, arg00, build_int_cst (itype, 0));
12195 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12196 if (integer_zerop (arg1)
12197 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12198 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12199 TREE_OPERAND (arg0, 1));
12201 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12202 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12203 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12204 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12205 build_int_cst (TREE_TYPE (arg1), 0));
12206 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12207 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12209 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12210 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12211 build_int_cst (TREE_TYPE (arg1), 0));
12213 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12214 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12215 && TREE_CODE (arg1) == INTEGER_CST
12216 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12217 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12218 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12219 TREE_OPERAND (arg0, 1), arg1));
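/* For instance (added note): (x ^ 5) == 3 becomes x == 6,
   since 5 ^ 3 == 6.  */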
12221 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12222 (X & C) == 0 when C is a single bit. */
12223 if (TREE_CODE (arg0) == BIT_AND_EXPR
12224 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12225 && integer_zerop (arg1)
12226 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12228 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12229 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12230 TREE_OPERAND (arg0, 1));
12231 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12232 type, tem, arg1);
12235 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12236 constant C is a power of two, i.e. a single bit. */
12237 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12238 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12239 && integer_zerop (arg1)
12240 && integer_pow2p (TREE_OPERAND (arg0, 1))
12241 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12242 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12244 tree arg00 = TREE_OPERAND (arg0, 0);
12245 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12246 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12249 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12250 when C is a power of two, i.e. a single bit. */
12251 if (TREE_CODE (arg0) == BIT_AND_EXPR
12252 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12253 && integer_zerop (arg1)
12254 && integer_pow2p (TREE_OPERAND (arg0, 1))
12255 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12256 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12258 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12259 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12260 arg000, TREE_OPERAND (arg0, 1));
12261 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12262 tem, build_int_cst (TREE_TYPE (tem), 0));
12265 if (integer_zerop (arg1)
12266 && tree_expr_nonzero_p (arg0))
12268 tree res = constant_boolean_node (code == NE_EXPR, type);
12269 return omit_one_operand (type, res, arg0);
12272 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12273 if (TREE_CODE (arg0) == NEGATE_EXPR
12274 && TREE_CODE (arg1) == NEGATE_EXPR)
12275 return fold_build2 (code, type,
12276 TREE_OPERAND (arg0, 0),
12277 TREE_OPERAND (arg1, 0));
12279 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12280 if (TREE_CODE (arg0) == BIT_AND_EXPR
12281 && TREE_CODE (arg1) == BIT_AND_EXPR)
12283 tree arg00 = TREE_OPERAND (arg0, 0);
12284 tree arg01 = TREE_OPERAND (arg0, 1);
12285 tree arg10 = TREE_OPERAND (arg1, 0);
12286 tree arg11 = TREE_OPERAND (arg1, 1);
12287 tree itype = TREE_TYPE (arg0);
12289 if (operand_equal_p (arg01, arg11, 0))
12290 return fold_build2 (code, type,
12291 fold_build2 (BIT_AND_EXPR, itype,
12292 fold_build2 (BIT_XOR_EXPR, itype,
12293 arg00, arg10),
12294 arg01),
12295 build_int_cst (itype, 0));
12297 if (operand_equal_p (arg01, arg10, 0))
12298 return fold_build2 (code, type,
12299 fold_build2 (BIT_AND_EXPR, itype,
12300 fold_build2 (BIT_XOR_EXPR, itype,
12301 arg00, arg11),
12302 arg01),
12303 build_int_cst (itype, 0));
12305 if (operand_equal_p (arg00, arg11, 0))
12306 return fold_build2 (code, type,
12307 fold_build2 (BIT_AND_EXPR, itype,
12308 fold_build2 (BIT_XOR_EXPR, itype,
12309 arg01, arg10),
12310 arg00),
12311 build_int_cst (itype, 0));
12313 if (operand_equal_p (arg00, arg10, 0))
12314 return fold_build2 (code, type,
12315 fold_build2 (BIT_AND_EXPR, itype,
12316 fold_build2 (BIT_XOR_EXPR, itype,
12317 arg01, arg11),
12318 arg00),
12319 build_int_cst (itype, 0));
12322 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12323 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12325 tree arg00 = TREE_OPERAND (arg0, 0);
12326 tree arg01 = TREE_OPERAND (arg0, 1);
12327 tree arg10 = TREE_OPERAND (arg1, 0);
12328 tree arg11 = TREE_OPERAND (arg1, 1);
12329 tree itype = TREE_TYPE (arg0);
12331 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12332 operand_equal_p guarantees no side-effects so we don't need
12333 to use omit_one_operand on Z. */
12334 if (operand_equal_p (arg01, arg11, 0))
12335 return fold_build2 (code, type, arg00, arg10);
12336 if (operand_equal_p (arg01, arg10, 0))
12337 return fold_build2 (code, type, arg00, arg11);
12338 if (operand_equal_p (arg00, arg11, 0))
12339 return fold_build2 (code, type, arg01, arg10);
12340 if (operand_equal_p (arg00, arg10, 0))
12341 return fold_build2 (code, type, arg01, arg11);
12343 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12344 if (TREE_CODE (arg01) == INTEGER_CST
12345 && TREE_CODE (arg11) == INTEGER_CST)
12346 return fold_build2 (code, type,
12347 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12348 fold_build2 (BIT_XOR_EXPR, itype,
12349 arg01, arg11)),
12350 arg10);
12353 /* Attempt to simplify equality/inequality comparisons of complex
12354 values. Only lower the comparison if the result is known or
12355 can be simplified to a single scalar comparison. */
12356 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12357 || TREE_CODE (arg0) == COMPLEX_CST)
12358 && (TREE_CODE (arg1) == COMPLEX_EXPR
12359 || TREE_CODE (arg1) == COMPLEX_CST))
12361 tree real0, imag0, real1, imag1;
12362 tree rcond, icond;
12364 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12366 real0 = TREE_OPERAND (arg0, 0);
12367 imag0 = TREE_OPERAND (arg0, 1);
12369 else
12371 real0 = TREE_REALPART (arg0);
12372 imag0 = TREE_IMAGPART (arg0);
12375 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12377 real1 = TREE_OPERAND (arg1, 0);
12378 imag1 = TREE_OPERAND (arg1, 1);
12380 else
12382 real1 = TREE_REALPART (arg1);
12383 imag1 = TREE_IMAGPART (arg1);
12386 rcond = fold_binary (code, type, real0, real1);
12387 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12389 if (integer_zerop (rcond))
12391 if (code == EQ_EXPR)
12392 return omit_two_operands (type, boolean_false_node,
12393 imag0, imag1);
12394 return fold_build2 (NE_EXPR, type, imag0, imag1);
12396 else
12398 if (code == NE_EXPR)
12399 return omit_two_operands (type, boolean_true_node,
12400 imag0, imag1);
12401 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12405 icond = fold_binary (code, type, imag0, imag1);
12406 if (icond && TREE_CODE (icond) == INTEGER_CST)
12408 if (integer_zerop (icond))
12410 if (code == EQ_EXPR)
12411 return omit_two_operands (type, boolean_false_node,
12412 real0, real1);
12413 return fold_build2 (NE_EXPR, type, real0, real1);
12415 else
12417 if (code == NE_EXPR)
12418 return omit_two_operands (type, boolean_true_node,
12419 real0, real1);
12420 return fold_build2 (EQ_EXPR, type, real0, real1);
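/* In other words: once either the real or the imaginary comparison
folds to a constant, the complex comparison either becomes constant
outright (keeping the other pair only for its side effects) or
reduces to a single scalar comparison of the remaining pair. */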
12425 return NULL_TREE;
12427 case LT_EXPR:
12428 case GT_EXPR:
12429 case LE_EXPR:
12430 case GE_EXPR:
12431 tem = fold_comparison (code, type, op0, op1);
12432 if (tem != NULL_TREE)
12433 return tem;
12435 /* Transform comparisons of the form X +- C CMP X. */
12436 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12437 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12438 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12439 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12440 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12441 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12443 tree arg01 = TREE_OPERAND (arg0, 1);
12444 enum tree_code code0 = TREE_CODE (arg0);
12445 int is_positive;
12447 if (TREE_CODE (arg01) == REAL_CST)
12448 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12449 else
12450 is_positive = tree_int_cst_sgn (arg01);
12452 /* (X - c) > X becomes false. */
12453 if (code == GT_EXPR
12454 && ((code0 == MINUS_EXPR && is_positive >= 0)
12455 || (code0 == PLUS_EXPR && is_positive <= 0)))
12457 if (TREE_CODE (arg01) == INTEGER_CST
12458 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12459 fold_overflow_warning (("assuming signed overflow does not "
12460 "occur when assuming that (X - c) > X "
12461 "is always false"),
12462 WARN_STRICT_OVERFLOW_ALL);
12463 return constant_boolean_node (0, type);
12466 /* Likewise (X + c) < X becomes false. */
12467 if (code == LT_EXPR
12468 && ((code0 == PLUS_EXPR && is_positive >= 0)
12469 || (code0 == MINUS_EXPR && is_positive <= 0)))
12471 if (TREE_CODE (arg01) == INTEGER_CST
12472 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12473 fold_overflow_warning (("assuming signed overflow does not "
12474 "occur when assuming that "
12475 "(X + c) < X is always false"),
12476 WARN_STRICT_OVERFLOW_ALL);
12477 return constant_boolean_node (0, type);
12480 /* Convert (X - c) <= X to true. */
12481 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12482 && code == LE_EXPR
12483 && ((code0 == MINUS_EXPR && is_positive >= 0)
12484 || (code0 == PLUS_EXPR && is_positive <= 0)))
12486 if (TREE_CODE (arg01) == INTEGER_CST
12487 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12488 fold_overflow_warning (("assuming signed overflow does not "
12489 "occur when assuming that "
12490 "(X - c) <= X is always true"),
12491 WARN_STRICT_OVERFLOW_ALL);
12492 return constant_boolean_node (1, type);
12495 /* Convert (X + c) >= X to true. */
12496 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12497 && code == GE_EXPR
12498 && ((code0 == PLUS_EXPR && is_positive >= 0)
12499 || (code0 == MINUS_EXPR && is_positive <= 0)))
12501 if (TREE_CODE (arg01) == INTEGER_CST
12502 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12503 fold_overflow_warning (("assuming signed overflow does not "
12504 "occur when assuming that "
12505 "(X + c) >= X is always true"),
12506 WARN_STRICT_OVERFLOW_ALL);
12507 return constant_boolean_node (1, type);
12510 if (TREE_CODE (arg01) == INTEGER_CST)
12512 /* Convert X + c > X and X - c < X to true for integers. */
12513 if (code == GT_EXPR
12514 && ((code0 == PLUS_EXPR && is_positive > 0)
12515 || (code0 == MINUS_EXPR && is_positive < 0)))
12517 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12518 fold_overflow_warning (("assuming signed overflow does "
12519 "not occur when assuming that "
12520 "(X + c) > X is always true"),
12521 WARN_STRICT_OVERFLOW_ALL);
12522 return constant_boolean_node (1, type);
12525 if (code == LT_EXPR
12526 && ((code0 == MINUS_EXPR && is_positive > 0)
12527 || (code0 == PLUS_EXPR && is_positive < 0)))
12529 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12530 fold_overflow_warning (("assuming signed overflow does "
12531 "not occur when assuming that "
12532 "(X - c) < X is always true"),
12533 WARN_STRICT_OVERFLOW_ALL);
12534 return constant_boolean_node (1, type);
12537 /* Convert X + c <= X and X - c >= X to false for integers. */
12538 if (code == LE_EXPR
12539 && ((code0 == PLUS_EXPR && is_positive > 0)
12540 || (code0 == MINUS_EXPR && is_positive < 0)))
12542 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12543 fold_overflow_warning (("assuming signed overflow does "
12544 "not occur when assuming that "
12545 "(X + c) <= X is always false"),
12546 WARN_STRICT_OVERFLOW_ALL);
12547 return constant_boolean_node (0, type);
12550 if (code == GE_EXPR
12551 && ((code0 == MINUS_EXPR && is_positive > 0)
12552 || (code0 == PLUS_EXPR && is_positive < 0)))
12554 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12555 fold_overflow_warning (("assuming signed overflow does "
12556 "not occur when assuming that "
12557 "(X - c) >= X is always false"),
12558 WARN_STRICT_OVERFLOW_ALL);
12559 return constant_boolean_node (0, type);
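/* Example of the overflow caveat: with undefined signed overflow,
x + 1 > x folds to true outright; under -fwrapv it must be kept,
since x == INT_MAX would wrap and make the comparison false. */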
12564 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12565 This transformation affects the cases which are handled in later
12566 optimizations involving comparisons with non-negative constants. */
12567 if (TREE_CODE (arg1) == INTEGER_CST
12568 && TREE_CODE (arg0) != INTEGER_CST
12569 && tree_int_cst_sgn (arg1) > 0)
12571 if (code == GE_EXPR)
12573 arg1 = const_binop (MINUS_EXPR, arg1,
12574 build_int_cst (TREE_TYPE (arg1), 1), 0);
12575 return fold_build2 (GT_EXPR, type, arg0,
12576 fold_convert (TREE_TYPE (arg0), arg1));
12578 if (code == LT_EXPR)
12580 arg1 = const_binop (MINUS_EXPR, arg1,
12581 build_int_cst (TREE_TYPE (arg1), 1), 0);
12582 return fold_build2 (LE_EXPR, type, arg0,
12583 fold_convert (TREE_TYPE (arg0), arg1));
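/* E.g. x >= 5 becomes x > 4 and x < 5 becomes x <= 4, canonicalizing
toward the boundary forms the transformations below look for. */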
12587 /* Comparisons with the highest or lowest possible integer of
12588 the specified precision will have known values. */
12590 tree arg1_type = TREE_TYPE (arg1);
12591 unsigned int width = TYPE_PRECISION (arg1_type);
12593 if (TREE_CODE (arg1) == INTEGER_CST
12594 && !TREE_OVERFLOW (arg1)
12595 && width <= 2 * HOST_BITS_PER_WIDE_INT
12596 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12598 HOST_WIDE_INT signed_max_hi;
12599 unsigned HOST_WIDE_INT signed_max_lo;
12600 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12602 if (width <= HOST_BITS_PER_WIDE_INT)
12604 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12605 - 1;
12606 signed_max_hi = 0;
12607 max_hi = 0;
12609 if (TYPE_UNSIGNED (arg1_type))
12611 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12612 min_lo = 0;
12613 min_hi = 0;
12615 else
12617 max_lo = signed_max_lo;
12618 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12619 min_hi = -1;
12622 else
12624 width -= HOST_BITS_PER_WIDE_INT;
12625 signed_max_lo = -1;
12626 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12627 - 1;
12628 max_lo = -1;
12629 min_lo = 0;
12631 if (TYPE_UNSIGNED (arg1_type))
12633 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12634 min_hi = 0;
12636 else
12638 max_hi = signed_max_hi;
12639 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12643 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12644 && TREE_INT_CST_LOW (arg1) == max_lo)
12645 switch (code)
12647 case GT_EXPR:
12648 return omit_one_operand (type, integer_zero_node, arg0);
12650 case GE_EXPR:
12651 return fold_build2 (EQ_EXPR, type, op0, op1);
12653 case LE_EXPR:
12654 return omit_one_operand (type, integer_one_node, arg0);
12656 case LT_EXPR:
12657 return fold_build2 (NE_EXPR, type, op0, op1);
12659 /* The GE_EXPR and LT_EXPR cases above are not normally
12660 reached because of previous transformations. */
12662 default:
12663 break;
12665 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12666 == max_hi
12667 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12668 switch (code)
12670 case GT_EXPR:
12671 arg1 = const_binop (PLUS_EXPR, arg1,
12672 build_int_cst (TREE_TYPE (arg1), 1), 0);
12673 return fold_build2 (EQ_EXPR, type,
12674 fold_convert (TREE_TYPE (arg1), arg0),
12675 arg1);
12676 case LE_EXPR:
12677 arg1 = const_binop (PLUS_EXPR, arg1,
12678 build_int_cst (TREE_TYPE (arg1), 1), 0);
12679 return fold_build2 (NE_EXPR, type,
12680 fold_convert (TREE_TYPE (arg1), arg0),
12681 arg1);
12682 default:
12683 break;
12685 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12686 == min_hi
12687 && TREE_INT_CST_LOW (arg1) == min_lo)
12688 switch (code)
12690 case LT_EXPR:
12691 return omit_one_operand (type, integer_zero_node, arg0);
12693 case LE_EXPR:
12694 return fold_build2 (EQ_EXPR, type, op0, op1);
12696 case GE_EXPR:
12697 return omit_one_operand (type, integer_one_node, arg0);
12699 case GT_EXPR:
12700 return fold_build2 (NE_EXPR, type, op0, op1);
12702 default:
12703 break;
12705 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12706 == min_hi
12707 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12708 switch (code)
12710 case GE_EXPR:
12711 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12712 return fold_build2 (NE_EXPR, type,
12713 fold_convert (TREE_TYPE (arg1), arg0),
12714 arg1);
12715 case LT_EXPR:
12716 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12717 return fold_build2 (EQ_EXPR, type,
12718 fold_convert (TREE_TYPE (arg1), arg0),
12719 arg1);
12720 default:
12721 break;
12724 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12725 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12726 && TYPE_UNSIGNED (arg1_type)
12727 /* We will flip the signedness of the comparison operator
12728 associated with the mode of arg1, so the sign bit is
12729 specified by this mode. Check that arg1 is the signed
12730 max associated with this sign bit. */
12731 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12732 /* signed_type does not work on pointer types. */
12733 && INTEGRAL_TYPE_P (arg1_type))
12735 /* The following case also applies to X < signed_max+1
12736 and X >= signed_max+1 because of previous transformations. */
12737 if (code == LE_EXPR || code == GT_EXPR)
12739 tree st;
12740 st = signed_type_for (TREE_TYPE (arg1));
12741 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12742 type, fold_convert (st, arg0),
12743 build_int_cst (st, 0));
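/* E.g. for unsigned char x, x > 127 becomes (signed char) x < 0,
turning the comparison into a plain sign-bit test. */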
12749 /* If we are comparing an ABS_EXPR with a constant, we can
12750 convert all the cases into explicit comparisons, but they may
12751 well not be faster than doing the ABS and one comparison.
12752 But ABS (X) <= C is a range comparison, which becomes a subtraction
12753 and a comparison, and is probably faster. */
12754 if (code == LE_EXPR
12755 && TREE_CODE (arg1) == INTEGER_CST
12756 && TREE_CODE (arg0) == ABS_EXPR
12757 && ! TREE_SIDE_EFFECTS (arg0)
12758 && (0 != (tem = negate_expr (arg1)))
12759 && TREE_CODE (tem) == INTEGER_CST
12760 && !TREE_OVERFLOW (tem))
12761 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12762 build2 (GE_EXPR, type,
12763 TREE_OPERAND (arg0, 0), tem),
12764 build2 (LE_EXPR, type,
12765 TREE_OPERAND (arg0, 0), arg1));
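/* E.g. ABS (x) <= 5 becomes x >= -5 && x <= 5, a range check that
avoids materializing the absolute value. */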
12767 /* Convert ABS_EXPR<x> >= 0 to true. */
12768 strict_overflow_p = false;
12769 if (code == GE_EXPR
12770 && (integer_zerop (arg1)
12771 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12772 && real_zerop (arg1)))
12773 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12775 if (strict_overflow_p)
12776 fold_overflow_warning (("assuming signed overflow does not occur "
12777 "when simplifying comparison of "
12778 "absolute value and zero"),
12779 WARN_STRICT_OVERFLOW_CONDITIONAL);
12780 return omit_one_operand (type, integer_one_node, arg0);
12783 /* Convert ABS_EXPR<x> < 0 to false. */
12784 strict_overflow_p = false;
12785 if (code == LT_EXPR
12786 && (integer_zerop (arg1) || real_zerop (arg1))
12787 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12789 if (strict_overflow_p)
12790 fold_overflow_warning (("assuming signed overflow does not occur "
12791 "when simplifying comparison of "
12792 "absolute value and zero"),
12793 WARN_STRICT_OVERFLOW_CONDITIONAL);
12794 return omit_one_operand (type, integer_zero_node, arg0);
12797 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12798 and similarly for >= into !=. */
12799 if ((code == LT_EXPR || code == GE_EXPR)
12800 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12801 && TREE_CODE (arg1) == LSHIFT_EXPR
12802 && integer_onep (TREE_OPERAND (arg1, 0)))
12803 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12804 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12805 TREE_OPERAND (arg1, 1)),
12806 build_int_cst (TREE_TYPE (arg0), 0));
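/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0: x lies
below bit Y exactly when shifting away the low Y bits leaves
nothing. */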
12808 if ((code == LT_EXPR || code == GE_EXPR)
12809 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12810 && (TREE_CODE (arg1) == NOP_EXPR
12811 || TREE_CODE (arg1) == CONVERT_EXPR)
12812 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12813 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12814 return
12815 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12816 fold_convert (TREE_TYPE (arg0),
12817 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12818 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12819 1))),
12820 build_int_cst (TREE_TYPE (arg0), 0));
12822 return NULL_TREE;
12824 case UNORDERED_EXPR:
12825 case ORDERED_EXPR:
12826 case UNLT_EXPR:
12827 case UNLE_EXPR:
12828 case UNGT_EXPR:
12829 case UNGE_EXPR:
12830 case UNEQ_EXPR:
12831 case LTGT_EXPR:
12832 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12834 t1 = fold_relational_const (code, type, arg0, arg1);
12835 if (t1 != NULL_TREE)
12836 return t1;
12839 /* If the first operand is NaN, the result is constant. */
12840 if (TREE_CODE (arg0) == REAL_CST
12841 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12842 && (code != LTGT_EXPR || ! flag_trapping_math))
12844 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12845 ? integer_zero_node
12846 : integer_one_node;
12847 return omit_one_operand (type, t1, arg1);
12850 /* If the second operand is NaN, the result is constant. */
12851 if (TREE_CODE (arg1) == REAL_CST
12852 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12853 && (code != LTGT_EXPR || ! flag_trapping_math))
12855 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12856 ? integer_zero_node
12857 : integer_one_node;
12858 return omit_one_operand (type, t1, arg0);
12861 /* Simplify unordered comparison of something with itself. */
12862 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12863 && operand_equal_p (arg0, arg1, 0))
12864 return constant_boolean_node (1, type);
12866 if (code == LTGT_EXPR
12867 && !flag_trapping_math
12868 && operand_equal_p (arg0, arg1, 0))
12869 return constant_boolean_node (0, type);
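/* x UNLE x is true whether x is an ordinary value (x <= x) or a NaN
(the comparison is unordered); x LTGT x is always false, but folding
it away is only safe when trapping math is off, since LTGT may raise
an exception on NaN operands. */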
12871 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12873 tree targ0 = strip_float_extensions (arg0);
12874 tree targ1 = strip_float_extensions (arg1);
12875 tree newtype = TREE_TYPE (targ0);
12877 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12878 newtype = TREE_TYPE (targ1);
12880 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12881 return fold_build2 (code, type, fold_convert (newtype, targ0),
12882 fold_convert (newtype, targ1));
12885 return NULL_TREE;
12887 case COMPOUND_EXPR:
12888 /* When pedantic, a compound expression can be neither an lvalue
12889 nor an integer constant expression. */
12890 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12891 return NULL_TREE;
12892 /* Don't let (0, 0) be a null pointer constant. */
12893 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12894 : fold_convert (type, arg1);
12895 return pedantic_non_lvalue (tem);
12897 case COMPLEX_EXPR:
12898 if ((TREE_CODE (arg0) == REAL_CST
12899 && TREE_CODE (arg1) == REAL_CST)
12900 || (TREE_CODE (arg0) == INTEGER_CST
12901 && TREE_CODE (arg1) == INTEGER_CST))
12902 return build_complex (type, arg0, arg1);
12903 return NULL_TREE;
12905 case ASSERT_EXPR:
12906 /* An ASSERT_EXPR should never be passed to fold_binary. */
12907 gcc_unreachable ();
12909 default:
12910 return NULL_TREE;
12911 } /* switch (code) */
12914 /* Callback for walk_tree, looking for LABEL_EXPR.
12915 Returns *TP if it is a LABEL_EXPR. Otherwise it returns NULL_TREE.
12916 Do not check the sub-tree of GOTO_EXPR. */
12918 static tree
12919 contains_label_1 (tree *tp,
12920 int *walk_subtrees,
12921 void *data ATTRIBUTE_UNUSED)
12923 switch (TREE_CODE (*tp))
12925 case LABEL_EXPR:
12926 return *tp;
12927 case GOTO_EXPR:
12928 *walk_subtrees = 0;
12929 /* no break */
12930 default:
12931 return NULL_TREE;
12935 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
12936 accessible from outside the sub-tree. Returns true if such an
12937 addressable label is found, false otherwise. */
12939 static bool
12940 contains_label_p (tree st)
12942 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
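/* A hypothetical sketch (illustration only, not part of the folder
proper) of the guard this helper enables in the COND_EXPR folding
below: a dead arm may be dropped only if it has no side effects, or
at least contains no label that a goto outside the arm could still
target. */
static bool ATTRIBUTE_UNUSED
example_arm_is_discardable (tree arm)
{
return !TREE_SIDE_EFFECTS (arm) || !contains_label_p (arm);
}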
12945 /* Fold a ternary expression of code CODE and type TYPE with operands
12946 OP0, OP1, and OP2. Return the folded expression if folding is
12947 successful. Otherwise, return NULL_TREE. */
12949 tree
12950 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12952 tree tem;
12953 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12954 enum tree_code_class kind = TREE_CODE_CLASS (code);
12956 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12957 && TREE_CODE_LENGTH (code) == 3);
12959 /* Strip any conversions that don't change the mode. This is safe
12960 for every expression, except for a comparison expression because
12961 its signedness is derived from its operands. So, in the latter
12962 case, only strip conversions that don't change the signedness.
12964 Note that this is done as an internal manipulation within the
12965 constant folder, in order to find the simplest representation of
12966 the arguments so that their form can be studied. In any cases,
12967 the appropriate type conversions should be put back in the tree
12968 that will get out of the constant folder. */
12969 if (op0)
12971 arg0 = op0;
12972 STRIP_NOPS (arg0);
12975 if (op1)
12977 arg1 = op1;
12978 STRIP_NOPS (arg1);
12981 switch (code)
12983 case COMPONENT_REF:
12984 if (TREE_CODE (arg0) == CONSTRUCTOR
12985 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12987 unsigned HOST_WIDE_INT idx;
12988 tree field, value;
12989 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12990 if (field == arg1)
12991 return value;
12993 return NULL_TREE;
12995 case COND_EXPR:
12996 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12997 so all simple results must be passed through pedantic_non_lvalue. */
12998 if (TREE_CODE (arg0) == INTEGER_CST)
13000 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13001 tem = integer_zerop (arg0) ? op2 : op1;
13002 /* Only optimize constant conditions when the selected branch
13003 has the same type as the COND_EXPR. This avoids optimizing
13004 away "c ? x : throw", where the throw has a void type.
13005 Also avoid throwing away an operand that contains a label. */
13006 if ((!TREE_SIDE_EFFECTS (unused_op)
13007 || !contains_label_p (unused_op))
13008 && (! VOID_TYPE_P (TREE_TYPE (tem))
13009 || VOID_TYPE_P (type)))
13010 return pedantic_non_lvalue (tem);
13011 return NULL_TREE;
13013 if (operand_equal_p (arg1, op2, 0))
13014 return pedantic_omit_one_operand (type, arg1, arg0);
13016 /* If we have A op B ? A : C, we may be able to convert this to a
13017 simpler expression, depending on the operation and the values
13018 of B and C. Signed zeros prevent all of these transformations,
13019 for reasons given above each one.
13021 Also try swapping the arguments and inverting the conditional. */
13022 if (COMPARISON_CLASS_P (arg0)
13023 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13024 arg1, TREE_OPERAND (arg0, 1))
13025 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13027 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13028 if (tem)
13029 return tem;
13032 if (COMPARISON_CLASS_P (arg0)
13033 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13034 op2,
13035 TREE_OPERAND (arg0, 1))
13036 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13038 tem = fold_truth_not_expr (arg0);
13039 if (tem && COMPARISON_CLASS_P (tem))
13041 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13042 if (tem)
13043 return tem;
13047 /* If the second operand is simpler than the third, swap them
13048 since that produces better jump optimization results. */
13049 if (truth_value_p (TREE_CODE (arg0))
13050 && tree_swap_operands_p (op1, op2, false))
13052 /* See if this can be inverted. If it can't, possibly because
13053 it was a floating-point inequality comparison, don't do
13054 anything. */
13055 tem = fold_truth_not_expr (arg0);
13056 if (tem)
13057 return fold_build3 (code, type, tem, op2, op1);
13060 /* Convert A ? 1 : 0 to simply A. */
13061 if (integer_onep (op1)
13062 && integer_zerop (op2)
13063 /* If we try to convert OP0 to our type, the
13064 call to fold will try to move the conversion inside
13065 a COND, which will recurse. In that case, the COND_EXPR
13066 is probably the best choice, so leave it alone. */
13067 && type == TREE_TYPE (arg0))
13068 return pedantic_non_lvalue (arg0);
13070 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13071 over COND_EXPR in cases such as floating point comparisons. */
13072 if (integer_zerop (op1)
13073 && integer_onep (op2)
13074 && truth_value_p (TREE_CODE (arg0)))
13075 return pedantic_non_lvalue (fold_convert (type,
13076 invert_truthvalue (arg0)));
13078 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13079 if (TREE_CODE (arg0) == LT_EXPR
13080 && integer_zerop (TREE_OPERAND (arg0, 1))
13081 && integer_zerop (op2)
13082 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13084 /* sign_bit_p only checks ARG1 bits within A's precision.
13085 If <sign bit of A> has wider type than A, bits outside
13086 of A's precision in <sign bit of A> need to be checked.
13087 If they are all 0, this optimization needs to be done
13088 in unsigned A's type, if they are all 1 in signed A's type,
13089 otherwise this can't be done. */
13090 if (TYPE_PRECISION (TREE_TYPE (tem))
13091 < TYPE_PRECISION (TREE_TYPE (arg1))
13092 && TYPE_PRECISION (TREE_TYPE (tem))
13093 < TYPE_PRECISION (type))
13095 unsigned HOST_WIDE_INT mask_lo;
13096 HOST_WIDE_INT mask_hi;
13097 int inner_width, outer_width;
13098 tree tem_type;
13100 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13101 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13102 if (outer_width > TYPE_PRECISION (type))
13103 outer_width = TYPE_PRECISION (type);
13105 if (outer_width > HOST_BITS_PER_WIDE_INT)
13107 mask_hi = ((unsigned HOST_WIDE_INT) -1
13108 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13109 mask_lo = -1;
13111 else
13113 mask_hi = 0;
13114 mask_lo = ((unsigned HOST_WIDE_INT) -1
13115 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13117 if (inner_width > HOST_BITS_PER_WIDE_INT)
13119 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13120 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13121 mask_lo = 0;
13123 else
13124 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13125 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13127 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13128 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13130 tem_type = signed_type_for (TREE_TYPE (tem));
13131 tem = fold_convert (tem_type, tem);
13133 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13134 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13136 tem_type = unsigned_type_for (TREE_TYPE (tem));
13137 tem = fold_convert (tem_type, tem);
13139 else
13140 tem = NULL;
13143 if (tem)
13144 return fold_convert (type,
13145 fold_build2 (BIT_AND_EXPR,
13146 TREE_TYPE (tem), tem,
13147 fold_convert (TREE_TYPE (tem),
13148 arg1)));
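/* Worked example: A is a 32-bit int, arg1 is the 64-bit constant
0x80000000, and the result is 64 bits wide. inner_width is 32 and
outer_width 64, so the mask covers bits 32..63; those bits of arg1
are all zero, so the AND is redone as (unsigned int) A & 0x80000000,
where A's sign bit cannot smear into the checked high bits. */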
13151 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13152 already handled above. */
13153 if (TREE_CODE (arg0) == BIT_AND_EXPR
13154 && integer_onep (TREE_OPERAND (arg0, 1))
13155 && integer_zerop (op2)
13156 && integer_pow2p (arg1))
13158 tree tem = TREE_OPERAND (arg0, 0);
13159 STRIP_NOPS (tem);
13160 if (TREE_CODE (tem) == RSHIFT_EXPR
13161 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13162 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13163 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13164 return fold_build2 (BIT_AND_EXPR, type,
13165 TREE_OPERAND (tem, 0), arg1);
13168 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13169 is probably obsolete because the first operand should be a
13170 truth value (that's why we have the two cases above), but let's
13171 leave it in until we can confirm this for all front-ends. */
13172 if (integer_zerop (op2)
13173 && TREE_CODE (arg0) == NE_EXPR
13174 && integer_zerop (TREE_OPERAND (arg0, 1))
13175 && integer_pow2p (arg1)
13176 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13177 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13178 arg1, OEP_ONLY_CONST))
13179 return pedantic_non_lvalue (fold_convert (type,
13180 TREE_OPERAND (arg0, 0)));
13182 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13183 if (integer_zerop (op2)
13184 && truth_value_p (TREE_CODE (arg0))
13185 && truth_value_p (TREE_CODE (arg1)))
13186 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13187 fold_convert (type, arg0),
13188 arg1);
13190 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13191 if (integer_onep (op2)
13192 && truth_value_p (TREE_CODE (arg0))
13193 && truth_value_p (TREE_CODE (arg1)))
13195 /* Only perform transformation if ARG0 is easily inverted. */
13196 tem = fold_truth_not_expr (arg0);
13197 if (tem)
13198 return fold_build2 (TRUTH_ORIF_EXPR, type,
13199 fold_convert (type, tem),
13200 arg1);
13203 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13204 if (integer_zerop (arg1)
13205 && truth_value_p (TREE_CODE (arg0))
13206 && truth_value_p (TREE_CODE (op2)))
13208 /* Only perform transformation if ARG0 is easily inverted. */
13209 tem = fold_truth_not_expr (arg0);
13210 if (tem)
13211 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13212 fold_convert (type, tem),
13213 op2);
13216 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13217 if (integer_onep (arg1)
13218 && truth_value_p (TREE_CODE (arg0))
13219 && truth_value_p (TREE_CODE (op2)))
13220 return fold_build2 (TRUTH_ORIF_EXPR, type,
13221 fold_convert (type, arg0),
13222 op2);
13224 return NULL_TREE;
13226 case CALL_EXPR:
13227 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13228 of fold_ternary on them. */
13229 gcc_unreachable ();
13231 case BIT_FIELD_REF:
13232 if ((TREE_CODE (arg0) == VECTOR_CST
13233 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13234 && type == TREE_TYPE (TREE_TYPE (arg0))
13235 && host_integerp (arg1, 1)
13236 && host_integerp (op2, 1))
13238 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13239 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13241 if (width != 0
13242 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13243 && (idx % width) == 0
13244 && (idx = idx / width)
13245 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13247 tree elements = NULL_TREE;
13249 if (TREE_CODE (arg0) == VECTOR_CST)
13250 elements = TREE_VECTOR_CST_ELTS (arg0);
13251 else
13253 unsigned HOST_WIDE_INT idx;
13254 tree value;
13256 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13257 elements = tree_cons (NULL_TREE, value, elements);
13259 while (idx-- > 0 && elements)
13260 elements = TREE_CHAIN (elements);
13261 if (elements)
13262 return TREE_VALUE (elements);
13263 else
13264 return fold_convert (type, integer_zero_node);
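/* E.g. a BIT_FIELD_REF of a four-element V4SI constant with arg1 = 32
(the element width) and op2 = 64 selects element 64 / 32 = 2 of the
vector. */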
13267 return NULL_TREE;
13269 default:
13270 return NULL_TREE;
13271 } /* switch (code) */
13274 /* Perform constant folding and related simplification of EXPR.
13275 The related simplifications include x*1 => x, x*0 => 0, etc.,
13276 and application of the associative law.
13277 NOP_EXPR conversions may be removed freely (as long as we
13278 are careful not to change the type of the overall expression).
13279 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13280 but we can constant-fold them if they have constant operands. */
13282 #ifdef ENABLE_FOLD_CHECKING
13283 # define fold(x) fold_1 (x)
13284 static tree fold_1 (tree);
13285 static
13286 #endif
13287 tree
13288 fold (tree expr)
13290 const tree t = expr;
13291 enum tree_code code = TREE_CODE (t);
13292 enum tree_code_class kind = TREE_CODE_CLASS (code);
13293 tree tem;
13295 /* Return right away if a constant. */
13296 if (kind == tcc_constant)
13297 return t;
13299 /* CALL_EXPR-like objects with variable numbers of operands are
13300 treated specially. */
13301 if (kind == tcc_vl_exp)
13303 if (code == CALL_EXPR)
13305 tem = fold_call_expr (expr, false);
13306 return tem ? tem : expr;
13308 return expr;
13311 if (IS_EXPR_CODE_CLASS (kind)
13312 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13314 tree type = TREE_TYPE (t);
13315 tree op0, op1, op2;
13317 switch (TREE_CODE_LENGTH (code))
13319 case 1:
13320 op0 = TREE_OPERAND (t, 0);
13321 tem = fold_unary (code, type, op0);
13322 return tem ? tem : expr;
13323 case 2:
13324 op0 = TREE_OPERAND (t, 0);
13325 op1 = TREE_OPERAND (t, 1);
13326 tem = fold_binary (code, type, op0, op1);
13327 return tem ? tem : expr;
13328 case 3:
13329 op0 = TREE_OPERAND (t, 0);
13330 op1 = TREE_OPERAND (t, 1);
13331 op2 = TREE_OPERAND (t, 2);
13332 tem = fold_ternary (code, type, op0, op1, op2);
13333 return tem ? tem : expr;
13334 default:
13335 break;
13339 switch (code)
13341 case CONST_DECL:
13342 return fold (DECL_INITIAL (t));
13344 default:
13345 return t;
13346 } /* switch (code) */
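/* A hypothetical usage sketch (illustration only, not called from GCC
proper): folding a freshly built constant addition through the
dispatcher above. */
static tree ATTRIBUTE_UNUSED
example_fold_plus (void)
{
/* Build the unfolded tree for 1 + 2, then ask fold to simplify it;
the result is a single INTEGER_CST with value 3. */
tree sum = build2 (PLUS_EXPR, integer_type_node,
build_int_cst (integer_type_node, 1),
build_int_cst (integer_type_node, 2));
return fold (sum);
}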
13349 #ifdef ENABLE_FOLD_CHECKING
13350 #undef fold
13352 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13353 static void fold_check_failed (const_tree, const_tree);
13354 void print_fold_checksum (const_tree);
13356 /* When --enable-checking=fold, compute a digest of expr before
13357 and after the actual fold call, to verify that fold did not
13358 accidentally change the original expr. */
13360 tree
13361 fold (tree expr)
13363 tree ret;
13364 struct md5_ctx ctx;
13365 unsigned char checksum_before[16], checksum_after[16];
13366 htab_t ht;
13368 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13369 md5_init_ctx (&ctx);
13370 fold_checksum_tree (expr, &ctx, ht);
13371 md5_finish_ctx (&ctx, checksum_before);
13372 htab_empty (ht);
13374 ret = fold_1 (expr);
13376 md5_init_ctx (&ctx);
13377 fold_checksum_tree (expr, &ctx, ht);
13378 md5_finish_ctx (&ctx, checksum_after);
13379 htab_delete (ht);
13381 if (memcmp (checksum_before, checksum_after, 16))
13382 fold_check_failed (expr, ret);
13384 return ret;
13387 void
13388 print_fold_checksum (const_tree expr)
13390 struct md5_ctx ctx;
13391 unsigned char checksum[16], cnt;
13392 htab_t ht;
13394 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13395 md5_init_ctx (&ctx);
13396 fold_checksum_tree (expr, &ctx, ht);
13397 md5_finish_ctx (&ctx, checksum);
13398 htab_delete (ht);
13399 for (cnt = 0; cnt < 16; ++cnt)
13400 fprintf (stderr, "%02x", checksum[cnt]);
13401 putc ('\n', stderr);
13404 static void
13405 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13407 internal_error ("fold check: original tree changed by fold");
13410 static void
13411 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13413 const void **slot;
13414 enum tree_code code;
13415 struct tree_function_decl buf;
13416 int i, len;
13418 recursive_label:
13420 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13421 <= sizeof (struct tree_function_decl))
13422 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13423 if (expr == NULL)
13424 return;
13425 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13426 if (*slot != NULL)
13427 return;
13428 *slot = expr;
13429 code = TREE_CODE (expr);
13430 if (TREE_CODE_CLASS (code) == tcc_declaration
13431 && DECL_ASSEMBLER_NAME_SET_P (expr))
13433 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13434 memcpy ((char *) &buf, expr, tree_size (expr));
13435 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13436 expr = (tree) &buf;
13438 else if (TREE_CODE_CLASS (code) == tcc_type
13439 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13440 || TYPE_CACHED_VALUES_P (expr)
13441 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13443 /* Allow these fields to be modified. */
13444 tree tmp;
13445 memcpy ((char *) &buf, expr, tree_size (expr));
13446 expr = tmp = (tree) &buf;
13447 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13448 TYPE_POINTER_TO (tmp) = NULL;
13449 TYPE_REFERENCE_TO (tmp) = NULL;
13450 if (TYPE_CACHED_VALUES_P (tmp))
13452 TYPE_CACHED_VALUES_P (tmp) = 0;
13453 TYPE_CACHED_VALUES (tmp) = NULL;
13456 md5_process_bytes (expr, tree_size (expr), ctx);
13457 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13458 if (TREE_CODE_CLASS (code) != tcc_type
13459 && TREE_CODE_CLASS (code) != tcc_declaration
13460 && code != TREE_LIST
13461 && code != SSA_NAME)
13462 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13463 switch (TREE_CODE_CLASS (code))
13465 case tcc_constant:
13466 switch (code)
13468 case STRING_CST:
13469 md5_process_bytes (TREE_STRING_POINTER (expr),
13470 TREE_STRING_LENGTH (expr), ctx);
13471 break;
13472 case COMPLEX_CST:
13473 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13474 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13475 break;
13476 case VECTOR_CST:
13477 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13478 break;
13479 default:
13480 break;
13482 break;
13483 case tcc_exceptional:
13484 switch (code)
13486 case TREE_LIST:
13487 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13488 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13489 expr = TREE_CHAIN (expr);
13490 goto recursive_label;
13491 break;
13492 case TREE_VEC:
13493 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13494 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13495 break;
13496 default:
13497 break;
13499 break;
13500 case tcc_expression:
13501 case tcc_reference:
13502 case tcc_comparison:
13503 case tcc_unary:
13504 case tcc_binary:
13505 case tcc_statement:
13506 case tcc_vl_exp:
13507 len = TREE_OPERAND_LENGTH (expr);
13508 for (i = 0; i < len; ++i)
13509 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13510 break;
13511 case tcc_declaration:
13512 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13513 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13514 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13516 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13517 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13518 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13519 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13520 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13522 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13523 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13525 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13527 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13528 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13529 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13531 break;
13532 case tcc_type:
13533 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13534 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13535 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13536 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13537 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13538 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13539 if (INTEGRAL_TYPE_P (expr)
13540 || SCALAR_FLOAT_TYPE_P (expr))
13542 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13543 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13545 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13546 if (TREE_CODE (expr) == RECORD_TYPE
13547 || TREE_CODE (expr) == UNION_TYPE
13548 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13549 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13550 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13551 break;
13552 default:
13553 break;
13557 /* Helper function for outputting the checksum of a tree T. When
13558 debugging with gdb, you can "define mynext" to be "next" followed
13559 by "call debug_fold_checksum (op0)", then just trace down till the
13560 outputs differ. */
13562 void
13563 debug_fold_checksum (const_tree t)
13565 int i;
13566 unsigned char checksum[16];
13567 struct md5_ctx ctx;
13568 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13570 md5_init_ctx (&ctx);
13571 fold_checksum_tree (t, &ctx, ht);
13572 md5_finish_ctx (&ctx, checksum);
13573 htab_empty (ht);
13575 for (i = 0; i < 16; i++)
13576 fprintf (stderr, "%d ", checksum[i]);
13578 fprintf (stderr, "\n");
13581 #endif
13583 /* Fold a unary tree expression with code CODE of type TYPE with an
13584 operand OP0. Return a folded expression if successful. Otherwise,
13585 return a tree expression with code CODE of type TYPE with an
13586 operand OP0. */
13588 tree
13589 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13591 tree tem;
13592 #ifdef ENABLE_FOLD_CHECKING
13593 unsigned char checksum_before[16], checksum_after[16];
13594 struct md5_ctx ctx;
13595 htab_t ht;
13597 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13598 md5_init_ctx (&ctx);
13599 fold_checksum_tree (op0, &ctx, ht);
13600 md5_finish_ctx (&ctx, checksum_before);
13601 htab_empty (ht);
13602 #endif
13604 tem = fold_unary (code, type, op0);
13605 if (!tem)
13606 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13608 #ifdef ENABLE_FOLD_CHECKING
13609 md5_init_ctx (&ctx);
13610 fold_checksum_tree (op0, &ctx, ht);
13611 md5_finish_ctx (&ctx, checksum_after);
13612 htab_delete (ht);
13614 if (memcmp (checksum_before, checksum_after, 16))
13615 fold_check_failed (op0, tem);
13616 #endif
13617 return tem;
13620 /* Fold a binary tree expression with code CODE of type TYPE with
13621 operands OP0 and OP1. Return a folded expression if successful.
13622 Otherwise, return a tree expression with code CODE of type TYPE
13623 with operands OP0 and OP1. */
13625 tree
13626 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13627 MEM_STAT_DECL)
13629 tree tem;
13630 #ifdef ENABLE_FOLD_CHECKING
13631 unsigned char checksum_before_op0[16],
13632 checksum_before_op1[16],
13633 checksum_after_op0[16],
13634 checksum_after_op1[16];
13635 struct md5_ctx ctx;
13636 htab_t ht;
13638 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13639 md5_init_ctx (&ctx);
13640 fold_checksum_tree (op0, &ctx, ht);
13641 md5_finish_ctx (&ctx, checksum_before_op0);
13642 htab_empty (ht);
13644 md5_init_ctx (&ctx);
13645 fold_checksum_tree (op1, &ctx, ht);
13646 md5_finish_ctx (&ctx, checksum_before_op1);
13647 htab_empty (ht);
13648 #endif
13650 tem = fold_binary (code, type, op0, op1);
13651 if (!tem)
13652 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13654 #ifdef ENABLE_FOLD_CHECKING
13655 md5_init_ctx (&ctx);
13656 fold_checksum_tree (op0, &ctx, ht);
13657 md5_finish_ctx (&ctx, checksum_after_op0);
13658 htab_empty (ht);
13660 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13661 fold_check_failed (op0, tem);
13663 md5_init_ctx (&ctx);
13664 fold_checksum_tree (op1, &ctx, ht);
13665 md5_finish_ctx (&ctx, checksum_after_op1);
13666 htab_delete (ht);
13668 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13669 fold_check_failed (op1, tem);
13670 #endif
13671 return tem;
13674 /* Fold a ternary tree expression with code CODE of type TYPE with
13675 operands OP0, OP1, and OP2. Return a folded expression if
13676 successful. Otherwise, return a tree expression with code CODE of
13677 type TYPE with operands OP0, OP1, and OP2. */
13679 tree
13680 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13681 MEM_STAT_DECL)
13683 tree tem;
13684 #ifdef ENABLE_FOLD_CHECKING
13685 unsigned char checksum_before_op0[16],
13686 checksum_before_op1[16],
13687 checksum_before_op2[16],
13688 checksum_after_op0[16],
13689 checksum_after_op1[16],
13690 checksum_after_op2[16];
13691 struct md5_ctx ctx;
13692 htab_t ht;
13694 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13695 md5_init_ctx (&ctx);
13696 fold_checksum_tree (op0, &ctx, ht);
13697 md5_finish_ctx (&ctx, checksum_before_op0);
13698 htab_empty (ht);
13700 md5_init_ctx (&ctx);
13701 fold_checksum_tree (op1, &ctx, ht);
13702 md5_finish_ctx (&ctx, checksum_before_op1);
13703 htab_empty (ht);
13705 md5_init_ctx (&ctx);
13706 fold_checksum_tree (op2, &ctx, ht);
13707 md5_finish_ctx (&ctx, checksum_before_op2);
13708 htab_empty (ht);
13709 #endif
13711 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13712 tem = fold_ternary (code, type, op0, op1, op2);
13713 if (!tem)
13714 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13716 #ifdef ENABLE_FOLD_CHECKING
13717 md5_init_ctx (&ctx);
13718 fold_checksum_tree (op0, &ctx, ht);
13719 md5_finish_ctx (&ctx, checksum_after_op0);
13720 htab_empty (ht);
13722 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13723 fold_check_failed (op0, tem);
13725 md5_init_ctx (&ctx);
13726 fold_checksum_tree (op1, &ctx, ht);
13727 md5_finish_ctx (&ctx, checksum_after_op1);
13728 htab_empty (ht);
13730 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13731 fold_check_failed (op1, tem);
13733 md5_init_ctx (&ctx);
13734 fold_checksum_tree (op2, &ctx, ht);
13735 md5_finish_ctx (&ctx, checksum_after_op2);
13736 htab_delete (ht);
13738 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13739 fold_check_failed (op2, tem);
13740 #endif
13741 return tem;
13744 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13745 arguments in ARGARRAY, and a null static chain.
13746 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13747 of type TYPE from the given operands as constructed by build_call_array. */
13749 tree
13750 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13752 tree tem;
13753 #ifdef ENABLE_FOLD_CHECKING
13754 unsigned char checksum_before_fn[16],
13755 checksum_before_arglist[16],
13756 checksum_after_fn[16],
13757 checksum_after_arglist[16];
13758 struct md5_ctx ctx;
13759 htab_t ht;
13760 int i;
13762 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13763 md5_init_ctx (&ctx);
13764 fold_checksum_tree (fn, &ctx, ht);
13765 md5_finish_ctx (&ctx, checksum_before_fn);
13766 htab_empty (ht);
13768 md5_init_ctx (&ctx);
13769 for (i = 0; i < nargs; i++)
13770 fold_checksum_tree (argarray[i], &ctx, ht);
13771 md5_finish_ctx (&ctx, checksum_before_arglist);
13772 htab_empty (ht);
13773 #endif
13775 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13777 #ifdef ENABLE_FOLD_CHECKING
13778 md5_init_ctx (&ctx);
13779 fold_checksum_tree (fn, &ctx, ht);
13780 md5_finish_ctx (&ctx, checksum_after_fn);
13781 htab_empty (ht);
13783 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13784 fold_check_failed (fn, tem);
13786 md5_init_ctx (&ctx);
13787 for (i = 0; i < nargs; i++)
13788 fold_checksum_tree (argarray[i], &ctx, ht);
13789 md5_finish_ctx (&ctx, checksum_after_arglist);
13790 htab_delete (ht);
13792 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13793 fold_check_failed (NULL_TREE, tem);
13794 #endif
13795 return tem;
13798 /* Perform constant folding and related simplification of initializer
13799 expression EXPR. These behave identically to "fold_buildN" but ignore
13800 potential run-time traps and exceptions that fold must preserve. */
13802 #define START_FOLD_INIT \
13803 int saved_signaling_nans = flag_signaling_nans;\
13804 int saved_trapping_math = flag_trapping_math;\
13805 int saved_rounding_math = flag_rounding_math;\
13806 int saved_trapv = flag_trapv;\
13807 int saved_folding_initializer = folding_initializer;\
13808 flag_signaling_nans = 0;\
13809 flag_trapping_math = 0;\
13810 flag_rounding_math = 0;\
13811 flag_trapv = 0;\
13812 folding_initializer = 1;
13814 #define END_FOLD_INIT \
13815 flag_signaling_nans = saved_signaling_nans;\
13816 flag_trapping_math = saved_trapping_math;\
13817 flag_rounding_math = saved_rounding_math;\
13818 flag_trapv = saved_trapv;\
13819 folding_initializer = saved_folding_initializer;
13821 tree
13822 fold_build1_initializer (enum tree_code code, tree type, tree op)
13824 tree result;
13825 START_FOLD_INIT;
13827 result = fold_build1 (code, type, op);
13829 END_FOLD_INIT;
13830 return result;
13833 tree
13834 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13836 tree result;
13837 START_FOLD_INIT;
13839 result = fold_build2 (code, type, op0, op1);
13841 END_FOLD_INIT;
13842 return result;
13845 tree
13846 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13847 tree op2)
13849 tree result;
13850 START_FOLD_INIT;
13852 result = fold_build3 (code, type, op0, op1, op2);
13854 END_FOLD_INIT;
13855 return result;
13858 tree
13859 fold_build_call_array_initializer (tree type, tree fn,
13860 int nargs, tree *argarray)
13862 tree result;
13863 START_FOLD_INIT;
13865 result = fold_build_call_array (type, fn, nargs, argarray);
13867 END_FOLD_INIT;
13868 return result;
13871 #undef START_FOLD_INIT
13872 #undef END_FOLD_INIT
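/* A hypothetical usage sketch (illustration only): fold 1.0 / 2.0 for
a static initializer. The _initializer variants may evaluate the
operation at compile time even when flags such as -ftrapping-math or
-frounding-math would otherwise make plain fold keep the run-time
operation. */
static tree ATTRIBUTE_UNUSED
example_fold_initializer_div (void)
{
return fold_build2_initializer (RDIV_EXPR, double_type_node,
build_real (double_type_node, dconst1),
build_real (double_type_node, dconst2));
}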
13874 /* Determine if the first argument is a multiple of the second argument. Return 0 if
13875 it is not, or if we cannot easily determine it to be.
13877 An example of the sort of thing we care about (at this point; this routine
13878 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13879 fold cases do now) is discovering that
13881 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13883 is a multiple of
13885 SAVE_EXPR (J * 8)
13887 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13889 This code also handles discovering that
13891 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13893 is a multiple of 8 so we don't have to worry about dealing with a
13894 possible remainder.
13896 Note that we *look* inside a SAVE_EXPR only to determine how it was
13897 calculated; it is not safe for fold to do much of anything else with the
13898 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13899 at run time. For example, the latter example above *cannot* be implemented
13900 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13901 evaluation time of the original SAVE_EXPR is not necessarily the same at
13902 the time the new expression is evaluated. The only optimization of this
13903 sort that would be valid is changing
13905 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13907 divided by 8 to
13909 SAVE_EXPR (I) * SAVE_EXPR (J)
13911 (where the same SAVE_EXPR (J) is used in the original and the
13912 transformed version). */
13914 int
13915 multiple_of_p (tree type, const_tree top, const_tree bottom)
13917 if (operand_equal_p (top, bottom, 0))
13918 return 1;
13920 if (TREE_CODE (type) != INTEGER_TYPE)
13921 return 0;
13923 switch (TREE_CODE (top))
13925 case BIT_AND_EXPR:
13926 /* Bitwise and provides a power of two multiple. If the mask is
13927 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13928 if (!integer_pow2p (bottom))
13929 return 0;
13930 /* FALLTHRU */
13932 case MULT_EXPR:
13933 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13934 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13936 case PLUS_EXPR:
13937 case MINUS_EXPR:
13938 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13939 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13941 case LSHIFT_EXPR:
13942 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13944 tree op1, t1;
13946 op1 = TREE_OPERAND (top, 1);
13947 /* const_binop may not detect overflow correctly,
13948 so check for it explicitly here. */
13949 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13950 > TREE_INT_CST_LOW (op1)
13951 && TREE_INT_CST_HIGH (op1) == 0
13952 && 0 != (t1 = fold_convert (type,
13953 const_binop (LSHIFT_EXPR,
13954 size_one_node,
13955 op1, 0)))
13956 && !TREE_OVERFLOW (t1))
13957 return multiple_of_p (type, t1, bottom);
13959 return 0;
13961 case NOP_EXPR:
13962 /* Can't handle conversions from non-integral or wider integral type. */
13963 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13964 || (TYPE_PRECISION (type)
13965 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13966 return 0;
13968 /* ... fall through ... */
13970 case SAVE_EXPR:
13971 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13973 case INTEGER_CST:
13974 if (TREE_CODE (bottom) != INTEGER_CST
13975 || integer_zerop (bottom)
13976 || (TYPE_UNSIGNED (type)
13977 && (tree_int_cst_sgn (top) < 0
13978 || tree_int_cst_sgn (bottom) < 0)))
13979 return 0;
13980 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13981 top, bottom, 0));
13983 default:
13984 return 0;
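/* E.g. multiple_of_p (sizetype, J * 8, 8) returns 1 through the
MULT_EXPR case, while J * 4 against 8 returns 0 -- meaning only
"not easily determined", not "known non-multiple". */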
13988 /* Return true if `t' is known to be non-negative. If the return
13989 value is based on the assumption that signed overflow is undefined,
13990 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13991 *STRICT_OVERFLOW_P. */
13993 bool
13994 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13996 if (t == error_mark_node)
13997 return false;
13999 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14000 return true;
14002 switch (TREE_CODE (t))
14004 case SSA_NAME:
14005 /* Query VRP to see if it has recorded any information about
14006 the range of this object. */
14007 return ssa_name_nonnegative_p (t);
14009 case ABS_EXPR:
14010 /* We can't return 1 if flag_wrapv is set because
14011 ABS_EXPR<INT_MIN> = INT_MIN. */
14012 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14013 return true;
14014 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
14016 *strict_overflow_p = true;
14017 return true;
14019 break;
14021 case INTEGER_CST:
14022 return tree_int_cst_sgn (t) >= 0;
14024 case REAL_CST:
14025 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14027 case FIXED_CST:
14028 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14030 case POINTER_PLUS_EXPR:
14031 case PLUS_EXPR:
14032 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14033 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14034 strict_overflow_p)
14035 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14036 strict_overflow_p));
14038 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14039 both unsigned and at least 2 bits shorter than the result. */
14040 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14041 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14042 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14044 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14045 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14046 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14047 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14049 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14050 TYPE_PRECISION (inner2)) + 1;
14051 return prec < TYPE_PRECISION (TREE_TYPE (t));
14054 break;
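/* E.g. two zero-extended 8-bit values added in 32 bits: prec is
max (8, 8) + 1 = 9 < 32, and the largest possible sum 255 + 255 = 510
needs only 9 bits, so the sign bit of the result is never set. */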
14056 case MULT_EXPR:
14057 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14059 /* x * x for floating point x is always non-negative. */
14060 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
14061 return true;
14062 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14063 strict_overflow_p)
14064 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14065 strict_overflow_p));
14068 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14069 both unsigned and their total bits is shorter than the result. */
14070 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14071 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14072 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14074 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14075 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14076 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14077 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14078 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14079 < TYPE_PRECISION (TREE_TYPE (t));
14081 return false;
14083 case BIT_AND_EXPR:
14084 case MAX_EXPR:
14085 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14086 strict_overflow_p)
14087 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14088 strict_overflow_p));
14090 case BIT_IOR_EXPR:
14091 case BIT_XOR_EXPR:
14092 case MIN_EXPR:
14093 case RDIV_EXPR:
14094 case TRUNC_DIV_EXPR:
14095 case CEIL_DIV_EXPR:
14096 case FLOOR_DIV_EXPR:
14097 case ROUND_DIV_EXPR:
14098 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14099 strict_overflow_p)
14100 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14101 strict_overflow_p));
14103 case TRUNC_MOD_EXPR:
14104 case CEIL_MOD_EXPR:
14105 case FLOOR_MOD_EXPR:
14106 case ROUND_MOD_EXPR:
14107 case SAVE_EXPR:
14108 case NON_LVALUE_EXPR:
14109 case FLOAT_EXPR:
14110 case FIX_TRUNC_EXPR:
14111 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14112 strict_overflow_p);
14114 case COMPOUND_EXPR:
14115 case MODIFY_EXPR:
14116 case GIMPLE_MODIFY_STMT:
14117 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14118 strict_overflow_p);
14120 case BIND_EXPR:
14121 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14122 strict_overflow_p);
14124 case COND_EXPR:
14125 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14126 strict_overflow_p)
14127 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14128 strict_overflow_p));
14130 case NOP_EXPR:
14132 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14133 tree outer_type = TREE_TYPE (t);
14135 if (TREE_CODE (outer_type) == REAL_TYPE)
14137 if (TREE_CODE (inner_type) == REAL_TYPE)
14138 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14139 strict_overflow_p);
14140 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14142 if (TYPE_UNSIGNED (inner_type))
14143 return true;
14144 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14145 strict_overflow_p);
14148 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14150 if (TREE_CODE (inner_type) == REAL_TYPE)
14151 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
14152 strict_overflow_p);
14153 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14154 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14155 && TYPE_UNSIGNED (inner_type);
14158 break;
14160 case TARGET_EXPR:
14162 tree temp = TARGET_EXPR_SLOT (t);
14163 t = TARGET_EXPR_INITIAL (t);
14165 /* If the initializer is non-void, then it's a normal expression
14166 that will be assigned to the slot. */
14167 if (!VOID_TYPE_P (t))
14168 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14170 /* Otherwise, the initializer sets the slot in some way. One common
14171 way is an assignment statement at the end of the initializer. */
14172 while (1)
14174 if (TREE_CODE (t) == BIND_EXPR)
14175 t = expr_last (BIND_EXPR_BODY (t));
14176 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14177 || TREE_CODE (t) == TRY_CATCH_EXPR)
14178 t = expr_last (TREE_OPERAND (t, 0));
14179 else if (TREE_CODE (t) == STATEMENT_LIST)
14180 t = expr_last (t);
14181 else
14182 break;
14184 if ((TREE_CODE (t) == MODIFY_EXPR
14185 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14186 && GENERIC_TREE_OPERAND (t, 0) == temp)
14187 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14188 strict_overflow_p);
14190 return false;
14193 case CALL_EXPR:
14195 tree fndecl = get_callee_fndecl (t);
14196 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14197 switch (DECL_FUNCTION_CODE (fndecl))
14199 CASE_FLT_FN (BUILT_IN_ACOS):
14200 CASE_FLT_FN (BUILT_IN_ACOSH):
14201 CASE_FLT_FN (BUILT_IN_CABS):
14202 CASE_FLT_FN (BUILT_IN_COSH):
14203 CASE_FLT_FN (BUILT_IN_ERFC):
14204 CASE_FLT_FN (BUILT_IN_EXP):
14205 CASE_FLT_FN (BUILT_IN_EXP10):
14206 CASE_FLT_FN (BUILT_IN_EXP2):
14207 CASE_FLT_FN (BUILT_IN_FABS):
14208 CASE_FLT_FN (BUILT_IN_FDIM):
14209 CASE_FLT_FN (BUILT_IN_HYPOT):
14210 CASE_FLT_FN (BUILT_IN_POW10):
14211 CASE_INT_FN (BUILT_IN_FFS):
14212 CASE_INT_FN (BUILT_IN_PARITY):
14213 CASE_INT_FN (BUILT_IN_POPCOUNT):
14214 case BUILT_IN_BSWAP32:
14215 case BUILT_IN_BSWAP64:
14216 /* Always true. */
14217 return true;
14219 CASE_FLT_FN (BUILT_IN_SQRT):
14220 /* sqrt(-0.0) is -0.0. */
14221 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
14222 return true;
14223 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14224 strict_overflow_p);
14226 CASE_FLT_FN (BUILT_IN_ASINH):
14227 CASE_FLT_FN (BUILT_IN_ATAN):
14228 CASE_FLT_FN (BUILT_IN_ATANH):
14229 CASE_FLT_FN (BUILT_IN_CBRT):
14230 CASE_FLT_FN (BUILT_IN_CEIL):
14231 CASE_FLT_FN (BUILT_IN_ERF):
14232 CASE_FLT_FN (BUILT_IN_EXPM1):
14233 CASE_FLT_FN (BUILT_IN_FLOOR):
14234 CASE_FLT_FN (BUILT_IN_FMOD):
14235 CASE_FLT_FN (BUILT_IN_FREXP):
14236 CASE_FLT_FN (BUILT_IN_LCEIL):
14237 CASE_FLT_FN (BUILT_IN_LDEXP):
14238 CASE_FLT_FN (BUILT_IN_LFLOOR):
14239 CASE_FLT_FN (BUILT_IN_LLCEIL):
14240 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14241 CASE_FLT_FN (BUILT_IN_LLRINT):
14242 CASE_FLT_FN (BUILT_IN_LLROUND):
14243 CASE_FLT_FN (BUILT_IN_LRINT):
14244 CASE_FLT_FN (BUILT_IN_LROUND):
14245 CASE_FLT_FN (BUILT_IN_MODF):
14246 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14247 CASE_FLT_FN (BUILT_IN_RINT):
14248 CASE_FLT_FN (BUILT_IN_ROUND):
14249 CASE_FLT_FN (BUILT_IN_SCALB):
14250 CASE_FLT_FN (BUILT_IN_SCALBLN):
14251 CASE_FLT_FN (BUILT_IN_SCALBN):
14252 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14253 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14254 CASE_FLT_FN (BUILT_IN_SINH):
14255 CASE_FLT_FN (BUILT_IN_TANH):
14256 CASE_FLT_FN (BUILT_IN_TRUNC):
14257 /* True if the 1st argument is nonnegative. */
14258 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14259 strict_overflow_p);
14261 CASE_FLT_FN (BUILT_IN_FMAX):
14262 /* True if the 1st OR the 2nd argument is nonnegative. */
14263 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14264 strict_overflow_p)
14265 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14266 strict_overflow_p)));
14268 CASE_FLT_FN (BUILT_IN_FMIN):
14269 /* True if the 1st AND 2nd arguments are nonnegative. */
14270 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14271 strict_overflow_p)
14272 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14273 strict_overflow_p)));
14275 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14276 /* True if the 2nd argument is nonnegative. */
14277 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14278 strict_overflow_p);
14280 CASE_FLT_FN (BUILT_IN_POWI):
14281 /* True if the 1st argument is nonnegative or the second
14282 argument is an even integer. */
14283 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14285 tree arg1 = CALL_EXPR_ARG (t, 1);
14286 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14287 return true;
14289 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14290 strict_overflow_p);
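/* For illustration: powi (x, 4) is known nonnegative for any x because
   the constant exponent is even; powi (x, 3) is known nonnegative only
   when x itself can be shown nonnegative. */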
14292 CASE_FLT_FN (BUILT_IN_POW):
14293 /* True if the 1st argument is nonnegative or the second
14294 argument is an even integer valued real. */
14295 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14297 REAL_VALUE_TYPE c;
14298 HOST_WIDE_INT n;
14300 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14301 n = real_to_integer (&c);
14302 if ((n & 1) == 0)
14304 REAL_VALUE_TYPE cint;
14305 real_from_integer (&cint, VOIDmode, n,
14306 n < 0 ? -1 : 0, 0);
14307 if (real_identical (&c, &cint))
14308 return true;
14311 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14312 strict_overflow_p);
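/* For illustration: pow (x, 2.0) is known nonnegative for any x, since
   2.0 is an even integer valued real. An exponent such as 2.5 converts
   to n == 2 but fails the real_identical check, so the result then
   depends on the first argument being nonnegative (pow of a negative
   base with a non-integral exponent is a NaN in any case). */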
14314 default:
14315 break;
14319 /* ... fall through ... */
14321 default:
14323 tree type = TREE_TYPE (t);
14324 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14325 && truth_value_p (TREE_CODE (t)))
14326 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14327 have a signed:1 type (where the values are -1 and 0). */
14328 return true;
14332 /* We don't know the sign of `t', so be conservative and return false. */
14333 return false;
14336 /* Return true if `t' is known to be non-negative. Handle warnings
14337 about undefined signed overflow. */
14339 bool
14340 tree_expr_nonnegative_p (tree t)
14342 bool ret, strict_overflow_p;
14344 strict_overflow_p = false;
14345 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14346 if (strict_overflow_p)
14347 fold_overflow_warning (("assuming signed overflow does not occur when "
14348 "determining that expression is always "
14349 "non-negative"),
14350 WARN_STRICT_OVERFLOW_MISC);
14351 return ret;
14354 /* Return true when T is known to be nonzero; only integral and
14355 pointer types are handled here. Similar logic is present in
14356 nonzero_address_p in rtlanal.c.
14358 If the return value is based on the assumption that signed overflow
14359 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14360 change *STRICT_OVERFLOW_P. */
14362 bool
14363 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14365 tree type = TREE_TYPE (t);
14366 bool sub_strict_overflow_p;
14368 /* Doing something useful for floating point would need more work. */
14369 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14370 return false;
14372 switch (TREE_CODE (t))
14374 case SSA_NAME:
14375 /* Query VRP to see if it has recorded any information about
14376 the range of this object. */
14377 return ssa_name_nonzero_p (t);
14379 case ABS_EXPR:
14380 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14381 strict_overflow_p);
14383 case INTEGER_CST:
14384 return !integer_zerop (t);
14386 case POINTER_PLUS_EXPR:
14387 case PLUS_EXPR:
14388 if (TYPE_OVERFLOW_UNDEFINED (type))
14390 /* In the presence of negative values it is hard
14391 to say anything definite. */
14392 sub_strict_overflow_p = false;
14393 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14394 &sub_strict_overflow_p)
14395 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14396 &sub_strict_overflow_p))
14397 return false;
14398 /* One of the operands must be positive and the other non-negative. */
14399 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14400 overflows, on a twos-complement machine the sum of two
14401 nonnegative numbers can never be zero. */
14402 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14403 strict_overflow_p)
14404 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14405 strict_overflow_p));
14407 break;
14409 case MULT_EXPR:
14410 if (TYPE_OVERFLOW_UNDEFINED (type))
14412 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14413 strict_overflow_p)
14414 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14415 strict_overflow_p))
14417 *strict_overflow_p = true;
14418 return true;
14421 break;
14423 case NOP_EXPR:
14425 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14426 tree outer_type = TREE_TYPE (t);
14428 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14429 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14430 strict_overflow_p));
14432 break;
14434 case ADDR_EXPR:
14436 tree base = get_base_address (TREE_OPERAND (t, 0));
14438 if (!base)
14439 return false;
14441 /* Weak declarations may link to NULL. */
14442 if (VAR_OR_FUNCTION_DECL_P (base))
14443 return !DECL_WEAK (base);
14445 /* Constants are never weak. */
14446 if (CONSTANT_CLASS_P (base))
14447 return true;
14449 return false;
14452 case COND_EXPR:
14453 sub_strict_overflow_p = false;
14454 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14455 &sub_strict_overflow_p)
14456 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14457 &sub_strict_overflow_p))
14459 if (sub_strict_overflow_p)
14460 *strict_overflow_p = true;
14461 return true;
14463 break;
14465 case MIN_EXPR:
14466 sub_strict_overflow_p = false;
14467 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14468 &sub_strict_overflow_p)
14469 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14470 &sub_strict_overflow_p))
14471 {
14472 if (sub_strict_overflow_p)
14473 *strict_overflow_p = true;
/* MIN yields one of its operands, so if both are nonzero it is too. */
return true;
14474 }
14475 break;
14477 case MAX_EXPR:
14478 sub_strict_overflow_p = false;
14479 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14480 &sub_strict_overflow_p))
14481 {
14482 if (sub_strict_overflow_p)
14483 *strict_overflow_p = true;
14485 /* When both operands are nonzero, then MAX must be too. */
14486 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14487 strict_overflow_p))
14488 return true;
14490 /* MAX where operand 0 is positive is positive. */
14491 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14492 strict_overflow_p);
14493 }
14494 /* MAX where operand 1 is positive is positive. */
14495 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14496 &sub_strict_overflow_p)
14497 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14498 &sub_strict_overflow_p))
14499 {
14500 if (sub_strict_overflow_p)
14501 *strict_overflow_p = true;
14502 return true;
14503 }
14504 break;
14506 case COMPOUND_EXPR:
14507 case MODIFY_EXPR:
14508 case GIMPLE_MODIFY_STMT:
14509 case BIND_EXPR:
14510 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14511 strict_overflow_p);
14513 case SAVE_EXPR:
14514 case NON_LVALUE_EXPR:
14515 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14516 strict_overflow_p);
14518 case BIT_IOR_EXPR:
14519 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14520 strict_overflow_p)
14521 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14522 strict_overflow_p));
14524 case CALL_EXPR:
14525 return alloca_call_p (t);
14527 default:
14528 break;
14530 return false;
14533 /* Return true when T is known to be nonzero.
14534 Handle warnings about undefined signed overflow. */
14536 bool
14537 tree_expr_nonzero_p (tree t)
14539 bool ret, strict_overflow_p;
14541 strict_overflow_p = false;
14542 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14543 if (strict_overflow_p)
14544 fold_overflow_warning (("assuming signed overflow does not occur when "
14545 "determining that expression is always "
14546 "non-zero"),
14547 WARN_STRICT_OVERFLOW_MISC);
14548 return ret;
14551 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14552 attempt to fold the expression to a constant without modifying TYPE,
14553 OP0 or OP1.
14555 If the expression could be simplified to a constant, then return
14556 the constant. If the expression cannot be simplified to a
14557 constant, then return NULL_TREE. */
14559 tree
14560 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14562 tree tem = fold_binary (code, type, op0, op1);
14563 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14566 /* Given the components of a unary expression CODE, TYPE and OP0,
14567 attempt to fold the expression to a constant without modifying
14568 TYPE or OP0.
14570 If the expression could be simplified to a constant, then return
14571 the constant. If the expression cannot be simplified to a
14572 constant, then return NULL_TREE. */
14574 tree
14575 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14577 tree tem = fold_unary (code, type, op0);
14578 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
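/* A minimal usage sketch for the two helpers above, assuming the usual
   tree-building routines from tree.h:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         two, three);

   Here SUM is the INTEGER_CST 5; had either operand been non-constant,
   the result would have been NULL_TREE. */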
14581 /* If EXP represents referencing an element in a constant string
14582 (either via pointer arithmetic or array indexing), return the
14583 tree representing the value accessed, otherwise return NULL. */
14585 tree
14586 fold_read_from_constant_string (tree exp)
14588 if ((TREE_CODE (exp) == INDIRECT_REF
14589 || TREE_CODE (exp) == ARRAY_REF)
14590 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14592 tree exp1 = TREE_OPERAND (exp, 0);
14593 tree index;
14594 tree string;
14596 if (TREE_CODE (exp) == INDIRECT_REF)
14597 string = string_constant (exp1, &index);
14598 else
14600 tree low_bound = array_ref_low_bound (exp);
14601 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14603 /* Optimize the special case of a zero lower bound.
14605 We convert the low_bound to sizetype to avoid some problems
14606 with constant folding. (E.g. suppose the lower bound is 1,
14607 and its mode is QI. Without the conversion, (ARRAY
14608 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14609 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14610 if (! integer_zerop (low_bound))
14611 index = size_diffop (index, fold_convert (sizetype, low_bound));
14613 string = exp1;
14616 if (string
14617 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14618 && TREE_CODE (string) == STRING_CST
14619 && TREE_CODE (index) == INTEGER_CST
14620 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14621 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14622 == MODE_INT)
14623 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14624 return build_int_cst_type (TREE_TYPE (exp),
14625 (TREE_STRING_POINTER (string)
14626 [TREE_INT_CST_LOW (index)]));
14628 return NULL;
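/* For illustration: given the GENERIC form of "abc"[1] -- an ARRAY_REF
   whose base is a STRING_CST -- the routine above returns an INTEGER_CST
   holding 'b'. An index at or beyond TREE_STRING_LENGTH, or a string
   whose element size is not one byte, yields NULL instead. */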
14631 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14632 an integer constant, real, or fixed-point constant.
14634 TYPE is the type of the result. */
14636 static tree
14637 fold_negate_const (tree arg0, tree type)
14639 tree t = NULL_TREE;
14641 switch (TREE_CODE (arg0))
14643 case INTEGER_CST:
14645 unsigned HOST_WIDE_INT low;
14646 HOST_WIDE_INT high;
14647 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14648 TREE_INT_CST_HIGH (arg0),
14649 &low, &high);
14650 t = force_fit_type_double (type, low, high, 1,
14651 (overflow | TREE_OVERFLOW (arg0))
14652 && !TYPE_UNSIGNED (type));
14653 break;
14656 case REAL_CST:
14657 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14658 break;
14660 case FIXED_CST:
14662 FIXED_VALUE_TYPE f;
14663 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14664 &(TREE_FIXED_CST (arg0)), NULL,
14665 TYPE_SATURATING (type));
14666 t = build_fixed (type, f);
14667 /* Propagate overflow flags. */
14668 if (overflow_p | TREE_OVERFLOW (arg0))
14670 TREE_OVERFLOW (t) = 1;
14671 TREE_CONSTANT_OVERFLOW (t) = 1;
14673 else if (TREE_CONSTANT_OVERFLOW (arg0))
14674 TREE_CONSTANT_OVERFLOW (t) = 1;
14675 break;
14678 default:
14679 gcc_unreachable ();
14682 return t;
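/* Note on the INTEGER_CST case above: negating the most negative value
   of a signed type (e.g. INT_MIN) wraps to itself, so neg_double reports
   overflow and the result is built with TREE_OVERFLOW set. The
   `&& !TYPE_UNSIGNED (type)' guard suppresses the flag for unsigned
   types, where wrapping negation is well defined. */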
14685 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14686 an integer constant or real constant.
14688 TYPE is the type of the result. */
14690 tree
14691 fold_abs_const (tree arg0, tree type)
14693 tree t = NULL_TREE;
14695 switch (TREE_CODE (arg0))
14697 case INTEGER_CST:
14698 /* If the value is unsigned, then the absolute value is
14699 the same as the ordinary value. */
14700 if (TYPE_UNSIGNED (type))
14701 t = arg0;
14702 /* Similarly, if the value is non-negative. */
14703 else if (INT_CST_LT (integer_minus_one_node, arg0))
14704 t = arg0;
14705 /* If the value is negative, then the absolute value is
14706 its negation. */
14707 else
14709 unsigned HOST_WIDE_INT low;
14710 HOST_WIDE_INT high;
14711 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14712 TREE_INT_CST_HIGH (arg0),
14713 &low, &high);
14714 t = force_fit_type_double (type, low, high, -1,
14715 overflow | TREE_OVERFLOW (arg0));
14717 break;
14719 case REAL_CST:
14720 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14721 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14722 else
14723 t = arg0;
14724 break;
14726 default:
14727 gcc_unreachable ();
14730 return t;
14733 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14734 constant. TYPE is the type of the result. */
14736 static tree
14737 fold_not_const (tree arg0, tree type)
14739 tree t = NULL_TREE;
14741 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14743 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14744 ~TREE_INT_CST_HIGH (arg0), 0,
14745 TREE_OVERFLOW (arg0));
14747 return t;
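/* For illustration: complementing the low and high words independently
   is exactly one's complement of the double-word value, so fold_not_const
   on the constant 0 yields the all-ones constant (-1 in a signed type),
   and applying it twice gives back the original constant. */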
14750 /* Given CODE, a relational operator, the target type, TYPE and two
14751 constant operands OP0 and OP1, return the result of the
14752 relational operation. If the result is not a compile time
14753 constant, then return NULL_TREE. */
14755 static tree
14756 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14758 int result, invert;
14760 /* From here on, the only cases we handle are when the result is
14761 known to be a constant. */
14763 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14765 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14766 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14768 /* Handle the cases where either operand is a NaN. */
14769 if (real_isnan (c0) || real_isnan (c1))
14771 switch (code)
14773 case EQ_EXPR:
14774 case ORDERED_EXPR:
14775 result = 0;
14776 break;
14778 case NE_EXPR:
14779 case UNORDERED_EXPR:
14780 case UNLT_EXPR:
14781 case UNLE_EXPR:
14782 case UNGT_EXPR:
14783 case UNGE_EXPR:
14784 case UNEQ_EXPR:
14785 result = 1;
14786 break;
14788 case LT_EXPR:
14789 case LE_EXPR:
14790 case GT_EXPR:
14791 case GE_EXPR:
14792 case LTGT_EXPR:
14793 if (flag_trapping_math)
14794 return NULL_TREE;
14795 result = 0;
14796 break;
14798 default:
14799 gcc_unreachable ();
14802 return constant_boolean_node (result, type);
14805 return constant_boolean_node (real_compare (code, c0, c1), type);
14808 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14810 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14811 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14812 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14815 /* Handle equality/inequality of complex constants. */
14816 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14818 tree rcond = fold_relational_const (code, type,
14819 TREE_REALPART (op0),
14820 TREE_REALPART (op1));
14821 tree icond = fold_relational_const (code, type,
14822 TREE_IMAGPART (op0),
14823 TREE_IMAGPART (op1));
14824 if (code == EQ_EXPR)
14825 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14826 else if (code == NE_EXPR)
14827 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14828 else
14829 return NULL_TREE;
14832 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14834 To compute GT, swap the arguments and do LT.
14835 To compute GE, do LT and invert the result.
14836 To compute LE, swap the arguments, do LT and invert the result.
14837 To compute NE, do EQ and invert the result.
14839 Therefore, the code below must handle only EQ and LT. */
14841 if (code == LE_EXPR || code == GT_EXPR)
14843 tree tem = op0;
14844 op0 = op1;
14845 op1 = tem;
14846 code = swap_tree_comparison (code);
14849 /* Note that it is safe to invert for real values here because we
14850 have already handled the one case where it matters. */
14852 invert = 0;
14853 if (code == NE_EXPR || code == GE_EXPR)
14855 invert = 1;
14856 code = invert_tree_comparison (code, false);
14859 /* Compute a result for LT or EQ if args permit;
14860 otherwise return NULL_TREE. */
14861 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14863 if (code == EQ_EXPR)
14864 result = tree_int_cst_equal (op0, op1);
14865 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14866 result = INT_CST_LT_UNSIGNED (op0, op1);
14867 else
14868 result = INT_CST_LT (op0, op1);
14870 else
14871 return NULL_TREE;
14873 if (invert)
14874 result ^= 1;
14875 return constant_boolean_node (result, type);
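/* For illustration: with INTEGER_CSTs op0 = 2, op1 = 3 and code GE_EXPR,
   the canonicalization above turns GE into LT with `invert' set; 2 < 3
   is true, so after inversion the routine returns the boolean constant
   for false, as 2 >= 3 does not hold. */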
14878 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14879 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14880 itself. */
14882 tree
14883 fold_build_cleanup_point_expr (tree type, tree expr)
14885 /* If the expression does not have side effects then we don't have to wrap
14886 it with a cleanup point expression. */
14887 if (!TREE_SIDE_EFFECTS (expr))
14888 return expr;
14890 /* If the expression is a return, check whether the expression inside
14891 the return, or the right-hand side of the modify expression inside
14892 the return, has no side effects. If either has none, we don't need
14893 to wrap the expression in a cleanup point expression. Note we don't
14894 check the left-hand side of the modify: it should always be the return decl. */
14895 if (TREE_CODE (expr) == RETURN_EXPR)
14897 tree op = TREE_OPERAND (expr, 0);
14898 if (!op || !TREE_SIDE_EFFECTS (op))
14899 return expr;
14900 op = TREE_OPERAND (op, 1);
14901 if (!TREE_SIDE_EFFECTS (op))
14902 return expr;
14905 return build1 (CLEANUP_POINT_EXPR, type, expr);
14908 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14909 of an indirection through OP0, or NULL_TREE if no simplification is
14910 possible. */
14912 tree
14913 fold_indirect_ref_1 (tree type, tree op0)
14915 tree sub = op0;
14916 tree subtype;
14918 STRIP_NOPS (sub);
14919 subtype = TREE_TYPE (sub);
14920 if (!POINTER_TYPE_P (subtype))
14921 return NULL_TREE;
14923 if (TREE_CODE (sub) == ADDR_EXPR)
14925 tree op = TREE_OPERAND (sub, 0);
14926 tree optype = TREE_TYPE (op);
14927 /* *&CONST_DECL -> the value of the const decl. */
14928 if (TREE_CODE (op) == CONST_DECL)
14929 return DECL_INITIAL (op);
14930 /* *&p => p; make sure to handle *&"str"[cst] here. */
14931 if (type == optype)
14933 tree fop = fold_read_from_constant_string (op);
14934 if (fop)
14935 return fop;
14936 else
14937 return op;
14939 /* *(foo *)&fooarray => fooarray[0] */
14940 else if (TREE_CODE (optype) == ARRAY_TYPE
14941 && type == TREE_TYPE (optype))
14943 tree type_domain = TYPE_DOMAIN (optype);
14944 tree min_val = size_zero_node;
14945 if (type_domain && TYPE_MIN_VALUE (type_domain))
14946 min_val = TYPE_MIN_VALUE (type_domain);
14947 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14949 /* *(foo *)&complexfoo => __real__ complexfoo */
14950 else if (TREE_CODE (optype) == COMPLEX_TYPE
14951 && type == TREE_TYPE (optype))
14952 return fold_build1 (REALPART_EXPR, type, op);
14953 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14954 else if (TREE_CODE (optype) == VECTOR_TYPE
14955 && type == TREE_TYPE (optype))
14957 tree part_width = TYPE_SIZE (type);
14958 tree index = bitsize_int (0);
14959 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14963 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14964 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14965 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14967 tree op00 = TREE_OPERAND (sub, 0);
14968 tree op01 = TREE_OPERAND (sub, 1);
14969 tree op00type;
14971 STRIP_NOPS (op00);
14972 op00type = TREE_TYPE (op00);
14973 if (TREE_CODE (op00) == ADDR_EXPR
14974 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14975 && type == TREE_TYPE (TREE_TYPE (op00type)))
14977 tree size = TYPE_SIZE_UNIT (type);
14978 if (tree_int_cst_equal (size, op01))
14979 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14983 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14984 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14985 && type == TREE_TYPE (TREE_TYPE (subtype)))
14987 tree type_domain;
14988 tree min_val = size_zero_node;
14989 sub = build_fold_indirect_ref (sub);
14990 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14991 if (type_domain && TYPE_MIN_VALUE (type_domain))
14992 min_val = TYPE_MIN_VALUE (type_domain);
14993 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14996 return NULL_TREE;
14999 /* Builds an expression for an indirection through T, simplifying some
15000 cases. */
15002 tree
15003 build_fold_indirect_ref (tree t)
15005 tree type = TREE_TYPE (TREE_TYPE (t));
15006 tree sub = fold_indirect_ref_1 (type, t);
15008 if (sub)
15009 return sub;
15010 else
15011 return build1 (INDIRECT_REF, type, t);
15014 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15016 tree
15017 fold_indirect_ref (tree t)
15019 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15021 if (sub)
15022 return sub;
15023 else
15024 return t;
15027 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15028 whose result is ignored. The type of the returned tree need not be
15029 the same as the original expression. */
15031 tree
15032 fold_ignored_result (tree t)
15034 if (!TREE_SIDE_EFFECTS (t))
15035 return integer_zero_node;
15037 for (;;)
15038 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15040 case tcc_unary:
15041 t = TREE_OPERAND (t, 0);
15042 break;
15044 case tcc_binary:
15045 case tcc_comparison:
15046 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15047 t = TREE_OPERAND (t, 0);
15048 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15049 t = TREE_OPERAND (t, 1);
15050 else
15051 return t;
15052 break;
15054 case tcc_expression:
15055 switch (TREE_CODE (t))
15057 case COMPOUND_EXPR:
15058 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15059 return t;
15060 t = TREE_OPERAND (t, 0);
15061 break;
15063 case COND_EXPR:
15064 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15065 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15066 return t;
15067 t = TREE_OPERAND (t, 0);
15068 break;
15070 default:
15071 return t;
15073 break;
15075 default:
15076 return t;
15080 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15081 This can only be applied to objects of a sizetype. */
15083 tree
15084 round_up (tree value, int divisor)
15086 tree div = NULL_TREE;
15088 gcc_assert (divisor > 0);
15089 if (divisor == 1)
15090 return value;
15092 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15093 have to do anything. Only do this check when VALUE is not a
15094 constant, because for a constant the check is more expensive than
15095 simply doing the rounding. */
15096 if (TREE_CODE (value) != INTEGER_CST)
15098 div = build_int_cst (TREE_TYPE (value), divisor);
15100 if (multiple_of_p (TREE_TYPE (value), value, div))
15101 return value;
15104 /* If divisor is a power of two, simplify this to bit manipulation. */
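/* For illustration: on this path the computation amounts to
   (VALUE + DIVISOR - 1) & -DIVISOR. E.g. rounding 37 up to a multiple
   of 8 computes (37 & ~7) + 8 == 40, while a value that is already a
   multiple, such as 40, is returned unchanged. */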
15105 if (divisor == (divisor & -divisor))
15107 if (TREE_CODE (value) == INTEGER_CST)
15109 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15110 unsigned HOST_WIDE_INT high;
15111 bool overflow_p;
15113 if ((low & (divisor - 1)) == 0)
15114 return value;
15116 overflow_p = TREE_OVERFLOW (value);
15117 high = TREE_INT_CST_HIGH (value);
15118 low &= ~(divisor - 1);
15119 low += divisor;
15120 if (low == 0)
15122 high++;
15123 if (high == 0)
15124 overflow_p = true;
15127 return force_fit_type_double (TREE_TYPE (value), low, high,
15128 -1, overflow_p);
15130 else
15132 tree t;
15134 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15135 value = size_binop (PLUS_EXPR, value, t);
15136 t = build_int_cst (TREE_TYPE (value), -divisor);
15137 value = size_binop (BIT_AND_EXPR, value, t);
15140 else
15142 if (!div)
15143 div = build_int_cst (TREE_TYPE (value), divisor);
15144 value = size_binop (CEIL_DIV_EXPR, value, div);
15145 value = size_binop (MULT_EXPR, value, div);
15148 return value;
15151 /* Likewise, but round down. */
15153 tree
15154 round_down (tree value, int divisor)
15156 tree div = NULL_TREE;
15158 gcc_assert (divisor > 0);
15159 if (divisor == 1)
15160 return value;
15162 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15163 have to do anything. Only do this check when VALUE is not a
15164 constant, because for a constant the check is more expensive than
15165 simply doing the rounding. */
15166 if (TREE_CODE (value) != INTEGER_CST)
15168 div = build_int_cst (TREE_TYPE (value), divisor);
15170 if (multiple_of_p (TREE_TYPE (value), value, div))
15171 return value;
15174 /* If divisor is a power of two, simplify this to bit manipulation. */
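/* For illustration: rounding down needs no carry handling, so the
   power-of-two case is simply VALUE & -DIVISOR; e.g. rounding 37 down
   to a multiple of 8 gives 37 & -8 == 32. */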
15175 if (divisor == (divisor & -divisor))
15177 tree t;
15179 t = build_int_cst (TREE_TYPE (value), -divisor);
15180 value = size_binop (BIT_AND_EXPR, value, t);
15182 else
15184 if (!div)
15185 div = build_int_cst (TREE_TYPE (value), divisor);
15186 value = size_binop (FLOOR_DIV_EXPR, value, div);
15187 value = size_binop (MULT_EXPR, value, div);
15190 return value;
15193 /* Returns the pointer to the base of the object addressed by EXP and
15194 extracts the information about the offset of the access, storing it
15195 in PBITPOS and POFFSET. */
15197 static tree
15198 split_address_to_core_and_offset (tree exp,
15199 HOST_WIDE_INT *pbitpos, tree *poffset)
15201 tree core;
15202 enum machine_mode mode;
15203 int unsignedp, volatilep;
15204 HOST_WIDE_INT bitsize;
15206 if (TREE_CODE (exp) == ADDR_EXPR)
15208 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15209 poffset, &mode, &unsignedp, &volatilep,
15210 false);
15211 core = fold_addr_expr (core);
15213 else
15215 core = exp;
15216 *pbitpos = 0;
15217 *poffset = NULL_TREE;
15220 return core;
15223 /* Returns true if addresses of E1 and E2 differ by a constant, false
15224 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15226 bool
15227 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15229 tree core1, core2;
15230 HOST_WIDE_INT bitpos1, bitpos2;
15231 tree toffset1, toffset2, tdiff, type;
15233 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15234 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15236 if (bitpos1 % BITS_PER_UNIT != 0
15237 || bitpos2 % BITS_PER_UNIT != 0
15238 || !operand_equal_p (core1, core2, 0))
15239 return false;
15241 if (toffset1 && toffset2)
15243 type = TREE_TYPE (toffset1);
15244 if (type != TREE_TYPE (toffset2))
15245 toffset2 = fold_convert (type, toffset2);
15247 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15248 if (!cst_and_fits_in_hwi (tdiff))
15249 return false;
15251 *diff = int_cst_value (tdiff);
15253 else if (toffset1 || toffset2)
15255 /* If only one of the offsets is non-constant, the difference cannot
15256 be a constant. */
15257 return false;
15259 else
15260 *diff = 0;
15262 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15263 return true;
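/* For illustration: given `int a[10];', the addresses &a[3] and &a[1]
   share the core &a and differ only by constant offsets, so the routine
   stores 2 * sizeof (int) in *DIFF and returns true. With a variable
   index, as in &a[i] versus &a[1], the offsets are not both constant
   and the routine returns false. */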
15266 /* Simplify the floating point expression EXP when the sign of the
15267 result is not significant. Return NULL_TREE if no simplification
15268 is possible. */
15270 tree
15271 fold_strip_sign_ops (tree exp)
15273 tree arg0, arg1;
15275 switch (TREE_CODE (exp))
15277 case ABS_EXPR:
15278 case NEGATE_EXPR:
15279 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15280 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15282 case MULT_EXPR:
15283 case RDIV_EXPR:
15284 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15285 return NULL_TREE;
15286 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15287 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15288 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15289 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15290 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15291 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15292 break;
15294 case COMPOUND_EXPR:
15295 arg0 = TREE_OPERAND (exp, 0);
15296 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15297 if (arg1)
15298 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15299 break;
15301 case COND_EXPR:
15302 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15303 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15304 if (arg0 || arg1)
15305 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15306 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15307 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15308 break;
15310 case CALL_EXPR:
15312 const enum built_in_function fcode = builtin_mathfn_code (exp);
15313 switch (fcode)
15315 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15316 /* Strip copysign function call, return the 1st argument. */
15317 arg0 = CALL_EXPR_ARG (exp, 0);
15318 arg1 = CALL_EXPR_ARG (exp, 1);
15319 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15321 default:
15322 /* Strip sign ops from the argument of "odd" math functions. */
15323 if (negate_mathfn_p (fcode))
15325 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15326 if (arg0)
15327 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15329 break;
15332 break;
15334 default:
15335 break;
15337 return NULL_TREE;
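/* For illustration: this routine serves contexts that discard the sign
   of the result, e.g. folding fabs (expr) or expr * expr. copysign (x, y)
   strips to x, whose magnitude is the same, and for an odd function such
   as sin, where sin (-x) == -sin (x), a negation can be dropped from the
   argument without changing the magnitude of the result. */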