gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
45 sets TREE_OVERFLOW.
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "flags.h"
56 #include "tree.h"
57 #include "real.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "toplev.h"
62 #include "intl.h"
63 #include "ggc.h"
64 #include "hashtab.h"
65 #include "langhooks.h"
66 #include "md5.h"
68 /* Non-zero if we are folding constants inside an initializer; zero
69 otherwise. */
70 int folding_initializer = 0;
72 /* The following constants represent a bit-based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
76 COMPCODE_FALSE = 0,
77 COMPCODE_LT = 1,
78 COMPCODE_EQ = 2,
79 COMPCODE_LE = 3,
80 COMPCODE_GT = 4,
81 COMPCODE_LTGT = 5,
82 COMPCODE_GE = 6,
83 COMPCODE_ORD = 7,
84 COMPCODE_UNORD = 8,
85 COMPCODE_UNLT = 9,
86 COMPCODE_UNEQ = 10,
87 COMPCODE_UNLE = 11,
88 COMPCODE_UNGT = 12,
89 COMPCODE_NE = 13,
90 COMPCODE_UNGE = 14,
91 COMPCODE_TRUE = 15
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
116 tree *, tree *);
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 tree);
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
150 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
151 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
152 and SUM1. Then this yields nonzero if overflow occurred during the
153 addition.
155 Overflow occurs if A and B have the same sign, but A and SUM differ in
156 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
157 sign. */
158 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
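/* A minimal standalone sketch of the test above, using `long' in place
   of HOST_WIDE_INT (compiled out via #if 0). The sum is computed in
   unsigned arithmetic so the example itself avoids signed overflow. */
#if 0
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  long a = LONG_MAX, b = 1;
  long sum = (long) ((unsigned long) a + (unsigned long) b);
  /* a and b share a sign but sum differs: overflow, prints 1. */
  printf ("%d\n", (~(a ^ b) & (a ^ sum)) < 0);
  b = -1;
  sum = (long) ((unsigned long) a + (unsigned long) b);
  /* Opposite signs can never overflow: prints 0. */
  printf ("%d\n", (~(a ^ b) & (a ^ sum)) < 0);
  return 0;
}
#endif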
160 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
161 We do that by representing the two-word integer in 4 words, with only
162 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
163 number. The value of the word is LOWPART + HIGHPART * BASE. */
165 #define LOWPART(x) \
166 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
167 #define HIGHPART(x) \
168 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
169 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
175 static void
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
188 static void
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 HOST_WIDE_INT *hi)
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
207 unsigned int prec;
208 int sign_extended_type;
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = POINTER_SIZE;
213 else
214 prec = TYPE_PRECISION (type);
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 else
228 h1 = 0;
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)l1 < 0)
248 h1 = -1;
250 else
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
255 h1 = -1;
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
260 *lv = l1;
261 *hv = h1;
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
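/* A single-word sketch of the truncate-then-sign-extend steps for an
   8-bit signed type (compiled out via #if 0): forcing 0x1ff into it
   clears the bits above bit 7, then extends bit 7, giving -1; since
   the value changed, the operation reports overflow. */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int prec = 8;
  unsigned long l1 = 0x1ff, low0 = l1;
  l1 &= ~((unsigned long) -1 << prec);          /* clear bits beyond prec: 0xff */
  if (l1 & ((unsigned long) 1 << (prec - 1)))   /* sign bit set: extend it */
    l1 |= (unsigned long) -1 << prec;
  printf ("%ld %d\n", (long) l1, l1 != low0);   /* -1 1 (overflowed) */
  return 0;
}
#endif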
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOW if
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs,
278 or OVERFLOWABLE is <0 and any overflow occurs.
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
282 tree
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
285 bool overflowed)
287 int sign_extended_type;
288 bool overflow;
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
295 overflow = fit_double_type (low, high, &low, &high, type);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
300 if (overflowed
301 || overflowable < 0
302 || (overflowable > 0 && sign_extended_type))
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
309 return t;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 bool unsigned_p)
329 unsigned HOST_WIDE_INT l;
330 HOST_WIDE_INT h;
332 l = l1 + l2;
333 h = h1 + h2 + (l < l1);
335 *lv = l;
336 *hv = h;
338 if (unsigned_p)
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
340 else
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
353 if (l1 == 0)
355 *lv = 0;
356 *hv = - h1;
357 return (*hv & h1) < 0;
359 else
361 *lv = -l1;
362 *hv = ~h1;
363 return 0;
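/* A single-word sketch of the overflow test above (compiled out via
   #if 0): negating the most negative value wraps back to itself, so
   the sign bit survives the AND of the operand and the result. */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  int64_t h1 = INT64_MIN;
  int64_t hv = (int64_t) - (uint64_t) h1;  /* unsigned negate avoids UB */
  printf ("%d\n", (hv & h1) < 0);          /* 1: overflow detected */
  h1 = 42;
  hv = (int64_t) - (uint64_t) h1;
  printf ("%d\n", (hv & h1) < 0);          /* 0: -42 is representable */
  return 0;
}
#endif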
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
377 bool unsigned_p)
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
383 int i, j, k;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
394 carry = 0;
395 for (j = 0; j < 4; j++)
397 k = i + j;
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
401 carry += prod[k];
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
405 prod[i + 4] = carry;
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
412 if (unsigned_p)
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
417 if (h1 < 0)
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 if (h2 < 0)
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
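/* A scaled-down sketch of the same schoolbook multiply (compiled out
   via #if 0): 16-bit operands split into 8-bit digits, partial
   products accumulated with carries exactly as in the loop above. */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint16_t a = 0x1234, b = 0x00ff;
  uint8_t d1[2] = { a & 0xff, a >> 8 };
  uint8_t d2[2] = { b & 0xff, b >> 8 };
  uint16_t prod[4] = { 0, 0, 0, 0 };
  int i, j;

  for (i = 0; i < 2; i++)
    {
      uint16_t carry = 0;
      for (j = 0; j < 2; j++)
        {
          /* Digit product plus prior digit plus carry fits in 16 bits. */
          carry += (uint16_t) (d1[i] * d2[j]) + prod[i + j];
          prod[i + j] = carry & 0xff;
          carry >>= 8;
        }
      prod[i + 2] = carry;
    }

  printf ("%#lx\n", (unsigned long) (prod[0] | (prod[1] << 8)
          | ((uint32_t) prod[2] << 16) | ((uint32_t) prod[3] << 24)));
  /* prints 0x1221cc == 0x1234 * 0xff */
  return 0;
}
#endif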
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
436 void
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
443 if (count < 0)
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 return;
449 if (SHIFT_COUNT_TRUNCATED)
450 count %= prec;
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
456 *hv = 0;
457 *lv = 0;
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
462 *lv = 0;
464 else
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 *lv = l1 << count;
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
485 else
487 *hv = signmask;
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
498 void
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502 int arith)
504 unsigned HOST_WIDE_INT signmask;
506 signmask = (arith
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508 : 0);
510 if (SHIFT_COUNT_TRUNCATED)
511 count %= prec;
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
517 *hv = 0;
518 *lv = 0;
520 else if (count >= HOST_BITS_PER_WIDE_INT)
522 *hv = 0;
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
525 else
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
528 *lv = ((l1 >> count)
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
536 *hv = signmask;
537 *lv = signmask;
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
546 else
548 *hv = signmask;
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
559 void
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
567 count %= prec;
568 if (count < 0)
569 count += prec;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 *lv = s1l | s2l;
574 *hv = s1h | s2h;
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
581 void
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
589 count %= prec;
590 if (count < 0)
591 count += prec;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595 *lv = s1l | s2l;
596 *hv = s1h | s2h;
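/* A single-word sketch of the rotate construction (compiled out via
   #if 0): within PREC bits, a rotate is the OR of the two opposite
   shifts by COUNT and PREC - COUNT, just as in the two calls above. */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  unsigned int prec = 8, count = 3;
  uint8_t x = 0xa5;                 /* 1010 0101 */
  uint8_t r = (uint8_t) ((x << count) | (x >> (prec - count)));
  printf ("%#x\n", r);              /* 0x2d: 0010 1101 */
  return 0;
}
#endif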
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
603 or EXACT_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT *hrem)
618 int quo_neg = 0;
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
621 int i, j;
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
628 int overflow = 0;
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
634 if (!uns)
636 if (hnum < 0)
638 quo_neg = ~ quo_neg;
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
642 overflow = 1;
644 if (hden < 0)
646 quo_neg = ~ quo_neg;
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
653 *hquo = *hrem = 0;
654 /* This unsigned division rounds toward zero. */
655 *lquo = lnum / lden;
656 goto finish_up;
659 if (hnum == 0)
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
662 *hquo = *lquo = 0;
663 *hrem = hnum;
664 *lrem = lnum;
665 goto finish_up;
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
684 carry = work % lden;
687 else
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
696 if (den[i] != 0)
698 den_hi_sig = i;
699 break;
702 /* Ensure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
706 if (scale > 1)
707 { /* scale divisor and dividend */
708 carry = 0;
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
716 num[4] = carry;
717 carry = 0;
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
727 num_hi_sig = 4;
729 /* Main loop */
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
741 else
742 quo_est = BASE - 1;
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
746 if (tmp < BASE
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
749 quo_est--;
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
755 carry = 0;
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
769 quo_est--;
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
782 quo[i] = quo_est;
786 decode (quo, lquo, hquo);
788 finish_up:
789 /* If result is negative, make it so. */
790 if (quo_neg)
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
798 switch (code)
800 case TRUNC_DIV_EXPR:
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 return overflow;
805 case FLOOR_DIV_EXPR:
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
809 /* quo = quo - 1; */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
811 lquo, hquo);
813 else
814 return overflow;
815 break;
817 case CEIL_DIV_EXPR:
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822 lquo, hquo);
824 else
825 return overflow;
826 break;
828 case ROUND_DIV_EXPR:
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
837 if (*hrem < 0)
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
839 if (hden < 0)
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, &ltwice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
852 if (*hquo < 0)
853 /* quo = quo - 1; */
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856 else
857 /* quo = quo + 1; */
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
859 lquo, hquo);
861 else
862 return overflow;
864 break;
866 default:
867 gcc_unreachable ();
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 return overflow;
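/* A single-word sketch of how the rounding kinds differ (compiled out
   via #if 0): starting from C's truncating division, the FLOOR and
   CEIL adjustments mirror the quotient fix-ups applied above. */
#if 0
#include <stdio.h>

int
main (void)
{
  long num = -7, den = 2;
  long quo = num / den, rem = num % den;                   /* trunc: -3 rem -1 */
  long fquo = quo - (rem != 0 && (rem < 0) != (den < 0));  /* floor: -4 */
  long cquo = quo + (rem != 0 && (rem < 0) == (den < 0));  /* ceil:  -3 */
  printf ("%ld %ld %ld\n", quo, fquo, cquo);               /* -3 -4 -3 */
  /* In each case num == quo * den + rem for the matching remainder. */
  return 0;
}
#endif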
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
881 static tree
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
898 if (remh != 0 || reml != 0)
899 return NULL_TREE;
901 return build_int_cst_wide (type, quol, quoh);
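/* A scalar sketch of the contract (compiled out via #if 0): the
   quotient is produced only when the remainder is exactly zero. */
#if 0
#include <stdio.h>

static int
div_if_exact (long a, long b, long *quo)  /* stand-in for the tree version */
{
  if (a % b != 0)
    return 0;           /* the real function returns NULL_TREE here */
  *quo = a / b;
  return 1;
}

int
main (void)
{
  long q;
  printf ("%d\n", div_if_exact (12, 4, &q));  /* 1, q == 3 */
  printf ("%d\n", div_if_exact (13, 4, &q));  /* 0 */
  return 0;
}
#endif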
904 /* This is non-zero if we should defer warnings about undefined
905 overflow. This facility exists because these warnings are a
906 special case. The code to estimate loop iterations does not want
907 to issue any warnings, since it works with expressions which do not
908 occur in user code. Various bits of cleanup code call fold(), but
909 only use the result if it has certain characteristics (e.g., is a
910 constant); that code only wants to issue a warning if the result is
911 used. */
913 static int fold_deferring_overflow_warnings;
915 /* If a warning about undefined overflow is deferred, this is the
916 warning. Note that this may cause us to turn two warnings into
917 one, but that is fine since it is sufficient to only give one
918 warning per expression. */
920 static const char* fold_deferred_overflow_warning;
922 /* If a warning about undefined overflow is deferred, this is the
923 level at which the warning should be emitted. */
925 static enum warn_strict_overflow_code fold_deferred_overflow_code;
927 /* Start deferring overflow warnings. We could use a stack here to
928 permit nested calls, but at present it is not necessary. */
930 void
931 fold_defer_overflow_warnings (void)
933 ++fold_deferring_overflow_warnings;
936 /* Stop deferring overflow warnings. If there is a pending warning,
937 and ISSUE is true, then issue the warning if appropriate. STMT is
938 the statement with which the warning should be associated (used for
939 location information); STMT may be NULL. CODE is the level of the
940 warning--a warn_strict_overflow_code value. This function will use
941 the smaller of CODE and the deferred code when deciding whether to
942 issue the warning. CODE may be zero to mean to always use the
943 deferred code. */
945 void
946 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
948 const char *warnmsg;
949 location_t locus;
951 gcc_assert (fold_deferring_overflow_warnings > 0);
952 --fold_deferring_overflow_warnings;
953 if (fold_deferring_overflow_warnings > 0)
955 if (fold_deferred_overflow_warning != NULL
956 && code != 0
957 && code < (int) fold_deferred_overflow_code)
958 fold_deferred_overflow_code = code;
959 return;
962 warnmsg = fold_deferred_overflow_warning;
963 fold_deferred_overflow_warning = NULL;
965 if (!issue || warnmsg == NULL)
966 return;
968 /* Use the smallest code level when deciding to issue the
969 warning. */
970 if (code == 0 || code > (int) fold_deferred_overflow_code)
971 code = fold_deferred_overflow_code;
973 if (!issue_strict_overflow_warning (code))
974 return;
976 if (stmt == NULL_TREE || !expr_has_location (stmt))
977 locus = input_location;
978 else
979 locus = expr_location (stmt);
980 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
983 /* Stop deferring overflow warnings, ignoring any deferred
984 warnings. */
986 void
987 fold_undefer_and_ignore_overflow_warnings (void)
989 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
992 /* Whether we are deferring overflow warnings. */
994 bool
995 fold_deferring_overflow_warnings_p (void)
997 return fold_deferring_overflow_warnings > 0;
1000 /* This is called when we fold something based on the fact that signed
1001 overflow is undefined. */
1003 static void
1004 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1006 gcc_assert (!flag_wrapv && !flag_trapv);
1007 if (fold_deferring_overflow_warnings > 0)
1009 if (fold_deferred_overflow_warning == NULL
1010 || wc < fold_deferred_overflow_code)
1012 fold_deferred_overflow_warning = gmsgid;
1013 fold_deferred_overflow_code = wc;
1016 else if (issue_strict_overflow_warning (wc))
1017 warning (OPT_Wstrict_overflow, gmsgid);
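/* A sketch of the intended calling pattern (compiled out via #if 0);
   expr, stmt and interesting_p are hypothetical stand-ins for whatever
   the caller folds and checks: */
#if 0
  fold_defer_overflow_warnings ();
  tree val = fold (expr);
  /* Warn only if the folded result is actually used. */
  fold_undefer_overflow_warnings (interesting_p (val), stmt,
                                  WARN_STRICT_OVERFLOW_MISC);
#endif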
1020 /* Return true if the built-in mathematical function specified by CODE
1021 is odd, i.e. -f(x) == f(-x). */
1023 static bool
1024 negate_mathfn_p (enum built_in_function code)
1026 switch (code)
1028 CASE_FLT_FN (BUILT_IN_ASIN):
1029 CASE_FLT_FN (BUILT_IN_ASINH):
1030 CASE_FLT_FN (BUILT_IN_ATAN):
1031 CASE_FLT_FN (BUILT_IN_ATANH):
1032 CASE_FLT_FN (BUILT_IN_CASIN):
1033 CASE_FLT_FN (BUILT_IN_CASINH):
1034 CASE_FLT_FN (BUILT_IN_CATAN):
1035 CASE_FLT_FN (BUILT_IN_CATANH):
1036 CASE_FLT_FN (BUILT_IN_CBRT):
1037 CASE_FLT_FN (BUILT_IN_CPROJ):
1038 CASE_FLT_FN (BUILT_IN_CSIN):
1039 CASE_FLT_FN (BUILT_IN_CSINH):
1040 CASE_FLT_FN (BUILT_IN_CTAN):
1041 CASE_FLT_FN (BUILT_IN_CTANH):
1042 CASE_FLT_FN (BUILT_IN_ERF):
1043 CASE_FLT_FN (BUILT_IN_LLROUND):
1044 CASE_FLT_FN (BUILT_IN_LROUND):
1045 CASE_FLT_FN (BUILT_IN_ROUND):
1046 CASE_FLT_FN (BUILT_IN_SIN):
1047 CASE_FLT_FN (BUILT_IN_SINH):
1048 CASE_FLT_FN (BUILT_IN_TAN):
1049 CASE_FLT_FN (BUILT_IN_TANH):
1050 CASE_FLT_FN (BUILT_IN_TRUNC):
1051 return true;
1053 CASE_FLT_FN (BUILT_IN_LLRINT):
1054 CASE_FLT_FN (BUILT_IN_LRINT):
1055 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1056 CASE_FLT_FN (BUILT_IN_RINT):
1057 return !flag_rounding_math;
1059 default:
1060 break;
1062 return false;
1065 /* Check whether we may negate an integer constant T without causing
1066 overflow. */
1068 bool
1069 may_negate_without_overflow_p (tree t)
1071 unsigned HOST_WIDE_INT val;
1072 unsigned int prec;
1073 tree type;
1075 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1077 type = TREE_TYPE (t);
1078 if (TYPE_UNSIGNED (type))
1079 return false;
1081 prec = TYPE_PRECISION (type);
1082 if (prec > HOST_BITS_PER_WIDE_INT)
1084 if (TREE_INT_CST_LOW (t) != 0)
1085 return true;
1086 prec -= HOST_BITS_PER_WIDE_INT;
1087 val = TREE_INT_CST_HIGH (t);
1089 else
1090 val = TREE_INT_CST_LOW (t);
1091 if (prec < HOST_BITS_PER_WIDE_INT)
1092 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1093 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
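/* A 32-bit sketch of the final test (compiled out via #if 0): in a
   signed type only the minimum value has no representable negation,
   and it is exactly the lone-sign-bit pattern compared against above. */
#if 0
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  unsigned int prec = 32;                 /* assumes 32-bit int */
  unsigned int val = (unsigned int) INT_MIN;
  printf ("%d\n", val != (unsigned int) 1 << (prec - 1));  /* 0: -INT_MIN overflows */
  val = (unsigned int) -2;
  printf ("%d\n", val != (unsigned int) 1 << (prec - 1));  /* 1: -(-2) is fine */
  return 0;
}
#endif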
1096 /* Determine whether an expression T can be cheaply negated using
1097 the function negate_expr without introducing undefined overflow. */
1099 static bool
1100 negate_expr_p (tree t)
1102 tree type;
1104 if (t == 0)
1105 return false;
1107 type = TREE_TYPE (t);
1109 STRIP_SIGN_NOPS (t);
1110 switch (TREE_CODE (t))
1112 case INTEGER_CST:
1113 if (TYPE_OVERFLOW_WRAPS (type))
1114 return true;
1116 /* Check that -CST will not overflow type. */
1117 return may_negate_without_overflow_p (t);
1118 case BIT_NOT_EXPR:
1119 return (INTEGRAL_TYPE_P (type)
1120 && TYPE_OVERFLOW_WRAPS (type));
1122 case REAL_CST:
1123 case NEGATE_EXPR:
1124 return true;
1126 case COMPLEX_CST:
1127 return negate_expr_p (TREE_REALPART (t))
1128 && negate_expr_p (TREE_IMAGPART (t));
1130 case COMPLEX_EXPR:
1131 return negate_expr_p (TREE_OPERAND (t, 0))
1132 && negate_expr_p (TREE_OPERAND (t, 1));
1134 case CONJ_EXPR:
1135 return negate_expr_p (TREE_OPERAND (t, 0));
1137 case PLUS_EXPR:
1138 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1139 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1140 return false;
1141 /* -(A + B) -> (-B) - A. */
1142 if (negate_expr_p (TREE_OPERAND (t, 1))
1143 && reorder_operands_p (TREE_OPERAND (t, 0),
1144 TREE_OPERAND (t, 1)))
1145 return true;
1146 /* -(A + B) -> (-A) - B. */
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1149 case MINUS_EXPR:
1150 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1151 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1152 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1));
1156 case MULT_EXPR:
1157 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1158 break;
1160 /* Fall through. */
1162 case RDIV_EXPR:
1163 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1164 return negate_expr_p (TREE_OPERAND (t, 1))
1165 || negate_expr_p (TREE_OPERAND (t, 0));
1166 break;
1168 case TRUNC_DIV_EXPR:
1169 case ROUND_DIV_EXPR:
1170 case FLOOR_DIV_EXPR:
1171 case CEIL_DIV_EXPR:
1172 case EXACT_DIV_EXPR:
1173 /* In general we can't negate A / B, because if A is INT_MIN and
1174 B is 1, we may turn this into INT_MIN / -1 which is undefined
1175 and actually traps on some architectures. But if overflow is
1176 undefined, we can negate, because - (INT_MIN / 1) is an
1177 overflow. */
1178 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1179 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1180 break;
1181 return negate_expr_p (TREE_OPERAND (t, 1))
1182 || negate_expr_p (TREE_OPERAND (t, 0));
1184 case NOP_EXPR:
1185 /* Negate -((double)float) as (double)(-float). */
1186 if (TREE_CODE (type) == REAL_TYPE)
1188 tree tem = strip_float_extensions (t);
1189 if (tem != t)
1190 return negate_expr_p (tem);
1192 break;
1194 case CALL_EXPR:
1195 /* Negate -f(x) as f(-x). */
1196 if (negate_mathfn_p (builtin_mathfn_code (t)))
1197 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1198 break;
1200 case RSHIFT_EXPR:
1201 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1202 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1204 tree op1 = TREE_OPERAND (t, 1);
1205 if (TREE_INT_CST_HIGH (op1) == 0
1206 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1207 == TREE_INT_CST_LOW (op1))
1208 return true;
1210 break;
1212 default:
1213 break;
1215 return false;
1218 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1219 simplification is possible.
1220 If negate_expr_p would return true for T, NULL_TREE will never be
1221 returned. */
1223 static tree
1224 fold_negate_expr (tree t)
1226 tree type = TREE_TYPE (t);
1227 tree tem;
1229 switch (TREE_CODE (t))
1231 /* Convert - (~A) to A + 1. */
1232 case BIT_NOT_EXPR:
1233 if (INTEGRAL_TYPE_P (type))
1234 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1235 build_int_cst (type, 1));
1236 break;
1238 case INTEGER_CST:
1239 tem = fold_negate_const (t, type);
1240 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1241 || !TYPE_OVERFLOW_TRAPS (type))
1242 return tem;
1243 break;
1245 case REAL_CST:
1246 tem = fold_negate_const (t, type);
1247 /* Two's complement FP formats, such as c4x, may overflow. */
1248 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1249 return tem;
1250 break;
1252 case COMPLEX_CST:
1254 tree rpart = negate_expr (TREE_REALPART (t));
1255 tree ipart = negate_expr (TREE_IMAGPART (t));
1257 if ((TREE_CODE (rpart) == REAL_CST
1258 && TREE_CODE (ipart) == REAL_CST)
1259 || (TREE_CODE (rpart) == INTEGER_CST
1260 && TREE_CODE (ipart) == INTEGER_CST))
1261 return build_complex (type, rpart, ipart);
1263 break;
1265 case COMPLEX_EXPR:
1266 if (negate_expr_p (t))
1267 return fold_build2 (COMPLEX_EXPR, type,
1268 fold_negate_expr (TREE_OPERAND (t, 0)),
1269 fold_negate_expr (TREE_OPERAND (t, 1)));
1270 break;
1272 case CONJ_EXPR:
1273 if (negate_expr_p (t))
1274 return fold_build1 (CONJ_EXPR, type,
1275 fold_negate_expr (TREE_OPERAND (t, 0)));
1276 break;
1278 case NEGATE_EXPR:
1279 return TREE_OPERAND (t, 0);
1281 case PLUS_EXPR:
1282 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1283 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1285 /* -(A + B) -> (-B) - A. */
1286 if (negate_expr_p (TREE_OPERAND (t, 1))
1287 && reorder_operands_p (TREE_OPERAND (t, 0),
1288 TREE_OPERAND (t, 1)))
1290 tem = negate_expr (TREE_OPERAND (t, 1));
1291 return fold_build2 (MINUS_EXPR, type,
1292 tem, TREE_OPERAND (t, 0));
1295 /* -(A + B) -> (-A) - B. */
1296 if (negate_expr_p (TREE_OPERAND (t, 0)))
1298 tem = negate_expr (TREE_OPERAND (t, 0));
1299 return fold_build2 (MINUS_EXPR, type,
1300 tem, TREE_OPERAND (t, 1));
1303 break;
1305 case MINUS_EXPR:
1306 /* - (A - B) -> B - A */
1307 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1308 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1309 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1310 return fold_build2 (MINUS_EXPR, type,
1311 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1312 break;
1314 case MULT_EXPR:
1315 if (TYPE_UNSIGNED (type))
1316 break;
1318 /* Fall through. */
1320 case RDIV_EXPR:
1321 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1323 tem = TREE_OPERAND (t, 1);
1324 if (negate_expr_p (tem))
1325 return fold_build2 (TREE_CODE (t), type,
1326 TREE_OPERAND (t, 0), negate_expr (tem));
1327 tem = TREE_OPERAND (t, 0);
1328 if (negate_expr_p (tem))
1329 return fold_build2 (TREE_CODE (t), type,
1330 negate_expr (tem), TREE_OPERAND (t, 1));
1332 break;
1334 case TRUNC_DIV_EXPR:
1335 case ROUND_DIV_EXPR:
1336 case FLOOR_DIV_EXPR:
1337 case CEIL_DIV_EXPR:
1338 case EXACT_DIV_EXPR:
1339 /* In general we can't negate A / B, because if A is INT_MIN and
1340 B is 1, we may turn this into INT_MIN / -1 which is undefined
1341 and actually traps on some architectures. But if overflow is
1342 undefined, we can negate, because - (INT_MIN / 1) is an
1343 overflow. */
1344 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1346 const char * const warnmsg = G_("assuming signed overflow does not "
1347 "occur when negating a division");
1348 tem = TREE_OPERAND (t, 1);
1349 if (negate_expr_p (tem))
1351 if (INTEGRAL_TYPE_P (type)
1352 && (TREE_CODE (tem) != INTEGER_CST
1353 || integer_onep (tem)))
1354 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1355 return fold_build2 (TREE_CODE (t), type,
1356 TREE_OPERAND (t, 0), negate_expr (tem));
1358 tem = TREE_OPERAND (t, 0);
1359 if (negate_expr_p (tem))
1361 if (INTEGRAL_TYPE_P (type)
1362 && (TREE_CODE (tem) != INTEGER_CST
1363 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1364 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1365 return fold_build2 (TREE_CODE (t), type,
1366 negate_expr (tem), TREE_OPERAND (t, 1));
1369 break;
1371 case NOP_EXPR:
1372 /* Convert -((double)float) into (double)(-float). */
1373 if (TREE_CODE (type) == REAL_TYPE)
1375 tem = strip_float_extensions (t);
1376 if (tem != t && negate_expr_p (tem))
1377 return negate_expr (tem);
1379 break;
1381 case CALL_EXPR:
1382 /* Negate -f(x) as f(-x). */
1383 if (negate_mathfn_p (builtin_mathfn_code (t))
1384 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1386 tree fndecl, arg;
1388 fndecl = get_callee_fndecl (t);
1389 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1390 return build_call_expr (fndecl, 1, arg);
1392 break;
1394 case RSHIFT_EXPR:
1395 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1396 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1398 tree op1 = TREE_OPERAND (t, 1);
1399 if (TREE_INT_CST_HIGH (op1) == 0
1400 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1401 == TREE_INT_CST_LOW (op1))
1403 tree ntype = TYPE_UNSIGNED (type)
1404 ? lang_hooks.types.signed_type (type)
1405 : lang_hooks.types.unsigned_type (type);
1406 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1407 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1408 return fold_convert (type, temp);
1411 break;
1413 default:
1414 break;
1417 return NULL_TREE;
1420 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1421 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1422 return NULL_TREE. */
1424 static tree
1425 negate_expr (tree t)
1427 tree type, tem;
1429 if (t == NULL_TREE)
1430 return NULL_TREE;
1432 type = TREE_TYPE (t);
1433 STRIP_SIGN_NOPS (t);
1435 tem = fold_negate_expr (t);
1436 if (!tem)
1437 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1438 return fold_convert (type, tem);
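/* A numeric sketch of the PLUS_EXPR rewrite performed above (compiled
   out via #if 0): -(a + b) becomes (-b) - a, and the two agree. */
#if 0
#include <stdio.h>

int
main (void)
{
  double a = 1.5, b = -2.25;
  printf ("%g %g\n", -(a + b), -b - a);  /* 0.75 0.75 */
  return 0;
}
#endif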
1441 /* Split a tree IN into a constant, literal and variable parts that could be
1442 combined with CODE to make IN. "constant" means an expression with
1443 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1444 commutative arithmetic operation. Store the constant part into *CONP,
1445 the literal in *LITP and return the variable part. If a part isn't
1446 present, set it to null. If the tree does not decompose in this way,
1447 return the entire tree as the variable part and the other parts as null.
1449 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1450 case, we negate an operand that was subtracted. Except if it is a
1451 literal for which we use *MINUS_LITP instead.
1453 If NEGATE_P is true, we are negating all of IN, again except a literal
1454 for which we use *MINUS_LITP instead.
1456 If IN is itself a literal or constant, return it as appropriate.
1458 Note that we do not guarantee that any of the three values will be the
1459 same type as IN, but they will have the same signedness and mode. */
1461 static tree
1462 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1463 tree *minus_litp, int negate_p)
1465 tree var = 0;
1467 *conp = 0;
1468 *litp = 0;
1469 *minus_litp = 0;
1471 /* Strip any conversions that don't change the machine mode or signedness. */
1472 STRIP_SIGN_NOPS (in);
1474 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1475 *litp = in;
1476 else if (TREE_CODE (in) == code
1477 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1478 /* We can associate addition and subtraction together (even
1479 though the C standard doesn't say so) for integers because
1480 the value is not affected. For reals, the value might be
1481 affected, so we can't. */
1482 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1483 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1485 tree op0 = TREE_OPERAND (in, 0);
1486 tree op1 = TREE_OPERAND (in, 1);
1487 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1488 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1490 /* First see if either of the operands is a literal, then a constant. */
1491 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1492 *litp = op0, op0 = 0;
1493 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1494 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1496 if (op0 != 0 && TREE_CONSTANT (op0))
1497 *conp = op0, op0 = 0;
1498 else if (op1 != 0 && TREE_CONSTANT (op1))
1499 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1501 /* If we haven't dealt with either operand, this is not a case we can
1502 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1503 if (op0 != 0 && op1 != 0)
1504 var = in;
1505 else if (op0 != 0)
1506 var = op0;
1507 else
1508 var = op1, neg_var_p = neg1_p;
1510 /* Now do any needed negations. */
1511 if (neg_litp_p)
1512 *minus_litp = *litp, *litp = 0;
1513 if (neg_conp_p)
1514 *conp = negate_expr (*conp);
1515 if (neg_var_p)
1516 var = negate_expr (var);
1518 else if (TREE_CONSTANT (in))
1519 *conp = in;
1520 else
1521 var = in;
1523 if (negate_p)
1525 if (*litp)
1526 *minus_litp = *litp, *litp = 0;
1527 else if (*minus_litp)
1528 *litp = *minus_litp, *minus_litp = 0;
1529 *conp = negate_expr (*conp);
1530 var = negate_expr (var);
1533 return var;
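/* Example decompositions (a sketch; `c' stands for any TREE_CONSTANT
   expression that is not a literal, e.g. an ADDR_EXPR):
     split_tree (x + 3, PLUS_EXPR, ...)   -> returns x, *litp = 3
     split_tree (x - 3, PLUS_EXPR, ...)   -> returns x, *minus_litp = 3
     split_tree (c + x, PLUS_EXPR, ...)   -> returns x, *conp = c
     split_tree (x + 3, PLUS_EXPR, ..., negate_p = 1)
                                          -> returns -x, *minus_litp = 3  */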
1536 /* Re-associate trees split by the above function. T1 and T2 are either
1537 expressions to associate or null. Return the new expression, if any. If
1538 we build an operation, do it in TYPE and with CODE. */
1540 static tree
1541 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1543 if (t1 == 0)
1544 return t2;
1545 else if (t2 == 0)
1546 return t1;
1548 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1549 try to fold this since we will have infinite recursion. But do
1550 deal with any NEGATE_EXPRs. */
1551 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1552 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1554 if (code == PLUS_EXPR)
1556 if (TREE_CODE (t1) == NEGATE_EXPR)
1557 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1558 fold_convert (type, TREE_OPERAND (t1, 0)));
1559 else if (TREE_CODE (t2) == NEGATE_EXPR)
1560 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1561 fold_convert (type, TREE_OPERAND (t2, 0)));
1562 else if (integer_zerop (t2))
1563 return fold_convert (type, t1);
1565 else if (code == MINUS_EXPR)
1567 if (integer_zerop (t2))
1568 return fold_convert (type, t1);
1571 return build2 (code, type, fold_convert (type, t1),
1572 fold_convert (type, t2));
1575 return fold_build2 (code, type, fold_convert (type, t1),
1576 fold_convert (type, t2));
1579 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1580 for use in int_const_binop, size_binop and size_diffop. */
1582 static bool
1583 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1585 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1586 return false;
1587 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1588 return false;
1590 switch (code)
1592 case LSHIFT_EXPR:
1593 case RSHIFT_EXPR:
1594 case LROTATE_EXPR:
1595 case RROTATE_EXPR:
1596 return true;
1598 default:
1599 break;
1602 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1603 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1604 && TYPE_MODE (type1) == TYPE_MODE (type2);
1608 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1609 to produce a new constant. Return NULL_TREE if we don't know how
1610 to evaluate CODE at compile-time.
1612 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1614 tree
1615 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1617 unsigned HOST_WIDE_INT int1l, int2l;
1618 HOST_WIDE_INT int1h, int2h;
1619 unsigned HOST_WIDE_INT low;
1620 HOST_WIDE_INT hi;
1621 unsigned HOST_WIDE_INT garbagel;
1622 HOST_WIDE_INT garbageh;
1623 tree t;
1624 tree type = TREE_TYPE (arg1);
1625 int uns = TYPE_UNSIGNED (type);
1626 int is_sizetype
1627 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1628 int overflow = 0;
1630 int1l = TREE_INT_CST_LOW (arg1);
1631 int1h = TREE_INT_CST_HIGH (arg1);
1632 int2l = TREE_INT_CST_LOW (arg2);
1633 int2h = TREE_INT_CST_HIGH (arg2);
1635 switch (code)
1637 case BIT_IOR_EXPR:
1638 low = int1l | int2l, hi = int1h | int2h;
1639 break;
1641 case BIT_XOR_EXPR:
1642 low = int1l ^ int2l, hi = int1h ^ int2h;
1643 break;
1645 case BIT_AND_EXPR:
1646 low = int1l & int2l, hi = int1h & int2h;
1647 break;
1649 case RSHIFT_EXPR:
1650 int2l = -int2l;
1651 case LSHIFT_EXPR:
1652 /* It's unclear from the C standard whether shifts can overflow.
1653 The following code ignores overflow; perhaps a C standard
1654 interpretation ruling is needed. */
1655 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1656 &low, &hi, !uns);
1657 break;
1659 case RROTATE_EXPR:
1660 int2l = - int2l;
1661 case LROTATE_EXPR:
1662 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1663 &low, &hi);
1664 break;
1666 case PLUS_EXPR:
1667 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1668 break;
1670 case MINUS_EXPR:
1671 neg_double (int2l, int2h, &low, &hi);
1672 add_double (int1l, int1h, low, hi, &low, &hi);
1673 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1674 break;
1676 case MULT_EXPR:
1677 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1678 break;
1680 case TRUNC_DIV_EXPR:
1681 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1682 case EXACT_DIV_EXPR:
1683 /* This is a shortcut for a common special case. */
1684 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1685 && !TREE_OVERFLOW (arg1)
1686 && !TREE_OVERFLOW (arg2)
1687 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1689 if (code == CEIL_DIV_EXPR)
1690 int1l += int2l - 1;
1692 low = int1l / int2l, hi = 0;
1693 break;
1696 /* ... fall through ... */
1698 case ROUND_DIV_EXPR:
1699 if (int2h == 0 && int2l == 0)
1700 return NULL_TREE;
1701 if (int2h == 0 && int2l == 1)
1703 low = int1l, hi = int1h;
1704 break;
1706 if (int1l == int2l && int1h == int2h
1707 && ! (int1l == 0 && int1h == 0))
1709 low = 1, hi = 0;
1710 break;
1712 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1713 &low, &hi, &garbagel, &garbageh);
1714 break;
1716 case TRUNC_MOD_EXPR:
1717 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1718 /* This is a shortcut for a common special case. */
1719 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1720 && !TREE_OVERFLOW (arg1)
1721 && !TREE_OVERFLOW (arg2)
1722 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1724 if (code == CEIL_MOD_EXPR)
1725 int1l += int2l - 1;
1726 low = int1l % int2l, hi = 0;
1727 break;
1730 /* ... fall through ... */
1732 case ROUND_MOD_EXPR:
1733 if (int2h == 0 && int2l == 0)
1734 return NULL_TREE;
1735 overflow = div_and_round_double (code, uns,
1736 int1l, int1h, int2l, int2h,
1737 &garbagel, &garbageh, &low, &hi);
1738 break;
1740 case MIN_EXPR:
1741 case MAX_EXPR:
1742 if (uns)
1743 low = (((unsigned HOST_WIDE_INT) int1h
1744 < (unsigned HOST_WIDE_INT) int2h)
1745 || (((unsigned HOST_WIDE_INT) int1h
1746 == (unsigned HOST_WIDE_INT) int2h)
1747 && int1l < int2l));
1748 else
1749 low = (int1h < int2h
1750 || (int1h == int2h && int1l < int2l));
1752 if (low == (code == MIN_EXPR))
1753 low = int1l, hi = int1h;
1754 else
1755 low = int2l, hi = int2h;
1756 break;
1758 default:
1759 return NULL_TREE;
1762 if (notrunc)
1764 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1766 /* Propagate overflow flags ourselves. */
1767 if (((!uns || is_sizetype) && overflow)
1768 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1770 t = copy_node (t);
1771 TREE_OVERFLOW (t) = 1;
1774 else
1775 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1776 ((!uns || is_sizetype) && overflow)
1777 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1779 return t;
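/* A numeric sketch of the truncation step (compiled out via #if 0):
   folding 100 + 100 in a PLUS_EXPR of signed char type wraps 200 to
   -56 on the usual two's complement targets, and that mismatch is
   what flags TREE_OVERFLOW on the new node. */
#if 0
#include <stdio.h>

int
main (void)
{
  int sum = 100 + 100;                        /* exact double-word sum */
  signed char fitted = (signed char) sum;     /* forced into 8 signed bits */
  printf ("%d %d\n", fitted, fitted != sum);  /* -56 1 */
  return 0;
}
#endif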
1782 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1783 constant. We assume ARG1 and ARG2 have the same data type, or at least
1784 are the same kind of constant and the same machine mode. Return zero if
1785 combining the constants is not allowed in the current operating mode.
1787 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1789 static tree
1790 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1792 /* Sanity check for the recursive cases. */
1793 if (!arg1 || !arg2)
1794 return NULL_TREE;
1796 STRIP_NOPS (arg1);
1797 STRIP_NOPS (arg2);
1799 if (TREE_CODE (arg1) == INTEGER_CST)
1800 return int_const_binop (code, arg1, arg2, notrunc);
1802 if (TREE_CODE (arg1) == REAL_CST)
1804 enum machine_mode mode;
1805 REAL_VALUE_TYPE d1;
1806 REAL_VALUE_TYPE d2;
1807 REAL_VALUE_TYPE value;
1808 REAL_VALUE_TYPE result;
1809 bool inexact;
1810 tree t, type;
1812 /* The following codes are handled by real_arithmetic. */
1813 switch (code)
1815 case PLUS_EXPR:
1816 case MINUS_EXPR:
1817 case MULT_EXPR:
1818 case RDIV_EXPR:
1819 case MIN_EXPR:
1820 case MAX_EXPR:
1821 break;
1823 default:
1824 return NULL_TREE;
1827 d1 = TREE_REAL_CST (arg1);
1828 d2 = TREE_REAL_CST (arg2);
1830 type = TREE_TYPE (arg1);
1831 mode = TYPE_MODE (type);
1833 /* Don't perform operation if we honor signaling NaNs and
1834 either operand is a NaN. */
1835 if (HONOR_SNANS (mode)
1836 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1837 return NULL_TREE;
1839 /* Don't perform operation if it would raise a division
1840 by zero exception. */
1841 if (code == RDIV_EXPR
1842 && REAL_VALUES_EQUAL (d2, dconst0)
1843 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1844 return NULL_TREE;
1846 /* If either operand is a NaN, just return it. Otherwise, set up
1847 for floating-point trap; we return an overflow. */
1848 if (REAL_VALUE_ISNAN (d1))
1849 return arg1;
1850 else if (REAL_VALUE_ISNAN (d2))
1851 return arg2;
1853 inexact = real_arithmetic (&value, code, &d1, &d2);
1854 real_convert (&result, mode, &value);
1856 /* Don't constant fold this floating point operation if
1857 the result has overflowed and flag_trapping_math. */
1858 if (flag_trapping_math
1859 && MODE_HAS_INFINITIES (mode)
1860 && REAL_VALUE_ISINF (result)
1861 && !REAL_VALUE_ISINF (d1)
1862 && !REAL_VALUE_ISINF (d2))
1863 return NULL_TREE;
1865 /* Don't constant fold this floating point operation if the
1866 result may depend upon the run-time rounding mode and
1867 flag_rounding_math is set, or if GCC's software emulation
1868 is unable to accurately represent the result. */
1869 if ((flag_rounding_math
1870 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1871 && !flag_unsafe_math_optimizations))
1872 && (inexact || !real_identical (&result, &value)))
1873 return NULL_TREE;
1875 t = build_real (type, result);
1877 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1878 return t;
1881 if (TREE_CODE (arg1) == COMPLEX_CST)
1883 tree type = TREE_TYPE (arg1);
1884 tree r1 = TREE_REALPART (arg1);
1885 tree i1 = TREE_IMAGPART (arg1);
1886 tree r2 = TREE_REALPART (arg2);
1887 tree i2 = TREE_IMAGPART (arg2);
1888 tree real, imag;
1890 switch (code)
1892 case PLUS_EXPR:
1893 case MINUS_EXPR:
1894 real = const_binop (code, r1, r2, notrunc);
1895 imag = const_binop (code, i1, i2, notrunc);
1896 break;
1898 case MULT_EXPR:
1899 real = const_binop (MINUS_EXPR,
1900 const_binop (MULT_EXPR, r1, r2, notrunc),
1901 const_binop (MULT_EXPR, i1, i2, notrunc),
1902 notrunc);
1903 imag = const_binop (PLUS_EXPR,
1904 const_binop (MULT_EXPR, r1, i2, notrunc),
1905 const_binop (MULT_EXPR, i1, r2, notrunc),
1906 notrunc);
1907 break;
1909 case RDIV_EXPR:
1911 tree magsquared
1912 = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r2, r2, notrunc),
1914 const_binop (MULT_EXPR, i2, i2, notrunc),
1915 notrunc);
1916 tree t1
1917 = const_binop (PLUS_EXPR,
1918 const_binop (MULT_EXPR, r1, r2, notrunc),
1919 const_binop (MULT_EXPR, i1, i2, notrunc),
1920 notrunc);
1921 tree t2
1922 = const_binop (MINUS_EXPR,
1923 const_binop (MULT_EXPR, i1, r2, notrunc),
1924 const_binop (MULT_EXPR, r1, i2, notrunc),
1925 notrunc);
1927 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1928 code = TRUNC_DIV_EXPR;
1930 real = const_binop (code, t1, magsquared, notrunc);
1931 imag = const_binop (code, t2, magsquared, notrunc);
1933 break;
1935 default:
1936 return NULL_TREE;
1939 if (real && imag)
1940 return build_complex (type, real, imag);
1943 return NULL_TREE;
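/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the complex product and quotient formulas folded above, written out with
   plain doubles.  The function names are hypothetical; GCC performs each
   step through recursive const_binop calls on the real and imaginary
   parts.  */
static void complex_mul_sketch (double r1, double i1, double r2, double i2,
                                double *r, double *i)
{
  *r = r1 * r2 - i1 * i2;            /* real part: r1*r2 - i1*i2 */
  *i = r1 * i2 + i1 * r2;            /* imaginary part: r1*i2 + i1*r2 */
}

static void complex_div_sketch (double r1, double i1, double r2, double i2,
                                double *r, double *i)
{
  double mag2 = r2 * r2 + i2 * i2;   /* the "magsquared" term above */
  *r = (r1 * r2 + i1 * i2) / mag2;   /* t1 / magsquared */
  *i = (i1 * r2 - r1 * i2) / mag2;   /* t2 / magsquared */
}
/* For instance (2+3i)/(1-2i): mag2 = 5, real = (2-6)/5 = -0.8,
   imag = (3+4)/5 = 1.4, i.e. -0.8 + 1.4i.  */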
1946 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1947 indicates which particular sizetype to create. */
1949 tree
1950 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1952 return build_int_cst (sizetype_tab[(int) kind], number);
1955 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1956 is a tree code. The type of the result is taken from the operands.
1957 Both must be equivalent integer types, as per int_binop_types_match_p.
1958 If the operands are constant, so is the result. */
1960 tree
1961 size_binop (enum tree_code code, tree arg0, tree arg1)
1963 tree type = TREE_TYPE (arg0);
1965 if (arg0 == error_mark_node || arg1 == error_mark_node)
1966 return error_mark_node;
1968 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1969 TREE_TYPE (arg1)));
1971 /* Handle the special case of two integer constants faster. */
1972 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1974 /* And some specific cases even faster than that. */
1975 if (code == PLUS_EXPR)
1977 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1978 return arg1;
1979 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1980 return arg0;
1982 else if (code == MINUS_EXPR)
1984 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1985 return arg0;
1987 else if (code == MULT_EXPR)
1989 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1990 return arg1;
1993 /* Handle the general case of two integer constants. */
1994 return int_const_binop (code, arg0, arg1, 0);
1997 return fold_build2 (code, type, arg0, arg1);
2000 /* Given two values, either both of sizetype or both of bitsizetype,
2001 compute the difference between the two values. Return the value
2002 in signed type corresponding to the type of the operands. */
2004 tree
2005 size_diffop (tree arg0, tree arg1)
2007 tree type = TREE_TYPE (arg0);
2008 tree ctype;
2010 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2011 TREE_TYPE (arg1)));
2013 /* If the type is already signed, just do the simple thing. */
2014 if (!TYPE_UNSIGNED (type))
2015 return size_binop (MINUS_EXPR, arg0, arg1);
2017 if (type == sizetype)
2018 ctype = ssizetype;
2019 else if (type == bitsizetype)
2020 ctype = sbitsizetype;
2021 else
2022 ctype = lang_hooks.types.signed_type (type);
2024 /* If either operand is not a constant, do the conversions to the signed
2025 type and subtract. The hardware will do the right thing with any
2026 overflow in the subtraction. */
2027 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2028 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2029 fold_convert (ctype, arg1));
2031 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2032 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2033 overflow) and negate (which can't either). Special-case a result
2034 of zero while we're here. */
2035 if (tree_int_cst_equal (arg0, arg1))
2036 return build_int_cst (ctype, 0);
2037 else if (tree_int_cst_lt (arg1, arg0))
2038 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2039 else
2040 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2041 fold_convert (ctype, size_binop (MINUS_EXPR,
2042 arg1, arg0)));
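/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the constant path of size_diffop, with unsigned long standing in for
   sizetype and long for ssizetype.  The difference is formed by always
   subtracting the smaller constant from the larger, so neither the
   subtraction nor the conversion to the signed type can overflow.  */
static long size_diff_sketch (unsigned long arg0, unsigned long arg1)
{
  if (arg0 == arg1)
    return 0;                        /* the special-cased zero result */
  else if (arg1 < arg0)
    return (long) (arg0 - arg1);     /* arg0 - arg1 fits the signed type */
  else
    return - (long) (arg1 - arg0);   /* 0 - (arg1 - arg0), likewise safe */
}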
2045 /* A subroutine of fold_convert_const handling conversions of an
2046 INTEGER_CST to another integer type. */
2048 static tree
2049 fold_convert_const_int_from_int (tree type, tree arg1)
2051 tree t;
2053 /* Given an integer constant, make a new constant with the new type,
2054 appropriately sign-extended or truncated. */
2055 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2056 TREE_INT_CST_HIGH (arg1),
2057 /* Don't set the overflow when
2058 converting a pointer */
2059 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2060 (TREE_INT_CST_HIGH (arg1) < 0
2061 && (TYPE_UNSIGNED (type)
2062 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2063 | TREE_OVERFLOW (arg1));
2065 return t;
2068 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2069 to an integer type. */
2071 static tree
2072 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2074 int overflow = 0;
2075 tree t;
2077 /* The following code implements the floating point to integer
2078 conversion rules required by the Java Language Specification:
2079 IEEE NaNs are mapped to zero and values that overflow
2080 the target precision saturate, i.e. values greater than
2081 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2082 are mapped to INT_MIN. These semantics are allowed by the
2083 C and C++ standards that simply state that the behavior of
2084 FP-to-integer conversion is unspecified upon overflow. */
2086 HOST_WIDE_INT high, low;
2087 REAL_VALUE_TYPE r;
2088 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2090 switch (code)
2092 case FIX_TRUNC_EXPR:
2093 real_trunc (&r, VOIDmode, &x);
2094 break;
2096 default:
2097 gcc_unreachable ();
2100 /* If R is NaN, return zero and show we have an overflow. */
2101 if (REAL_VALUE_ISNAN (r))
2103 overflow = 1;
2104 high = 0;
2105 low = 0;
2108 /* See if R is less than the lower bound or greater than the
2109 upper bound. */
2111 if (! overflow)
2113 tree lt = TYPE_MIN_VALUE (type);
2114 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2115 if (REAL_VALUES_LESS (r, l))
2117 overflow = 1;
2118 high = TREE_INT_CST_HIGH (lt);
2119 low = TREE_INT_CST_LOW (lt);
2123 if (! overflow)
2125 tree ut = TYPE_MAX_VALUE (type);
2126 if (ut)
2128 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2129 if (REAL_VALUES_LESS (u, r))
2131 overflow = 1;
2132 high = TREE_INT_CST_HIGH (ut);
2133 low = TREE_INT_CST_LOW (ut);
2138 if (! overflow)
2139 REAL_VALUE_TO_INT (&low, &high, r);
2141 t = force_fit_type_double (type, low, high, -1,
2142 overflow | TREE_OVERFLOW (arg1));
2143 return t;
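/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the saturating conversion semantics described above, specialized to a
   double truncated to a 32-bit int.  GCC does this on REAL_VALUE_TYPEs for
   arbitrary target widths; this only illustrates the NaN-to-zero and
   clamping rules.  */
#include <math.h>
#include <limits.h>

static int fix_trunc_saturating_sketch (double x)
{
  if (isnan (x))
    return 0;                   /* NaN maps to zero, with overflow noted */
  x = trunc (x);                /* FIX_TRUNC_EXPR rounds toward zero */
  if (x < (double) INT_MIN)
    return INT_MIN;             /* below TYPE_MIN_VALUE: saturate */
  if (x > (double) INT_MAX)
    return INT_MAX;             /* above TYPE_MAX_VALUE: saturate */
  return (int) x;
}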
2146 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2147 to another floating point type. */
2149 static tree
2150 fold_convert_const_real_from_real (tree type, tree arg1)
2152 REAL_VALUE_TYPE value;
2153 tree t;
2155 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2156 t = build_real (type, value);
2158 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2159 return t;
2162 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2163 type TYPE. If no simplification can be done return NULL_TREE. */
2165 static tree
2166 fold_convert_const (enum tree_code code, tree type, tree arg1)
2168 if (TREE_TYPE (arg1) == type)
2169 return arg1;
2171 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2173 if (TREE_CODE (arg1) == INTEGER_CST)
2174 return fold_convert_const_int_from_int (type, arg1);
2175 else if (TREE_CODE (arg1) == REAL_CST)
2176 return fold_convert_const_int_from_real (code, type, arg1);
2178 else if (TREE_CODE (type) == REAL_TYPE)
2180 if (TREE_CODE (arg1) == INTEGER_CST)
2181 return build_real_from_int_cst (type, arg1);
2182 if (TREE_CODE (arg1) == REAL_CST)
2183 return fold_convert_const_real_from_real (type, arg1);
2185 return NULL_TREE;
2188 /* Construct a vector of zero elements of vector type TYPE. */
2190 static tree
2191 build_zero_vector (tree type)
2193 tree elem, list;
2194 int i, units;
2196 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2197 units = TYPE_VECTOR_SUBPARTS (type);
2199 list = NULL_TREE;
2200 for (i = 0; i < units; i++)
2201 list = tree_cons (NULL_TREE, elem, list);
2202 return build_vector (type, list);
2205 /* Convert expression ARG to type TYPE. Used by the middle-end for
2206 simple conversions in preference to calling the front-end's convert. */
2208 tree
2209 fold_convert (tree type, tree arg)
2211 tree orig = TREE_TYPE (arg);
2212 tree tem;
2214 if (type == orig)
2215 return arg;
2217 if (TREE_CODE (arg) == ERROR_MARK
2218 || TREE_CODE (type) == ERROR_MARK
2219 || TREE_CODE (orig) == ERROR_MARK)
2220 return error_mark_node;
2222 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2223 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2224 TYPE_MAIN_VARIANT (orig)))
2225 return fold_build1 (NOP_EXPR, type, arg);
2227 switch (TREE_CODE (type))
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case POINTER_TYPE: case REFERENCE_TYPE:
2231 case OFFSET_TYPE:
2232 if (TREE_CODE (arg) == INTEGER_CST)
2234 tem = fold_convert_const (NOP_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2236 return tem;
2238 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2239 || TREE_CODE (orig) == OFFSET_TYPE)
2240 return fold_build1 (NOP_EXPR, type, arg);
2241 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2244 return fold_convert (type, tem);
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1 (NOP_EXPR, type, arg);
2250 case REAL_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2257 else if (TREE_CODE (arg) == REAL_CST)
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2264 switch (TREE_CODE (orig))
2266 case INTEGER_TYPE:
2267 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2268 case POINTER_TYPE: case REFERENCE_TYPE:
2269 return fold_build1 (FLOAT_EXPR, type, arg);
2271 case REAL_TYPE:
2272 return fold_build1 (NOP_EXPR, type, arg);
2274 case COMPLEX_TYPE:
2275 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert (type, tem);
2278 default:
2279 gcc_unreachable ();
2282 case COMPLEX_TYPE:
2283 switch (TREE_CODE (orig))
2285 case INTEGER_TYPE:
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
2288 case REAL_TYPE:
2289 return build2 (COMPLEX_EXPR, type,
2290 fold_convert (TREE_TYPE (type), arg),
2291 fold_convert (TREE_TYPE (type), integer_zero_node));
2292 case COMPLEX_TYPE:
2294 tree rpart, ipart;
2296 if (TREE_CODE (arg) == COMPLEX_EXPR)
2298 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2299 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2300 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2303 arg = save_expr (arg);
2304 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2305 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2306 rpart = fold_convert (TREE_TYPE (type), rpart);
2307 ipart = fold_convert (TREE_TYPE (type), ipart);
2308 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2311 default:
2312 gcc_unreachable ();
2315 case VECTOR_TYPE:
2316 if (integer_zerop (arg))
2317 return build_zero_vector (type);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2320 || TREE_CODE (orig) == VECTOR_TYPE);
2321 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2323 case VOID_TYPE:
2324 tem = fold_ignored_result (arg);
2325 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2326 return tem;
2327 return fold_build1 (NOP_EXPR, type, tem);
2329 default:
2330 gcc_unreachable ();
2334 /* Return false if expr can be assumed not to be an lvalue, true
2335 otherwise. */
2337 static bool
2338 maybe_lvalue_p (tree x)
2340 /* We only need to wrap lvalue tree codes. */
2341 switch (TREE_CODE (x))
2343 case VAR_DECL:
2344 case PARM_DECL:
2345 case RESULT_DECL:
2346 case LABEL_DECL:
2347 case FUNCTION_DECL:
2348 case SSA_NAME:
2350 case COMPONENT_REF:
2351 case INDIRECT_REF:
2352 case ALIGN_INDIRECT_REF:
2353 case MISALIGNED_INDIRECT_REF:
2354 case ARRAY_REF:
2355 case ARRAY_RANGE_REF:
2356 case BIT_FIELD_REF:
2357 case OBJ_TYPE_REF:
2359 case REALPART_EXPR:
2360 case IMAGPART_EXPR:
2361 case PREINCREMENT_EXPR:
2362 case PREDECREMENT_EXPR:
2363 case SAVE_EXPR:
2364 case TRY_CATCH_EXPR:
2365 case WITH_CLEANUP_EXPR:
2366 case COMPOUND_EXPR:
2367 case MODIFY_EXPR:
2368 case GIMPLE_MODIFY_STMT:
2369 case TARGET_EXPR:
2370 case COND_EXPR:
2371 case BIND_EXPR:
2372 case MIN_EXPR:
2373 case MAX_EXPR:
2374 break;
2376 default:
2377 /* Assume the worst for front-end tree codes. */
2378 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2379 break;
2380 return false;
2383 return true;
2386 /* Return an expr equal to X but certainly not valid as an lvalue. */
2388 tree
2389 non_lvalue (tree x)
2391 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2392 us. */
2393 if (in_gimple_form)
2394 return x;
2396 if (! maybe_lvalue_p (x))
2397 return x;
2398 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2401 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2402 Zero means allow extended lvalues. */
2404 int pedantic_lvalues;
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2409 static tree
2410 pedantic_non_lvalue (tree x)
2412 if (pedantic_lvalues)
2413 return non_lvalue (x);
2414 else
2415 return x;
2418 /* Given a tree comparison code, return the code that is the logical inverse
2419 of the given code. It is not safe to do this for floating-point
2420 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2421 saying whether NaNs are honored: if reversing is unsafe, return ERROR_MARK. */
2423 enum tree_code
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
2426 if (honor_nans && flag_trapping_math)
2427 return ERROR_MARK;
2429 switch (code)
2431 case EQ_EXPR:
2432 return NE_EXPR;
2433 case NE_EXPR:
2434 return EQ_EXPR;
2435 case GT_EXPR:
2436 return honor_nans ? UNLE_EXPR : LE_EXPR;
2437 case GE_EXPR:
2438 return honor_nans ? UNLT_EXPR : LT_EXPR;
2439 case LT_EXPR:
2440 return honor_nans ? UNGE_EXPR : GE_EXPR;
2441 case LE_EXPR:
2442 return honor_nans ? UNGT_EXPR : GT_EXPR;
2443 case LTGT_EXPR:
2444 return UNEQ_EXPR;
2445 case UNEQ_EXPR:
2446 return LTGT_EXPR;
2447 case UNGT_EXPR:
2448 return LE_EXPR;
2449 case UNGE_EXPR:
2450 return LT_EXPR;
2451 case UNLT_EXPR:
2452 return GE_EXPR;
2453 case UNLE_EXPR:
2454 return GT_EXPR;
2455 case ORDERED_EXPR:
2456 return UNORDERED_EXPR;
2457 case UNORDERED_EXPR:
2458 return ORDERED_EXPR;
2459 default:
2460 gcc_unreachable ();
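/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   why honoring NaNs changes the inverse of a comparison.  With a NaN
   operand both (a < b) and (a >= b) are false, so GE_EXPR is not the
   inverse of LT_EXPR; the unordered form UNGE_EXPR is, as returned
   above.  isunordered is the C99 classification macro.  */
#include <math.h>
#include <assert.h>

static void invert_comparison_sketch (void)
{
  double a = NAN, b = 1.0;
  assert (!(a < b));                      /* LT is false on a NaN... */
  assert (!(a >= b));                     /* ...and so is GE */
  assert (isunordered (a, b) || a >= b);  /* UNGE holds: a true inverse */
}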
2464 /* Similar, but return the comparison that results if the operands are
2465 swapped. This is safe for floating-point. */
2467 enum tree_code
2468 swap_tree_comparison (enum tree_code code)
2470 switch (code)
2472 case EQ_EXPR:
2473 case NE_EXPR:
2474 case ORDERED_EXPR:
2475 case UNORDERED_EXPR:
2476 case LTGT_EXPR:
2477 case UNEQ_EXPR:
2478 return code;
2479 case GT_EXPR:
2480 return LT_EXPR;
2481 case GE_EXPR:
2482 return LE_EXPR;
2483 case LT_EXPR:
2484 return GT_EXPR;
2485 case LE_EXPR:
2486 return GE_EXPR;
2487 case UNGT_EXPR:
2488 return UNLT_EXPR;
2489 case UNGE_EXPR:
2490 return UNLE_EXPR;
2491 case UNLT_EXPR:
2492 return UNGT_EXPR;
2493 case UNLE_EXPR:
2494 return UNGE_EXPR;
2495 default:
2496 gcc_unreachable ();
2501 /* Convert a comparison tree code from an enum tree_code representation
2502 into a compcode bit-based encoding. This function is the inverse of
2503 compcode_to_comparison. */
2505 static enum comparison_code
2506 comparison_to_compcode (enum tree_code code)
2508 switch (code)
2510 case LT_EXPR:
2511 return COMPCODE_LT;
2512 case EQ_EXPR:
2513 return COMPCODE_EQ;
2514 case LE_EXPR:
2515 return COMPCODE_LE;
2516 case GT_EXPR:
2517 return COMPCODE_GT;
2518 case NE_EXPR:
2519 return COMPCODE_NE;
2520 case GE_EXPR:
2521 return COMPCODE_GE;
2522 case ORDERED_EXPR:
2523 return COMPCODE_ORD;
2524 case UNORDERED_EXPR:
2525 return COMPCODE_UNORD;
2526 case UNLT_EXPR:
2527 return COMPCODE_UNLT;
2528 case UNEQ_EXPR:
2529 return COMPCODE_UNEQ;
2530 case UNLE_EXPR:
2531 return COMPCODE_UNLE;
2532 case UNGT_EXPR:
2533 return COMPCODE_UNGT;
2534 case LTGT_EXPR:
2535 return COMPCODE_LTGT;
2536 case UNGE_EXPR:
2537 return COMPCODE_UNGE;
2538 default:
2539 gcc_unreachable ();
2543 /* Convert a compcode bit-based encoding of a comparison operator back
2544 to GCC's enum tree_code representation. This function is the
2545 inverse of comparison_to_compcode. */
2547 static enum tree_code
2548 compcode_to_comparison (enum comparison_code code)
2550 switch (code)
2552 case COMPCODE_LT:
2553 return LT_EXPR;
2554 case COMPCODE_EQ:
2555 return EQ_EXPR;
2556 case COMPCODE_LE:
2557 return LE_EXPR;
2558 case COMPCODE_GT:
2559 return GT_EXPR;
2560 case COMPCODE_NE:
2561 return NE_EXPR;
2562 case COMPCODE_GE:
2563 return GE_EXPR;
2564 case COMPCODE_ORD:
2565 return ORDERED_EXPR;
2566 case COMPCODE_UNORD:
2567 return UNORDERED_EXPR;
2568 case COMPCODE_UNLT:
2569 return UNLT_EXPR;
2570 case COMPCODE_UNEQ:
2571 return UNEQ_EXPR;
2572 case COMPCODE_UNLE:
2573 return UNLE_EXPR;
2574 case COMPCODE_UNGT:
2575 return UNGT_EXPR;
2576 case COMPCODE_LTGT:
2577 return LTGT_EXPR;
2578 case COMPCODE_UNGE:
2579 return UNGE_EXPR;
2580 default:
2581 gcc_unreachable ();
2585 /* Return a tree for the comparison which is the combination of
2586 doing the AND or OR (depending on CODE) of the two operations LCODE
2587 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2588 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2589 if this makes the transformation invalid. */
2591 tree
2592 combine_comparisons (enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 enum comparison_code compcode;
2601 switch (code)
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2611 default:
2612 return NULL_TREE;
2615 if (!honor_nans)
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2665 return fold_build2 (compcode_to_comparison (compcode),
2666 truth_type, ll_arg, lr_arg);
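/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the bit encoding that makes the AND/OR cases above one-liners.  Each of
   the four mutually exclusive outcomes LT, EQ, GT, UNORDERED owns one bit,
   so combining two predicates is plain bitwise arithmetic on their
   masks.  */
enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4, CC_UNORD = 8 };

static int combine_compcode_sketch (void)
{
  int le   = CC_LT | CC_EQ;             /*  3 == COMPCODE_LE   */
  int ltgt = CC_LT | CC_GT;             /*  5 == COMPCODE_LTGT */
  int ne   = CC_LT | CC_GT | CC_UNORD;  /* 13 == COMPCODE_NE   */
  /* (x <= y) && (x != y) simplifies to (x < y): 3 & 13 == 1,
     and (x <= y) || (x <> y) simplifies to ORDERED: 3 | 5 == 7.  */
  return (le & ne) == CC_LT
         && (le | ltgt) == (CC_LT | CC_EQ | CC_GT);
}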
2669 /* Return nonzero if CODE is a tree code that represents a truth value. */
2671 static int
2672 truth_value_p (enum tree_code code)
2674 return (TREE_CODE_CLASS (code) == tcc_comparison
2675 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2676 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2677 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2680 /* Return nonzero if two operands (typically of the same tree node)
2681 are necessarily equal. If either argument has side-effects this
2682 function returns zero. FLAGS modifies behavior as follows:
2684 If OEP_ONLY_CONST is set, only return nonzero for constants.
2685 This function tests whether the operands are indistinguishable;
2686 it does not test whether they are equal using C's == operation.
2687 The distinction is important for IEEE floating point, because
2688 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2689 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2692 even though it may hold multiple values during a function.
2693 This is because a GCC tree node guarantees that nothing else is
2694 executed between the evaluation of its "operands" (which may often
2695 be evaluated in arbitrary order). Hence if the operands themselves
2696 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2697 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2698 unset means assuming isochronic (or instantaneous) tree equivalence.
2699 Unless comparing arbitrary expression trees, such as from different
2700 statements, this flag can usually be left unset.
2702 If OEP_PURE_SAME is set, then pure functions with identical arguments
2703 are considered the same. It is used when the caller has other ways
2704 to ensure that global memory is unchanged in between. */
2706 int
2707 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2709 /* If either is ERROR_MARK, they aren't equal. */
2710 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2711 return 0;
2713 /* If the two types don't have the same signedness, then we can't consider
2714 them equal. We must check this before the STRIP_NOPS calls
2715 because they may change the signedness of the arguments. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2717 return 0;
2719 /* If the two types don't have the same precision, then it is not safe
2720 to strip NOPs. */
2721 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2722 return 0;
2724 STRIP_NOPS (arg0);
2725 STRIP_NOPS (arg1);
2727 /* In case both args are comparisons but with different comparison
2728 code, try to swap the comparison operands of one arg to produce
2729 a match and compare that variant. */
2730 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2731 && COMPARISON_CLASS_P (arg0)
2732 && COMPARISON_CLASS_P (arg1))
2734 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2736 if (TREE_CODE (arg0) == swap_code)
2737 return operand_equal_p (TREE_OPERAND (arg0, 0),
2738 TREE_OPERAND (arg1, 1), flags)
2739 && operand_equal_p (TREE_OPERAND (arg0, 1),
2740 TREE_OPERAND (arg1, 0), flags);
2743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2744 /* This is needed for conversions and for COMPONENT_REF.
2745 Might as well play it safe and always test this. */
2746 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2747 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2748 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2749 return 0;
2751 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2752 We don't care about side effects in that case because the SAVE_EXPR
2753 takes care of that for us. In all other cases, two expressions are
2754 equal if they have no side effects. If we have two identical
2755 expressions with side effects that should be treated the same due
2756 to the only side effects being identical SAVE_EXPR's, that will
2757 be detected in the recursive calls below. */
2758 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2759 && (TREE_CODE (arg0) == SAVE_EXPR
2760 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2761 return 1;
2763 /* Next handle constant cases, those for which we can return 1 even
2764 if ONLY_CONST is set. */
2765 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2766 switch (TREE_CODE (arg0))
2768 case INTEGER_CST:
2769 return tree_int_cst_equal (arg0, arg1);
2771 case REAL_CST:
2772 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2773 TREE_REAL_CST (arg1)))
2774 return 1;
2777 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2779 /* If we do not distinguish between positive and negative zero,
2780 consider them equal. */
2781 if (real_zerop (arg0) && real_zerop (arg1))
2782 return 1;
2784 return 0;
2786 case VECTOR_CST:
2788 tree v1, v2;
2790 v1 = TREE_VECTOR_CST_ELTS (arg0);
2791 v2 = TREE_VECTOR_CST_ELTS (arg1);
2792 while (v1 && v2)
2794 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2795 flags))
2796 return 0;
2797 v1 = TREE_CHAIN (v1);
2798 v2 = TREE_CHAIN (v2);
2801 return v1 == v2;
2804 case COMPLEX_CST:
2805 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2806 flags)
2807 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2808 flags));
2810 case STRING_CST:
2811 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2812 && ! memcmp (TREE_STRING_POINTER (arg0),
2813 TREE_STRING_POINTER (arg1),
2814 TREE_STRING_LENGTH (arg0)));
2816 case ADDR_EXPR:
2817 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2818 0);
2819 default:
2820 break;
2823 if (flags & OEP_ONLY_CONST)
2824 return 0;
2826 /* Define macros to test an operand from arg0 and arg1 for equality and a
2827 variant that allows null and views null as being different from any
2828 non-null value. In the latter case, if either is null, then both
2829 must be; otherwise, do the normal comparison. */
2830 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2831 TREE_OPERAND (arg1, N), flags)
2833 #define OP_SAME_WITH_NULL(N) \
2834 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2835 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2837 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2839 case tcc_unary:
2840 /* Two conversions are equal only if signedness and modes match. */
2841 switch (TREE_CODE (arg0))
2843 case NOP_EXPR:
2844 case CONVERT_EXPR:
2845 case FIX_TRUNC_EXPR:
2846 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2847 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2848 return 0;
2849 break;
2850 default:
2851 break;
2854 return OP_SAME (0);
2857 case tcc_comparison:
2858 case tcc_binary:
2859 if (OP_SAME (0) && OP_SAME (1))
2860 return 1;
2862 /* For commutative ops, allow the other order. */
2863 return (commutative_tree_code (TREE_CODE (arg0))
2864 && operand_equal_p (TREE_OPERAND (arg0, 0),
2865 TREE_OPERAND (arg1, 1), flags)
2866 && operand_equal_p (TREE_OPERAND (arg0, 1),
2867 TREE_OPERAND (arg1, 0), flags));
2869 case tcc_reference:
2870 /* If either of the pointer (or reference) expressions we are
2871 dereferencing contains a side effect, they cannot be equal. */
2872 if (TREE_SIDE_EFFECTS (arg0)
2873 || TREE_SIDE_EFFECTS (arg1))
2874 return 0;
2876 switch (TREE_CODE (arg0))
2878 case INDIRECT_REF:
2879 case ALIGN_INDIRECT_REF:
2880 case MISALIGNED_INDIRECT_REF:
2881 case REALPART_EXPR:
2882 case IMAGPART_EXPR:
2883 return OP_SAME (0);
2885 case ARRAY_REF:
2886 case ARRAY_RANGE_REF:
2887 /* Operands 2 and 3 may be null. */
2888 return (OP_SAME (0)
2889 && OP_SAME (1)
2890 && OP_SAME_WITH_NULL (2)
2891 && OP_SAME_WITH_NULL (3));
2893 case COMPONENT_REF:
2894 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2895 may be NULL when we're called to compare MEM_EXPRs. */
2896 return OP_SAME_WITH_NULL (0)
2897 && OP_SAME (1)
2898 && OP_SAME_WITH_NULL (2);
2900 case BIT_FIELD_REF:
2901 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2903 default:
2904 return 0;
2907 case tcc_expression:
2908 switch (TREE_CODE (arg0))
2910 case ADDR_EXPR:
2911 case TRUTH_NOT_EXPR:
2912 return OP_SAME (0);
2914 case TRUTH_ANDIF_EXPR:
2915 case TRUTH_ORIF_EXPR:
2916 return OP_SAME (0) && OP_SAME (1);
2918 case TRUTH_AND_EXPR:
2919 case TRUTH_OR_EXPR:
2920 case TRUTH_XOR_EXPR:
2921 if (OP_SAME (0) && OP_SAME (1))
2922 return 1;
2924 /* Otherwise take into account this is a commutative operation. */
2925 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2926 TREE_OPERAND (arg1, 1), flags)
2927 && operand_equal_p (TREE_OPERAND (arg0, 1),
2928 TREE_OPERAND (arg1, 0), flags));
2930 default:
2931 return 0;
2934 case tcc_vl_exp:
2935 switch (TREE_CODE (arg0))
2937 case CALL_EXPR:
2938 /* If the CALL_EXPRs call different functions, then they
2939 clearly cannot be equal. */
2940 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2941 flags))
2942 return 0;
2945 unsigned int cef = call_expr_flags (arg0);
2946 if (flags & OEP_PURE_SAME)
2947 cef &= ECF_CONST | ECF_PURE;
2948 else
2949 cef &= ECF_CONST;
2950 if (!cef)
2951 return 0;
2954 /* Now see if all the arguments are the same. */
2956 call_expr_arg_iterator iter0, iter1;
2957 tree a0, a1;
2958 for (a0 = first_call_expr_arg (arg0, &iter0),
2959 a1 = first_call_expr_arg (arg1, &iter1);
2960 a0 && a1;
2961 a0 = next_call_expr_arg (&iter0),
2962 a1 = next_call_expr_arg (&iter1))
2963 if (! operand_equal_p (a0, a1, flags))
2964 return 0;
2966 /* If we get here and both argument lists are exhausted
2967 then the CALL_EXPRs are equal. */
2968 return ! (a0 || a1);
2970 default:
2971 return 0;
2974 case tcc_declaration:
2975 /* Consider __builtin_sqrt equal to sqrt. */
2976 return (TREE_CODE (arg0) == FUNCTION_DECL
2977 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2978 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2979 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2981 default:
2982 return 0;
2985 #undef OP_SAME
2986 #undef OP_SAME_WITH_NULL
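/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the IEEE subtlety behind the OEP_ONLY_CONST documentation above.  C's ==
   is neither necessary nor sufficient for two operands being
   indistinguishable.  */
#include <math.h>
#include <assert.h>

static void equal_vs_identical_sketch (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;
  assert (pz == nz);                      /* equal by ==, yet... */
  assert (signbit (pz) != signbit (nz));  /* ...distinguishable values */
  assert (n != n);                        /* unequal, yet the same value */
}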
2989 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2990 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2992 When in doubt, return 0. */
2994 static int
2995 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2997 int unsignedp1, unsignedpo;
2998 tree primarg0, primarg1, primother;
2999 unsigned int correct_width;
3001 if (operand_equal_p (arg0, arg1, 0))
3002 return 1;
3004 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3005 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3006 return 0;
3008 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3009 and see if the inner values are the same. This removes any
3010 signedness comparison, which doesn't matter here. */
3011 primarg0 = arg0, primarg1 = arg1;
3012 STRIP_NOPS (primarg0);
3013 STRIP_NOPS (primarg1);
3014 if (operand_equal_p (primarg0, primarg1, 0))
3015 return 1;
3017 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3018 actual comparison operand, ARG0.
3020 First throw away any conversions to wider types
3021 already present in the operands. */
3023 primarg1 = get_narrower (arg1, &unsignedp1);
3024 primother = get_narrower (other, &unsignedpo);
3026 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3027 if (unsignedp1 == unsignedpo
3028 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3029 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3031 tree type = TREE_TYPE (arg0);
3033 /* Make sure shorter operand is extended the right way
3034 to match the longer operand. */
3035 primarg1 = fold_convert (get_signed_or_unsigned_type
3036 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3038 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3039 return 1;
3042 return 0;
3045 /* See if ARG is an expression that is either a comparison or is performing
3046 arithmetic on comparisons. The comparisons must only be comparing
3047 two different values, which will be stored in *CVAL1 and *CVAL2; if
3048 they are nonzero it means that some operands have already been found.
3049 No variables may be used anywhere else in the expression except in the
3050 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3051 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3053 If this is true, return 1. Otherwise, return zero. */
3055 static int
3056 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3058 enum tree_code code = TREE_CODE (arg);
3059 enum tree_code_class class = TREE_CODE_CLASS (code);
3061 /* We can handle some of the tcc_expression cases here. */
3062 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3063 class = tcc_unary;
3064 else if (class == tcc_expression
3065 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3066 || code == COMPOUND_EXPR))
3067 class = tcc_binary;
3069 else if (class == tcc_expression && code == SAVE_EXPR
3070 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3072 /* If we've already found a CVAL1 or CVAL2, this expression is
3073 too complex to handle. */
3074 if (*cval1 || *cval2)
3075 return 0;
3077 class = tcc_unary;
3078 *save_p = 1;
3081 switch (class)
3083 case tcc_unary:
3084 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3086 case tcc_binary:
3087 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3088 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3089 cval1, cval2, save_p));
3091 case tcc_constant:
3092 return 1;
3094 case tcc_expression:
3095 if (code == COND_EXPR)
3096 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3097 cval1, cval2, save_p)
3098 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3099 cval1, cval2, save_p)
3100 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3101 cval1, cval2, save_p));
3102 return 0;
3104 case tcc_comparison:
3105 /* First see if we can handle the first operand, then the second. For
3106 the second operand, we know *CVAL1 can't be zero. It must be that
3107 one side of the comparison is each of the values; test for the
3108 case where this isn't true by failing if the two operands
3109 are the same. */
3111 if (operand_equal_p (TREE_OPERAND (arg, 0),
3112 TREE_OPERAND (arg, 1), 0))
3113 return 0;
3115 if (*cval1 == 0)
3116 *cval1 = TREE_OPERAND (arg, 0);
3117 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3119 else if (*cval2 == 0)
3120 *cval2 = TREE_OPERAND (arg, 0);
3121 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3123 else
3124 return 0;
3126 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3128 else if (*cval2 == 0)
3129 *cval2 = TREE_OPERAND (arg, 1);
3130 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3132 else
3133 return 0;
3135 return 1;
3137 default:
3138 return 0;
3142 /* ARG is a tree that is known to contain just arithmetic operations and
3143 comparisons. Evaluate the operations in the tree substituting NEW0 for
3144 any occurrence of OLD0 as an operand of a comparison and likewise for
3145 NEW1 and OLD1. */
3147 static tree
3148 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3150 tree type = TREE_TYPE (arg);
3151 enum tree_code code = TREE_CODE (arg);
3152 enum tree_code_class class = TREE_CODE_CLASS (code);
3154 /* We can handle some of the tcc_expression cases here. */
3155 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3156 class = tcc_unary;
3157 else if (class == tcc_expression
3158 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3159 class = tcc_binary;
3161 switch (class)
3163 case tcc_unary:
3164 return fold_build1 (code, type,
3165 eval_subst (TREE_OPERAND (arg, 0),
3166 old0, new0, old1, new1));
3168 case tcc_binary:
3169 return fold_build2 (code, type,
3170 eval_subst (TREE_OPERAND (arg, 0),
3171 old0, new0, old1, new1),
3172 eval_subst (TREE_OPERAND (arg, 1),
3173 old0, new0, old1, new1));
3175 case tcc_expression:
3176 switch (code)
3178 case SAVE_EXPR:
3179 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3181 case COMPOUND_EXPR:
3182 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3184 case COND_EXPR:
3185 return fold_build3 (code, type,
3186 eval_subst (TREE_OPERAND (arg, 0),
3187 old0, new0, old1, new1),
3188 eval_subst (TREE_OPERAND (arg, 1),
3189 old0, new0, old1, new1),
3190 eval_subst (TREE_OPERAND (arg, 2),
3191 old0, new0, old1, new1));
3192 default:
3193 break;
3195 /* Fall through - ??? */
3197 case tcc_comparison:
3199 tree arg0 = TREE_OPERAND (arg, 0);
3200 tree arg1 = TREE_OPERAND (arg, 1);
3202 /* We need to check both for exact equality and tree equality. The
3203 former will be true if the operand has a side-effect. In that
3204 case, we know the operand occurred exactly once. */
3206 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3207 arg0 = new0;
3208 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3209 arg0 = new1;
3211 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3212 arg1 = new0;
3213 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3214 arg1 = new1;
3216 return fold_build2 (code, type, arg0, arg1);
3219 default:
3220 return arg;
3224 /* Return a tree for the case when the result of an expression is RESULT
3225 converted to TYPE and OMITTED was previously an operand of the expression
3226 but is now not needed (e.g., we folded OMITTED * 0).
3228 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3229 the conversion of RESULT to TYPE. */
3231 tree
3232 omit_one_operand (tree type, tree result, tree omitted)
3234 tree t = fold_convert (type, result);
3236 if (TREE_SIDE_EFFECTS (omitted))
3237 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3239 return non_lvalue (t);
3242 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3244 static tree
3245 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3247 tree t = fold_convert (type, result);
3249 if (TREE_SIDE_EFFECTS (omitted))
3250 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3252 return pedantic_non_lvalue (t);
3255 /* Return a tree for the case when the result of an expression is RESULT
3256 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3257 of the expression but are now not needed.
3259 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3260 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3261 evaluated before OMITTED2. Otherwise, if neither has side effects,
3262 just do the conversion of RESULT to TYPE. */
3264 tree
3265 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3267 tree t = fold_convert (type, result);
3269 if (TREE_SIDE_EFFECTS (omitted2))
3270 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3271 if (TREE_SIDE_EFFECTS (omitted1))
3272 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3274 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3278 /* Return a simplified tree node for the truth-negation of ARG. This
3279 never alters ARG itself. We assume that ARG is an operation that
3280 returns a truth value (0 or 1).
3282 FIXME: one would think we would fold the result, but it causes
3283 problems with the dominator optimizer. */
3285 tree
3286 fold_truth_not_expr (tree arg)
3288 tree type = TREE_TYPE (arg);
3289 enum tree_code code = TREE_CODE (arg);
3291 /* If this is a comparison, we can simply invert it, except for
3292 floating-point non-equality comparisons, in which case we just
3293 enclose a TRUTH_NOT_EXPR around what we have. */
3295 if (TREE_CODE_CLASS (code) == tcc_comparison)
3297 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3298 if (FLOAT_TYPE_P (op_type)
3299 && flag_trapping_math
3300 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3301 && code != NE_EXPR && code != EQ_EXPR)
3302 return NULL_TREE;
3303 else
3305 code = invert_tree_comparison (code,
3306 HONOR_NANS (TYPE_MODE (op_type)));
3307 if (code == ERROR_MARK)
3308 return NULL_TREE;
3309 else
3310 return build2 (code, type,
3311 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3315 switch (code)
3317 case INTEGER_CST:
3318 return constant_boolean_node (integer_zerop (arg), type);
3320 case TRUTH_AND_EXPR:
3321 return build2 (TRUTH_OR_EXPR, type,
3322 invert_truthvalue (TREE_OPERAND (arg, 0)),
3323 invert_truthvalue (TREE_OPERAND (arg, 1)));
3325 case TRUTH_OR_EXPR:
3326 return build2 (TRUTH_AND_EXPR, type,
3327 invert_truthvalue (TREE_OPERAND (arg, 0)),
3328 invert_truthvalue (TREE_OPERAND (arg, 1)));
3330 case TRUTH_XOR_EXPR:
3331 /* Here we can invert either operand. We invert the first operand
3332 unless the second operand is a TRUTH_NOT_EXPR in which case our
3333 result is the XOR of the first operand with the inside of the
3334 negation of the second operand. */
3336 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3337 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3338 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3339 else
3340 return build2 (TRUTH_XOR_EXPR, type,
3341 invert_truthvalue (TREE_OPERAND (arg, 0)),
3342 TREE_OPERAND (arg, 1));
3344 case TRUTH_ANDIF_EXPR:
3345 return build2 (TRUTH_ORIF_EXPR, type,
3346 invert_truthvalue (TREE_OPERAND (arg, 0)),
3347 invert_truthvalue (TREE_OPERAND (arg, 1)));
3349 case TRUTH_ORIF_EXPR:
3350 return build2 (TRUTH_ANDIF_EXPR, type,
3351 invert_truthvalue (TREE_OPERAND (arg, 0)),
3352 invert_truthvalue (TREE_OPERAND (arg, 1)));
3354 case TRUTH_NOT_EXPR:
3355 return TREE_OPERAND (arg, 0);
3357 case COND_EXPR:
3359 tree arg1 = TREE_OPERAND (arg, 1);
3360 tree arg2 = TREE_OPERAND (arg, 2);
3361 /* A COND_EXPR may have a throw as one operand, which
3362 then has void type. Just leave void operands
3363 as they are. */
3364 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3365 VOID_TYPE_P (TREE_TYPE (arg1))
3366 ? arg1 : invert_truthvalue (arg1),
3367 VOID_TYPE_P (TREE_TYPE (arg2))
3368 ? arg2 : invert_truthvalue (arg2));
3371 case COMPOUND_EXPR:
3372 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3373 invert_truthvalue (TREE_OPERAND (arg, 1)));
3375 case NON_LVALUE_EXPR:
3376 return invert_truthvalue (TREE_OPERAND (arg, 0));
3378 case NOP_EXPR:
3379 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3380 return build1 (TRUTH_NOT_EXPR, type, arg);
3382 case CONVERT_EXPR:
3383 case FLOAT_EXPR:
3384 return build1 (TREE_CODE (arg), type,
3385 invert_truthvalue (TREE_OPERAND (arg, 0)));
3387 case BIT_AND_EXPR:
3388 if (!integer_onep (TREE_OPERAND (arg, 1)))
3389 break;
3390 return build2 (EQ_EXPR, type, arg,
3391 build_int_cst (type, 0));
3393 case SAVE_EXPR:
3394 return build1 (TRUTH_NOT_EXPR, type, arg);
3396 case CLEANUP_POINT_EXPR:
3397 return build1 (CLEANUP_POINT_EXPR, type,
3398 invert_truthvalue (TREE_OPERAND (arg, 0)));
3400 default:
3401 break;
3404 return NULL_TREE;
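/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the core rewrites above in miniature, for 0/1 truth values.  Negation
   distributes over AND/OR by De Morgan, and for XOR it suffices to invert
   a single operand.  */
static int not_and_sketch (int a, int b) { return !a || !b; }  /* !(a && b) */
static int not_or_sketch  (int a, int b) { return !a && !b; }  /* !(a || b) */
static int not_xor_sketch (int a, int b) { return !a ^ b; }    /* !(a ^ b) */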
3407 /* Return a simplified tree node for the truth-negation of ARG. This
3408 never alters ARG itself. We assume that ARG is an operation that
3409 returns a truth value (0 or 1).
3411 FIXME: one would think we would fold the result, but it causes
3412 problems with the dominator optimizer. */
3414 tree
3415 invert_truthvalue (tree arg)
3417 tree tem;
3419 if (TREE_CODE (arg) == ERROR_MARK)
3420 return arg;
3422 tem = fold_truth_not_expr (arg);
3423 if (!tem)
3424 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3426 return tem;
3429 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3430 operands are another bit-wise operation with a common input. If so,
3431 distribute the bit operations to save an operation and possibly two if
3432 constants are involved. For example, convert
3433 (A | B) & (A | C) into A | (B & C)
3434 Further simplification will occur if B and C are constants.
3436 If this optimization cannot be done, 0 will be returned. */
3438 static tree
3439 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3441 tree common;
3442 tree left, right;
3444 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3445 || TREE_CODE (arg0) == code
3446 || (TREE_CODE (arg0) != BIT_AND_EXPR
3447 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3448 return 0;
3450 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3452 common = TREE_OPERAND (arg0, 0);
3453 left = TREE_OPERAND (arg0, 1);
3454 right = TREE_OPERAND (arg1, 1);
3456 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3458 common = TREE_OPERAND (arg0, 0);
3459 left = TREE_OPERAND (arg0, 1);
3460 right = TREE_OPERAND (arg1, 0);
3462 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3464 common = TREE_OPERAND (arg0, 1);
3465 left = TREE_OPERAND (arg0, 0);
3466 right = TREE_OPERAND (arg1, 1);
3468 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3470 common = TREE_OPERAND (arg0, 1);
3471 left = TREE_OPERAND (arg0, 0);
3472 right = TREE_OPERAND (arg1, 0);
3474 else
3475 return 0;
3477 return fold_build2 (TREE_CODE (arg0), type, common,
3478 fold_build2 (code, type, left, right));
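/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   a brute-force check of the identity used above.  Two ORs and an AND
   become one AND and one OR; exhausting all 3-bit patterns confirms
   (A | B) & (A | C) == A | (B & C).  */
static int distribute_bit_sketch (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        if (((a | b) & (a | c)) != (a | (b & c)))
          return 0;                /* never reached: the identity holds */
  return 1;
}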
3481 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3482 with code CODE. This optimization is unsafe. */
3483 static tree
3484 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3486 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3487 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3489 /* (A / C) +- (B / C) -> (A +- B) / C. */
3490 if (mul0 == mul1
3491 && operand_equal_p (TREE_OPERAND (arg0, 1),
3492 TREE_OPERAND (arg1, 1), 0))
3493 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3494 fold_build2 (code, type,
3495 TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0)),
3497 TREE_OPERAND (arg0, 1));
3499 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3500 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3501 TREE_OPERAND (arg1, 0), 0)
3502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3503 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3505 REAL_VALUE_TYPE r0, r1;
3506 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3507 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3508 if (!mul0)
3509 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3510 if (!mul1)
3511 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3512 real_arithmetic (&r0, code, &r0, &r1);
3513 return fold_build2 (MULT_EXPR, type,
3514 TREE_OPERAND (arg0, 0),
3515 build_real (type, r0));
3518 return NULL_TREE;
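/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the two rewrites above in plain doubles.  Both may change rounding and
   overflow behavior, which is why the comment above flags the
   optimization as unsafe.  */
static double common_divisor_sketch (double a, double b, double c)
{
  return (a + b) / c;                /* stands in for (a/c) + (b/c) */
}

static double reciprocal_sum_sketch (double a, double c1, double c2)
{
  return a * (1.0 / c1 + 1.0 / c2);  /* stands in for (a/c1) + (a/c2) */
}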
3521 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3522 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3524 static tree
3525 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3526 int unsignedp)
3528 tree result;
3530 if (bitpos == 0)
3532 tree size = TYPE_SIZE (TREE_TYPE (inner));
3533 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3534 || POINTER_TYPE_P (TREE_TYPE (inner)))
3535 && host_integerp (size, 0)
3536 && tree_low_cst (size, 0) == bitsize)
3537 return fold_convert (type, inner);
3540 result = build3 (BIT_FIELD_REF, type, inner,
3541 size_int (bitsize), bitsize_int (bitpos));
3543 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3545 return result;
3548 /* Optimize a bit-field compare.
3550 There are two cases: First is a compare against a constant and the
3551 second is a comparison of two items where the fields are at the same
3552 bit position relative to the start of a chunk (byte, halfword, word)
3553 large enough to contain it. In these cases we can avoid the shift
3554 implicit in bitfield extractions.
3556 For constants, we emit a compare of the shifted constant with the
3557 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3558 compared. For two fields at the same position, we do the ANDs with the
3559 similar mask and compare the result of the ANDs.
3561 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3562 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3563 are the left and right operands of the comparison, respectively.
3565 If the optimization described above can be done, we return the resulting
3566 tree. Otherwise we return zero. */
3568 static tree
3569 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3570 tree lhs, tree rhs)
3572 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3573 tree type = TREE_TYPE (lhs);
3574 tree signed_type, unsigned_type;
3575 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3576 enum machine_mode lmode, rmode, nmode;
3577 int lunsignedp, runsignedp;
3578 int lvolatilep = 0, rvolatilep = 0;
3579 tree linner, rinner = NULL_TREE;
3580 tree mask;
3581 tree offset;
3583 /* Get all the information about the extractions being done. If the bit size
3584 is the same as the size of the underlying object, we aren't doing an
3585 extraction at all and so can do nothing. We also don't want to
3586 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3587 then will no longer be able to replace it. */
3588 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3589 &lunsignedp, &lvolatilep, false);
3590 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3591 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3592 return 0;
3594 if (!const_p)
3596 /* If this is not a constant, we can only do something if bit positions,
3597 sizes, and signedness are the same. */
3598 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3599 &runsignedp, &rvolatilep, false);
3601 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3602 || lunsignedp != runsignedp || offset != 0
3603 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3604 return 0;
3607 /* See if we can find a mode to refer to this field. We should be able to,
3608 but fail if we can't. */
3609 nmode = get_best_mode (lbitsize, lbitpos,
3610 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3611 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3612 TYPE_ALIGN (TREE_TYPE (rinner))),
3613 word_mode, lvolatilep || rvolatilep);
3614 if (nmode == VOIDmode)
3615 return 0;
3617 /* Set signed and unsigned types of the precision of this mode for the
3618 shifts below. */
3619 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3620 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3622 /* Compute the bit position and size for the new reference and our offset
3623 within it. If the new reference is the same size as the original, we
3624 won't optimize anything, so return zero. */
3625 nbitsize = GET_MODE_BITSIZE (nmode);
3626 nbitpos = lbitpos & ~ (nbitsize - 1);
3627 lbitpos -= nbitpos;
3628 if (nbitsize == lbitsize)
3629 return 0;
3631 if (BYTES_BIG_ENDIAN)
3632 lbitpos = nbitsize - lbitsize - lbitpos;
3634 /* Make the mask to be used against the extracted field. */
3635 mask = build_int_cst_type (unsigned_type, -1);
3636 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3637 mask = const_binop (RSHIFT_EXPR, mask,
3638 size_int (nbitsize - lbitsize - lbitpos), 0);
3640 if (! const_p)
3641 /* If not comparing with a constant, just rework the comparison
3642 and return. */
3643 return fold_build2 (code, compare_type,
3644 fold_build2 (BIT_AND_EXPR, unsigned_type,
3645 make_bit_field_ref (linner,
3646 unsigned_type,
3647 nbitsize, nbitpos,
3648 1),
3649 mask),
3650 fold_build2 (BIT_AND_EXPR, unsigned_type,
3651 make_bit_field_ref (rinner,
3652 unsigned_type,
3653 nbitsize, nbitpos,
3654 1),
3655 mask));
3657 /* Otherwise, we are handling the constant case. See if the constant is too
3658 big for the field. Warn and return a tree for 0 (false) if so. We do
3659 this not only for its own sake, but to avoid having to test for this
3660 error case below. If we didn't, we might generate wrong code.
3662 For unsigned fields, the constant shifted right by the field length should
3663 be all zero. For signed fields, the high-order bits should agree with
3664 the sign bit. */
3666 if (lunsignedp)
3668 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3669 fold_convert (unsigned_type, rhs),
3670 size_int (lbitsize), 0)))
3672 warning (0, "comparison is always %d due to width of bit-field",
3673 code == NE_EXPR);
3674 return constant_boolean_node (code == NE_EXPR, compare_type);
3677 else
3679 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3680 size_int (lbitsize - 1), 0);
3681 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3683 warning (0, "comparison is always %d due to width of bit-field",
3684 code == NE_EXPR);
3685 return constant_boolean_node (code == NE_EXPR, compare_type);
3689 /* Single-bit compares should always be against zero. */
3690 if (lbitsize == 1 && ! integer_zerop (rhs))
3692 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3693 rhs = build_int_cst (type, 0);
3696 /* Make a new bitfield reference, shift the constant over the
3697 appropriate number of bits and mask it with the computed mask
3698 (in case this was a signed field). If we changed it, make a new one. */
3699 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3700 if (lvolatilep)
3702 TREE_SIDE_EFFECTS (lhs) = 1;
3703 TREE_THIS_VOLATILE (lhs) = 1;
3706 rhs = const_binop (BIT_AND_EXPR,
3707 const_binop (LSHIFT_EXPR,
3708 fold_convert (unsigned_type, rhs),
3709 size_int (lbitpos), 0),
3710 mask, 0);
3712 return build2 (code, compare_type,
3713 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3714 rhs);
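/* Editor's aside -- a minimal standalone sketch, not part of fold-const.c:
   the shape of the comparison the transformation above produces for the
   constant case.  Testing a 3-bit field at bit 4 of a word against 5
   needs no extraction shift at run time: the word is masked and compared
   with the constant shifted once, at fold time.  The layout chosen here
   (little-endian bit numbering) is an assumption of the example.  */
static int bit_field_eq_5_sketch (unsigned word)
{
  unsigned mask = 0x7u << 4;          /* three one-bits at position 4 */
  return (word & mask) == (5u << 4);  /* vs. shifting the field down */
}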
3717 /* Subroutine for fold_truthop: decode a field reference.
3719 If EXP is a comparison reference, we return the innermost reference.
3721 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3722 set to the starting bit number.
3724 If the innermost field can be completely contained in a mode-sized
3725 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3727 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3728 otherwise it is not changed.
3730 *PUNSIGNEDP is set to the signedness of the field.
3732 *PMASK is set to the mask used. This is either contained in a
3733 BIT_AND_EXPR or derived from the width of the field.
3735 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3737 Return 0 if this is not a component reference or is one that we can't
3738 do anything with. */
3740 static tree
3741 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3742 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3743 int *punsignedp, int *pvolatilep,
3744 tree *pmask, tree *pand_mask)
3746 tree outer_type = 0;
3747 tree and_mask = 0;
3748 tree mask, inner, offset;
3749 tree unsigned_type;
3750 unsigned int precision;
3752 /* All the optimizations using this function assume integer fields.
3753 There are problems with FP fields since the type_for_size call
3754 below can fail for, e.g., XFmode. */
3755 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3756 return 0;
3758 /* We are interested in the bare arrangement of bits, so strip everything
3759 that doesn't affect the machine mode. However, record the type of the
3760 outermost expression if it may matter below. */
3761 if (TREE_CODE (exp) == NOP_EXPR
3762 || TREE_CODE (exp) == CONVERT_EXPR
3763 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3764 outer_type = TREE_TYPE (exp);
3765 STRIP_NOPS (exp);
3767 if (TREE_CODE (exp) == BIT_AND_EXPR)
3769 and_mask = TREE_OPERAND (exp, 1);
3770 exp = TREE_OPERAND (exp, 0);
3771 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3772 if (TREE_CODE (and_mask) != INTEGER_CST)
3773 return 0;
3776 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3777 punsignedp, pvolatilep, false);
3778 if ((inner == exp && and_mask == 0)
3779 || *pbitsize < 0 || offset != 0
3780 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3781 return 0;
3783 /* If the number of bits in the reference is the same as the bitsize of
3784 the outer type, then the outer type gives the signedness. Otherwise
3785 (in case of a small bitfield) the signedness is unchanged. */
3786 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3787 *punsignedp = TYPE_UNSIGNED (outer_type);
3789 /* Compute the mask to access the bitfield. */
3790 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3791 precision = TYPE_PRECISION (unsigned_type);
3793 mask = build_int_cst_type (unsigned_type, -1);
3795 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3796 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3798 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3799 if (and_mask != 0)
3800 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3801 fold_convert (unsigned_type, and_mask), mask);
3803 *pmask = mask;
3804 *pand_mask = and_mask;
3805 return inner;
3808 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3809 bit positions. */
3811 static int
3812 all_ones_mask_p (tree mask, int size)
3814 tree type = TREE_TYPE (mask);
3815 unsigned int precision = TYPE_PRECISION (type);
3816 tree tmask;
3818 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3820 return
3821 tree_int_cst_equal (mask,
3822 const_binop (RSHIFT_EXPR,
3823 const_binop (LSHIFT_EXPR, tmask,
3824 size_int (precision - size),
3825 0),
3826 size_int (precision - size), 0));
3829 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3830 represents the sign bit of EXP's type. If EXP represents a sign
3831 or zero extension, also test VAL against the unextended type.
3832 The return value is the (sub)expression whose sign bit is VAL,
3833 or NULL_TREE otherwise. */
3835 static tree
3836 sign_bit_p (tree exp, tree val)
3838 unsigned HOST_WIDE_INT mask_lo, lo;
3839 HOST_WIDE_INT mask_hi, hi;
3840 int width;
3841 tree t;
3843 /* Tree EXP must have an integral type. */
3844 t = TREE_TYPE (exp);
3845 if (! INTEGRAL_TYPE_P (t))
3846 return NULL_TREE;
3848 /* Tree VAL must be an integer constant. */
3849 if (TREE_CODE (val) != INTEGER_CST
3850 || TREE_OVERFLOW (val))
3851 return NULL_TREE;
3853 width = TYPE_PRECISION (t);
3854 if (width > HOST_BITS_PER_WIDE_INT)
3856 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3857 lo = 0;
3859 mask_hi = ((unsigned HOST_WIDE_INT) -1
3860 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3861 mask_lo = -1;
3863 else
3865 hi = 0;
3866 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3868 mask_hi = 0;
3869 mask_lo = ((unsigned HOST_WIDE_INT) -1
3870 >> (HOST_BITS_PER_WIDE_INT - width));
3873 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3874 treat VAL as if it were unsigned. */
3875 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3876 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3877 return exp;
3879 /* Handle extension from a narrower type. */
3880 if (TREE_CODE (exp) == NOP_EXPR
3881 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3882 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3884 return NULL_TREE;
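
/* Illustrative sketch, not part of GCC itself: the hi/lo double-word logic
   above reduces, for types no wider than one host word, to masking VAL down
   to the bits of the type and comparing against the lone sign bit.  Assumes
   0 < WIDTH <= 64:  */

static int
example_is_sign_bit (unsigned long long val, int width)
{
  unsigned long long type_mask = ~0ull >> (64 - width);
  unsigned long long sign_bit = 1ull << (width - 1);
  return (val & type_mask) == sign_bit;
}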
3887 /* Subroutine for fold_truthop: determine if an operand is simple enough
3888 to be evaluated unconditionally. */
3890 static int
3891 simple_operand_p (tree exp)
3893 /* Strip any conversions that don't change the machine mode. */
3894 STRIP_NOPS (exp);
3896 return (CONSTANT_CLASS_P (exp)
3897 || TREE_CODE (exp) == SSA_NAME
3898 || (DECL_P (exp)
3899 && ! TREE_ADDRESSABLE (exp)
3900 && ! TREE_THIS_VOLATILE (exp)
3901 && ! DECL_NONLOCAL (exp)
3902 /* Don't regard global variables as simple. They may be
3903 allocated in ways unknown to the compiler (shared memory,
3904 #pragma weak, etc). */
3905 && ! TREE_PUBLIC (exp)
3906 && ! DECL_EXTERNAL (exp)
3907 /* Loading a static variable is unduly expensive, but global
3908 registers aren't expensive. */
3909 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3912 /* The following functions are subroutines to fold_range_test and allow it to
3913 try to change a logical combination of comparisons into a range test.
3915 For example, both
3916 X == 2 || X == 3 || X == 4 || X == 5
3917 and
3918 X >= 2 && X <= 5
3919 are converted to
3920 (unsigned) (X - 2) <= 3
3922 We describe each set of comparisons as being either inside or outside
3923 a range, using a variable named like IN_P, and then describe the
3924 range with a lower and upper bound. If one of the bounds is omitted,
3925 it represents either the highest or lowest value of the type.
3927 In the comments below, we represent a range by two numbers in brackets
3928 preceded by a "+" to designate being inside that range, or a "-" to
3929 designate being outside that range, so the condition can be inverted by
3930 flipping the prefix. An omitted bound is represented by a "-". For
3931 example, "- [-, 10]" means being outside the range starting at the lowest
3932 possible value and ending at 10, in other words, being greater than 10.
3933 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3934 always false.
3936 We set up things so that the missing bounds are handled in a consistent
3937 manner so neither a missing bound nor "true" and "false" need to be
3938 handled using a special case. */
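
/* Illustrative sketch, not part of GCC itself: the transformation quoted
   above, written out as two C predicates that agree for every int X; the
   subtraction is done in unsigned arithmetic so that it may wrap safely.  */

static int
example_range_test_naive (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
example_range_test_folded (int x)
{
  return (unsigned) x - 2u <= 3u;
}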
3940 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3941 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3942 and UPPER1_P are nonzero if the respective argument is an upper bound
3943 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3944 must be specified for a comparison. ARG1 will be converted to ARG0's
3945 type if both are specified. */
3947 static tree
3948 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3949 tree arg1, int upper1_p)
3951 tree tem;
3952 int result;
3953 int sgn0, sgn1;
3955 /* If neither arg represents infinity, do the normal operation.
3956 Else, if not a comparison, return infinity. Else handle the special
3957 comparison rules. Note that most of the cases below won't occur, but
3958 are handled for consistency. */
3960 if (arg0 != 0 && arg1 != 0)
3962 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3963 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3964 STRIP_NOPS (tem);
3965 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3968 if (TREE_CODE_CLASS (code) != tcc_comparison)
3969 return 0;
3971 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3972 for neither. In real maths, we cannot assume open ended ranges are
3973 the same. But, this is computer arithmetic, where numbers are finite.
3974 We can therefore represent any missing bound by a value Z
3975 greater than any representable number. This permits
3976 us to treat unbounded ranges as equal. */
3977 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3978 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3979 switch (code)
3981 case EQ_EXPR:
3982 result = sgn0 == sgn1;
3983 break;
3984 case NE_EXPR:
3985 result = sgn0 != sgn1;
3986 break;
3987 case LT_EXPR:
3988 result = sgn0 < sgn1;
3989 break;
3990 case LE_EXPR:
3991 result = sgn0 <= sgn1;
3992 break;
3993 case GT_EXPR:
3994 result = sgn0 > sgn1;
3995 break;
3996 case GE_EXPR:
3997 result = sgn0 >= sgn1;
3998 break;
3999 default:
4000 gcc_unreachable ();
4003 return constant_boolean_node (result, type);
4006 /* Given EXP, a logical expression, set the range it is testing into
4007 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4008 actually being tested. *PLOW and *PHIGH will be made of the same
4009 type as the returned expression. If EXP is not a comparison, we
4010 will most likely not be returning a useful value and range. Set
4011 *STRICT_OVERFLOW_P to true if the return value is only valid
4012 because signed overflow is undefined; otherwise, do not change
4013 *STRICT_OVERFLOW_P. */
4015 static tree
4016 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4017 bool *strict_overflow_p)
4019 enum tree_code code;
4020 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4021 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4022 int in_p, n_in_p;
4023 tree low, high, n_low, n_high;
4025 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4026 and see if we can refine the range. Some of the cases below may not
4027 happen, but it doesn't seem worth worrying about this. We "continue"
4028 the outer loop when we've changed something; otherwise we "break"
4029 the switch, which will "break" the while. */
4031 in_p = 0;
4032 low = high = build_int_cst (TREE_TYPE (exp), 0);
4034 while (1)
4036 code = TREE_CODE (exp);
4037 exp_type = TREE_TYPE (exp);
4039 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4041 if (TREE_OPERAND_LENGTH (exp) > 0)
4042 arg0 = TREE_OPERAND (exp, 0);
4043 if (TREE_CODE_CLASS (code) == tcc_comparison
4044 || TREE_CODE_CLASS (code) == tcc_unary
4045 || TREE_CODE_CLASS (code) == tcc_binary)
4046 arg0_type = TREE_TYPE (arg0);
4047 if (TREE_CODE_CLASS (code) == tcc_binary
4048 || TREE_CODE_CLASS (code) == tcc_comparison
4049 || (TREE_CODE_CLASS (code) == tcc_expression
4050 && TREE_OPERAND_LENGTH (exp) > 1))
4051 arg1 = TREE_OPERAND (exp, 1);
4054 switch (code)
4056 case TRUTH_NOT_EXPR:
4057 in_p = ! in_p, exp = arg0;
4058 continue;
4060 case EQ_EXPR: case NE_EXPR:
4061 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4062 /* We can only do something if the range is testing for zero
4063 and if the second operand is an integer constant. Note that
4064 saying something is "in" the range we make is done by
4065 complementing IN_P, since IN_P starts out clear for the initial
4066 case of being not equal to zero; "out" is leaving it alone. */
4067 if (low == 0 || high == 0
4068 || ! integer_zerop (low) || ! integer_zerop (high)
4069 || TREE_CODE (arg1) != INTEGER_CST)
4070 break;
4072 switch (code)
4074 case NE_EXPR: /* - [c, c] */
4075 low = high = arg1;
4076 break;
4077 case EQ_EXPR: /* + [c, c] */
4078 in_p = ! in_p, low = high = arg1;
4079 break;
4080 case GT_EXPR: /* - [-, c] */
4081 low = 0, high = arg1;
4082 break;
4083 case GE_EXPR: /* + [c, -] */
4084 in_p = ! in_p, low = arg1, high = 0;
4085 break;
4086 case LT_EXPR: /* - [c, -] */
4087 low = arg1, high = 0;
4088 break;
4089 case LE_EXPR: /* + [-, c] */
4090 in_p = ! in_p, low = 0, high = arg1;
4091 break;
4092 default:
4093 gcc_unreachable ();
4096 /* If this is an unsigned comparison, we also know that EXP is
4097 greater than or equal to zero. We base the range tests we make
4098 on that fact, so we record it here so we can parse existing
4099 range tests. We test arg0_type since often the return type
4100 of, e.g. EQ_EXPR, is boolean. */
4101 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4103 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4104 in_p, low, high, 1,
4105 build_int_cst (arg0_type, 0),
4106 NULL_TREE))
4107 break;
4109 in_p = n_in_p, low = n_low, high = n_high;
4111 /* If the high bound is missing, but we have a nonzero low
4112 bound, reverse the range so it goes from zero to the low bound
4113 minus 1. */
4114 if (high == 0 && low && ! integer_zerop (low))
4116 in_p = ! in_p;
4117 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4118 integer_one_node, 0);
4119 low = build_int_cst (arg0_type, 0);
4123 exp = arg0;
4124 continue;
4126 case NEGATE_EXPR:
4127 /* (-x) IN [a,b] -> x in [-b, -a] */
4128 n_low = range_binop (MINUS_EXPR, exp_type,
4129 build_int_cst (exp_type, 0),
4130 0, high, 1);
4131 n_high = range_binop (MINUS_EXPR, exp_type,
4132 build_int_cst (exp_type, 0),
4133 0, low, 0);
4134 low = n_low, high = n_high;
4135 exp = arg0;
4136 continue;
4138 case BIT_NOT_EXPR:
4139 /* ~ X -> -X - 1 */
4140 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4141 build_int_cst (exp_type, 1));
4142 continue;
4144 case PLUS_EXPR: case MINUS_EXPR:
4145 if (TREE_CODE (arg1) != INTEGER_CST)
4146 break;
4148 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4149 move a constant to the other side. */
4150 if (!TYPE_UNSIGNED (arg0_type)
4151 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4152 break;
4154 /* If EXP is signed, any overflow in the computation is undefined,
4155 so we don't worry about it so long as our computations on
4156 the bounds don't overflow. For unsigned, overflow is defined
4157 and this is exactly the right thing. */
4158 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4159 arg0_type, low, 0, arg1, 0);
4160 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4161 arg0_type, high, 1, arg1, 0);
4162 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4163 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4164 break;
4166 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4167 *strict_overflow_p = true;
4169 /* Check for an unsigned range which has wrapped around the maximum
4170 value thus making n_high < n_low, and normalize it. */
4171 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4173 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4174 integer_one_node, 0);
4175 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4176 integer_one_node, 0);
4178 /* If the range is of the form +/- [ x+1, x ], we won't
4179 be able to normalize it. But then, it represents the
4180 whole range or the empty set, so make it
4181 +/- [ -, - ]. */
4182 if (tree_int_cst_equal (n_low, low)
4183 && tree_int_cst_equal (n_high, high))
4184 low = high = 0;
4185 else
4186 in_p = ! in_p;
4188 else
4189 low = n_low, high = n_high;
4191 exp = arg0;
4192 continue;
4194 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4195 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4196 break;
4198 if (! INTEGRAL_TYPE_P (arg0_type)
4199 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4200 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4201 break;
4203 n_low = low, n_high = high;
4205 if (n_low != 0)
4206 n_low = fold_convert (arg0_type, n_low);
4208 if (n_high != 0)
4209 n_high = fold_convert (arg0_type, n_high);
4212 /* If we're converting arg0 from an unsigned type to exp's
4213 signed type, we will be doing the comparison as unsigned.
4214 The tests above have already verified that LOW and HIGH
4215 are both positive.
4217 So we have to ensure that we will handle large unsigned
4218 values the same way that the current signed bounds treat
4219 negative values. */
4221 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4223 tree high_positive;
4224 tree equiv_type = lang_hooks.types.type_for_mode
4225 (TYPE_MODE (arg0_type), 1);
4227 /* A range without an upper bound is, naturally, unbounded.
4228 Since convert would have cropped a very large value, use
4229 the max value for the destination type. */
4230 high_positive
4231 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4232 : TYPE_MAX_VALUE (arg0_type);
4234 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4235 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4236 fold_convert (arg0_type,
4237 high_positive),
4238 build_int_cst (arg0_type, 1));
4240 /* If the low bound is specified, "and" the range with the
4241 range for which the original unsigned value will be
4242 positive. */
4243 if (low != 0)
4245 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4246 1, n_low, n_high, 1,
4247 fold_convert (arg0_type,
4248 integer_zero_node),
4249 high_positive))
4250 break;
4252 in_p = (n_in_p == in_p);
4254 else
4256 /* Otherwise, "or" the range with the range of the input
4257 that will be interpreted as negative. */
4258 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4259 0, n_low, n_high, 1,
4260 fold_convert (arg0_type,
4261 integer_zero_node),
4262 high_positive))
4263 break;
4265 in_p = (in_p != n_in_p);
4269 exp = arg0;
4270 low = n_low, high = n_high;
4271 continue;
4273 default:
4274 break;
4277 break;
4280 /* If EXP is a constant, we can evaluate whether this is true or false. */
4281 if (TREE_CODE (exp) == INTEGER_CST)
4283 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4284 exp, 0, low, 0))
4285 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4286 exp, 1, high, 1)));
4287 low = high = 0;
4288 exp = 0;
4291 *pin_p = in_p, *plow = low, *phigh = high;
4292 return exp;
4295 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4296 type, TYPE, return an expression to test if EXP is in (or out of, depending
4297 on IN_P) the range. Return 0 if the test couldn't be created. */
4299 static tree
4300 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4302 tree etype = TREE_TYPE (exp);
4303 tree value;
4305 #ifdef HAVE_canonicalize_funcptr_for_compare
4306 /* Disable this optimization for function pointer expressions
4307 on targets that require function pointer canonicalization. */
4308 if (HAVE_canonicalize_funcptr_for_compare
4309 && TREE_CODE (etype) == POINTER_TYPE
4310 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4311 return NULL_TREE;
4312 #endif
4314 if (! in_p)
4316 value = build_range_check (type, exp, 1, low, high);
4317 if (value != 0)
4318 return invert_truthvalue (value);
4320 return 0;
4323 if (low == 0 && high == 0)
4324 return build_int_cst (type, 1);
4326 if (low == 0)
4327 return fold_build2 (LE_EXPR, type, exp,
4328 fold_convert (etype, high));
4330 if (high == 0)
4331 return fold_build2 (GE_EXPR, type, exp,
4332 fold_convert (etype, low));
4334 if (operand_equal_p (low, high, 0))
4335 return fold_build2 (EQ_EXPR, type, exp,
4336 fold_convert (etype, low));
4338 if (integer_zerop (low))
4340 if (! TYPE_UNSIGNED (etype))
4342 etype = lang_hooks.types.unsigned_type (etype);
4343 high = fold_convert (etype, high);
4344 exp = fold_convert (etype, exp);
4346 return build_range_check (type, exp, 1, 0, high);
4349 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4350 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4352 unsigned HOST_WIDE_INT lo;
4353 HOST_WIDE_INT hi;
4354 int prec;
4356 prec = TYPE_PRECISION (etype);
4357 if (prec <= HOST_BITS_PER_WIDE_INT)
4359 hi = 0;
4360 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4362 else
4364 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4365 lo = (unsigned HOST_WIDE_INT) -1;
4368 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4370 if (TYPE_UNSIGNED (etype))
4372 etype = lang_hooks.types.signed_type (etype);
4373 exp = fold_convert (etype, exp);
4375 return fold_build2 (GT_EXPR, type, exp,
4376 build_int_cst (etype, 0));
4380 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4381 This requires wrap-around arithmetic for the type of the expression. */
4382 switch (TREE_CODE (etype))
4384 case INTEGER_TYPE:
4385 /* There is no requirement that LOW be within the range of ETYPE
4386 if the latter is a subtype. It must, however, be within the base
4387 type of ETYPE. So be sure we do the subtraction in that type. */
4388 if (TREE_TYPE (etype))
4389 etype = TREE_TYPE (etype);
4390 break;
4392 case ENUMERAL_TYPE:
4393 case BOOLEAN_TYPE:
4394 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4395 TYPE_UNSIGNED (etype));
4396 break;
4398 default:
4399 break;
4402 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4403 if (TREE_CODE (etype) == INTEGER_TYPE
4404 && !TYPE_OVERFLOW_WRAPS (etype))
4406 tree utype, minv, maxv;
4408 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4409 for the type in question, as we rely on this here. */
4410 utype = lang_hooks.types.unsigned_type (etype);
4411 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4412 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4413 integer_one_node, 1);
4414 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4416 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4417 minv, 1, maxv, 1)))
4418 etype = utype;
4419 else
4420 return 0;
4423 high = fold_convert (etype, high);
4424 low = fold_convert (etype, low);
4425 exp = fold_convert (etype, exp);
4427 value = const_binop (MINUS_EXPR, high, low, 0);
4429 if (value != 0 && !TREE_OVERFLOW (value))
4430 return build_range_check (type,
4431 fold_build2 (MINUS_EXPR, etype, exp, low),
4432 1, build_int_cst (etype, 0), value);
4434 return 0;
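
/* Illustrative sketch, not part of GCC itself: the check built above,
   (LOW <= C && C <= HIGH) becoming a single comparison of C - LOW against
   HIGH - LOW, with the subtraction done in unsigned arithmetic so that it
   wraps.  Assumes low <= high:  */

static int
example_in_range_naive (int c, int low, int high)
{
  return low <= c && c <= high;
}

static int
example_in_range_folded (int c, int low, int high)
{
  return (unsigned) c - (unsigned) low <= (unsigned) high - (unsigned) low;
}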
4437 /* Return the predecessor of VAL in its type, handling the infinite case. */
4439 static tree
4440 range_predecessor (tree val)
4442 tree type = TREE_TYPE (val);
4444 if (INTEGRAL_TYPE_P (type)
4445 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4446 return 0;
4447 else
4448 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4451 /* Return the successor of VAL in its type, handling the infinite case. */
4453 static tree
4454 range_successor (tree val)
4456 tree type = TREE_TYPE (val);
4458 if (INTEGRAL_TYPE_P (type)
4459 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4460 return 0;
4461 else
4462 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4465 /* Given two ranges, see if we can merge them into one. Return 1 if we
4466 can, 0 if we can't. Set the output range into the specified parameters. */
4468 static int
4469 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4470 tree high0, int in1_p, tree low1, tree high1)
4472 int no_overlap;
4473 int subset;
4474 int temp;
4475 tree tem;
4476 int in_p;
4477 tree low, high;
4478 int lowequal = ((low0 == 0 && low1 == 0)
4479 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4480 low0, 0, low1, 0)));
4481 int highequal = ((high0 == 0 && high1 == 0)
4482 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4483 high0, 1, high1, 1)));
4485 /* Make range 0 be the range that starts first, or ends last if they
4486 start at the same value. Swap them if it isn't. */
4487 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4488 low0, 0, low1, 0))
4489 || (lowequal
4490 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4491 high1, 1, high0, 1))))
4493 temp = in0_p, in0_p = in1_p, in1_p = temp;
4494 tem = low0, low0 = low1, low1 = tem;
4495 tem = high0, high0 = high1, high1 = tem;
4498 /* Now flag two cases, whether the ranges are disjoint or whether the
4499 second range is totally subsumed in the first. Note that the tests
4500 below are simplified by the ones above. */
4501 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4502 high0, 1, low1, 0));
4503 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4504 high1, 1, high0, 1));
4506 /* We now have four cases, depending on whether we are including or
4507 excluding the two ranges. */
4508 if (in0_p && in1_p)
4510 /* If they don't overlap, the result is false. If the second range
4511 is a subset it is the result. Otherwise, the range is from the start
4512 of the second to the end of the first. */
4513 if (no_overlap)
4514 in_p = 0, low = high = 0;
4515 else if (subset)
4516 in_p = 1, low = low1, high = high1;
4517 else
4518 in_p = 1, low = low1, high = high0;
4521 else if (in0_p && ! in1_p)
4523 /* If they don't overlap, the result is the first range. If they are
4524 equal, the result is false. If the second range is a subset of the
4525 first, and the ranges begin at the same place, we go from just after
4526 the end of the second range to the end of the first. If the second
4527 range is not a subset of the first, or if it is a subset and both
4528 ranges end at the same place, the range starts at the start of the
4529 first range and ends just before the second range.
4530 Otherwise, we can't describe this as a single range. */
4531 if (no_overlap)
4532 in_p = 1, low = low0, high = high0;
4533 else if (lowequal && highequal)
4534 in_p = 0, low = high = 0;
4535 else if (subset && lowequal)
4537 low = range_successor (high1);
4538 high = high0;
4539 in_p = (low != 0);
4541 else if (! subset || highequal)
4543 low = low0;
4544 high = range_predecessor (low1);
4545 in_p = (high != 0);
4547 else
4548 return 0;
4551 else if (! in0_p && in1_p)
4553 /* If they don't overlap, the result is the second range. If the second
4554 is a subset of the first, the result is false. Otherwise,
4555 the range starts just after the first range and ends at the
4556 end of the second. */
4557 if (no_overlap)
4558 in_p = 1, low = low1, high = high1;
4559 else if (subset || highequal)
4560 in_p = 0, low = high = 0;
4561 else
4563 low = range_successor (high0);
4564 high = high1;
4565 in_p = (low != 0);
4569 else
4571 /* The case where we are excluding both ranges. Here the complex case
4572 is if they don't overlap. In that case, the only time we have a
4573 range is if they are adjacent. If the second is a subset of the
4574 first, the result is the first. Otherwise, the range to exclude
4575 starts at the beginning of the first range and ends at the end of the
4576 second. */
4577 if (no_overlap)
4579 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4580 range_successor (high0),
4581 1, low1, 0)))
4582 in_p = 0, low = low0, high = high1;
4583 else
4585 /* Canonicalize - [min, x] into - [-, x]. */
4586 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4587 switch (TREE_CODE (TREE_TYPE (low0)))
4589 case ENUMERAL_TYPE:
4590 if (TYPE_PRECISION (TREE_TYPE (low0))
4591 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4592 break;
4593 /* FALLTHROUGH */
4594 case INTEGER_TYPE:
4595 if (tree_int_cst_equal (low0,
4596 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4597 low0 = 0;
4598 break;
4599 case POINTER_TYPE:
4600 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4601 && integer_zerop (low0))
4602 low0 = 0;
4603 break;
4604 default:
4605 break;
4608 /* Canonicalize - [x, max] into - [x, -]. */
4609 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4610 switch (TREE_CODE (TREE_TYPE (high1)))
4612 case ENUMERAL_TYPE:
4613 if (TYPE_PRECISION (TREE_TYPE (high1))
4614 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4615 break;
4616 /* FALLTHROUGH */
4617 case INTEGER_TYPE:
4618 if (tree_int_cst_equal (high1,
4619 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4620 high1 = 0;
4621 break;
4622 case POINTER_TYPE:
4623 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4624 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4625 high1, 1,
4626 integer_one_node, 1)))
4627 high1 = 0;
4628 break;
4629 default:
4630 break;
4633 /* The ranges might also be adjacent between the maximum and
4634 minimum values of the given type. For
4635 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4636 return + [x + 1, y - 1]. */
4637 if (low0 == 0 && high1 == 0)
4639 low = range_successor (high0);
4640 high = range_predecessor (low1);
4641 if (low == 0 || high == 0)
4642 return 0;
4644 in_p = 1;
4646 else
4647 return 0;
4650 else if (subset)
4651 in_p = 0, low = low0, high = high0;
4652 else
4653 in_p = 0, low = low0, high = high1;
4656 *pin_p = in_p, *plow = low, *phigh = high;
4657 return 1;
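
/* Illustrative sketch, not part of GCC itself: the in0_p && in1_p case
   above for two overlapping "inside" ranges, which merge to the range
   running from the start of the second to the end of the first:  */

static int
example_overlap_naive (int x)
{
  return (2 <= x && x <= 8) && (5 <= x && x <= 12);
}

static int
example_overlap_merged (int x)
{
  return 5 <= x && x <= 8;      /* [low1, high0] */
}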
4661 /* Subroutine of fold, looking inside expressions of the form
4662 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4663 of the COND_EXPR. This function is being used also to optimize
4664 A op B ? C : A, by reversing the comparison first.
4666 Return a folded expression whose code is not a COND_EXPR
4667 anymore, or NULL_TREE if no folding opportunity is found. */
4669 static tree
4670 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4672 enum tree_code comp_code = TREE_CODE (arg0);
4673 tree arg00 = TREE_OPERAND (arg0, 0);
4674 tree arg01 = TREE_OPERAND (arg0, 1);
4675 tree arg1_type = TREE_TYPE (arg1);
4676 tree tem;
4678 STRIP_NOPS (arg1);
4679 STRIP_NOPS (arg2);
4681 /* If we have A op 0 ? A : -A, consider applying the following
4682 transformations:
4684 A == 0? A : -A same as -A
4685 A != 0? A : -A same as A
4686 A >= 0? A : -A same as abs (A)
4687 A > 0? A : -A same as abs (A)
4688 A <= 0? A : -A same as -abs (A)
4689 A < 0? A : -A same as -abs (A)
4691 None of these transformations work for modes with signed
4692 zeros. If A is +/-0, the first two transformations will
4693 change the sign of the result (from +0 to -0, or vice
4694 versa). The last four will fix the sign of the result,
4695 even though the original expressions could be positive or
4696 negative, depending on the sign of A.
4698 Note that all these transformations are correct if A is
4699 NaN, since the two alternatives (A and -A) are also NaNs. */
4700 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4701 ? real_zerop (arg01)
4702 : integer_zerop (arg01))
4703 && ((TREE_CODE (arg2) == NEGATE_EXPR
4704 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4705 /* In the case that A is of the form X-Y, '-A' (arg2) may
4706 have already been folded to Y-X; check for that. */
4707 || (TREE_CODE (arg1) == MINUS_EXPR
4708 && TREE_CODE (arg2) == MINUS_EXPR
4709 && operand_equal_p (TREE_OPERAND (arg1, 0),
4710 TREE_OPERAND (arg2, 1), 0)
4711 && operand_equal_p (TREE_OPERAND (arg1, 1),
4712 TREE_OPERAND (arg2, 0), 0))))
4713 switch (comp_code)
4715 case EQ_EXPR:
4716 case UNEQ_EXPR:
4717 tem = fold_convert (arg1_type, arg1);
4718 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4719 case NE_EXPR:
4720 case LTGT_EXPR:
4721 return pedantic_non_lvalue (fold_convert (type, arg1));
4722 case UNGE_EXPR:
4723 case UNGT_EXPR:
4724 if (flag_trapping_math)
4725 break;
4726 /* Fall through. */
4727 case GE_EXPR:
4728 case GT_EXPR:
4729 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4730 arg1 = fold_convert (lang_hooks.types.signed_type
4731 (TREE_TYPE (arg1)), arg1);
4732 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4733 return pedantic_non_lvalue (fold_convert (type, tem));
4734 case UNLE_EXPR:
4735 case UNLT_EXPR:
4736 if (flag_trapping_math)
4737 break;
4738 case LE_EXPR:
4739 case LT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert (lang_hooks.types.signed_type
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return negate_expr (fold_convert (type, tem));
4745 default:
4746 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4747 break;
4750 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4751 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4752 both transformations are correct when A is NaN: A != 0
4753 is then true, and A == 0 is false. */
4755 if (integer_zerop (arg01) && integer_zerop (arg2))
4757 if (comp_code == NE_EXPR)
4758 return pedantic_non_lvalue (fold_convert (type, arg1));
4759 else if (comp_code == EQ_EXPR)
4760 return build_int_cst (type, 0);
4763 /* Try some transformations of A op B ? A : B.
4765 A == B? A : B same as B
4766 A != B? A : B same as A
4767 A >= B? A : B same as max (A, B)
4768 A > B? A : B same as max (B, A)
4769 A <= B? A : B same as min (A, B)
4770 A < B? A : B same as min (B, A)
4772 As above, these transformations don't work in the presence
4773 of signed zeros. For example, if A and B are zeros of
4774 opposite sign, the first two transformations will change
4775 the sign of the result. In the last four, the original
4776 expressions give different results for (A=+0, B=-0) and
4777 (A=-0, B=+0), but the transformed expressions do not.
4779 The first two transformations are correct if either A or B
4780 is a NaN. In the first transformation, the condition will
4781 be false, and B will indeed be chosen. In the case of the
4782 second transformation, the condition A != B will be true,
4783 and A will be chosen.
4785 The conversions to max() and min() are not correct if B is
4786 a number and A is not. The conditions in the original
4787 expressions will be false, so all four give B. The min()
4788 and max() versions would give a NaN instead. */
4789 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4790 /* Avoid these transformations if the COND_EXPR may be used
4791 as an lvalue in the C++ front-end. PR c++/19199. */
4792 && (in_gimple_form
4793 || (strcmp (lang_hooks.name, "GNU C++") != 0
4794 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4795 || ! maybe_lvalue_p (arg1)
4796 || ! maybe_lvalue_p (arg2)))
4798 tree comp_op0 = arg00;
4799 tree comp_op1 = arg01;
4800 tree comp_type = TREE_TYPE (comp_op0);
4802 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4803 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4805 comp_type = type;
4806 comp_op0 = arg1;
4807 comp_op1 = arg2;
4810 switch (comp_code)
4812 case EQ_EXPR:
4813 return pedantic_non_lvalue (fold_convert (type, arg2));
4814 case NE_EXPR:
4815 return pedantic_non_lvalue (fold_convert (type, arg1));
4816 case LE_EXPR:
4817 case LT_EXPR:
4818 case UNLE_EXPR:
4819 case UNLT_EXPR:
4820 /* In C++ a ?: expression can be an lvalue, so put the
4821 operand which will be used if they are equal first
4822 so that we can convert this back to the
4823 corresponding COND_EXPR. */
4824 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4826 comp_op0 = fold_convert (comp_type, comp_op0);
4827 comp_op1 = fold_convert (comp_type, comp_op1);
4828 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4829 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4830 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4831 return pedantic_non_lvalue (fold_convert (type, tem));
4833 break;
4834 case GE_EXPR:
4835 case GT_EXPR:
4836 case UNGE_EXPR:
4837 case UNGT_EXPR:
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 comp_op0 = fold_convert (comp_type, comp_op0);
4841 comp_op1 = fold_convert (comp_type, comp_op1);
4842 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4843 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4845 return pedantic_non_lvalue (fold_convert (type, tem));
4847 break;
4848 case UNEQ_EXPR:
4849 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4850 return pedantic_non_lvalue (fold_convert (type, arg2));
4851 break;
4852 case LTGT_EXPR:
4853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4854 return pedantic_non_lvalue (fold_convert (type, arg1));
4855 break;
4856 default:
4857 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4858 break;
4862 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4863 we might still be able to simplify this. For example,
4864 if C1 is one less or one more than C2, this might have started
4865 out as a MIN or MAX and been transformed by this function.
4866 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4868 if (INTEGRAL_TYPE_P (type)
4869 && TREE_CODE (arg01) == INTEGER_CST
4870 && TREE_CODE (arg2) == INTEGER_CST)
4871 switch (comp_code)
4873 case EQ_EXPR:
4874 /* We can replace A with C1 in this case. */
4875 arg1 = fold_convert (type, arg01);
4876 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4878 case LT_EXPR:
4879 /* If C1 is C2 + 1, this is min(A, C2). */
4880 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4881 OEP_ONLY_CONST)
4882 && operand_equal_p (arg01,
4883 const_binop (PLUS_EXPR, arg2,
4884 build_int_cst (type, 1), 0),
4885 OEP_ONLY_CONST))
4886 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4887 type, arg1, arg2));
4888 break;
4890 case LE_EXPR:
4891 /* If C1 is C2 - 1, this is min(A, C2). */
4892 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (MINUS_EXPR, arg2,
4896 build_int_cst (type, 1), 0),
4897 OEP_ONLY_CONST))
4898 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4899 type, arg1, arg2));
4900 break;
4902 case GT_EXPR:
4903 /* If C1 is C2 - 1, this is max(A, C2). */
4904 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4905 OEP_ONLY_CONST)
4906 && operand_equal_p (arg01,
4907 const_binop (MINUS_EXPR, arg2,
4908 build_int_cst (type, 1), 0),
4909 OEP_ONLY_CONST))
4910 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4911 type, arg1, arg2));
4912 break;
4914 case GE_EXPR:
4915 /* If C1 is C2 + 1, this is max(A, C2). */
4916 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4917 OEP_ONLY_CONST)
4918 && operand_equal_p (arg01,
4919 const_binop (PLUS_EXPR, arg2,
4920 build_int_cst (type, 1), 0),
4921 OEP_ONLY_CONST))
4922 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4923 type, arg1, arg2));
4924 break;
4925 case NE_EXPR:
4926 break;
4927 default:
4928 gcc_unreachable ();
4931 return NULL_TREE;
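
/* Illustrative sketch, not part of GCC itself: two of the A op 0 ? A : -A
   transformations listed above, written for integers, where no signed
   zeros get in the way (undefined for INT_MIN, just as with abs):  */

static int
example_cond_abs (int a)
{
  return a >= 0 ? a : -a;       /* folds to abs (A) */
}

static int
example_cond_neg_abs (int a)
{
  return a <= 0 ? a : -a;       /* folds to -abs (A) */
}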
4936 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4937 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4938 #endif
4940 /* EXP is some logical combination of boolean tests. See if we can
4941 merge it into some range test. Return the new tree if so. */
4943 static tree
4944 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4946 int or_op = (code == TRUTH_ORIF_EXPR
4947 || code == TRUTH_OR_EXPR);
4948 int in0_p, in1_p, in_p;
4949 tree low0, low1, low, high0, high1, high;
4950 bool strict_overflow_p = false;
4951 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4952 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4953 tree tem;
4954 const char * const warnmsg = G_("assuming signed overflow does not occur "
4955 "when simplifying range test");
4957 /* If this is an OR operation, invert both sides; we will invert
4958 again at the end. */
4959 if (or_op)
4960 in0_p = ! in0_p, in1_p = ! in1_p;
4962 /* If both expressions are the same, if we can merge the ranges, and we
4963 can build the range test, return it or it inverted. If one of the
4964 ranges is always true or always false, consider it to be the same
4965 expression as the other. */
4966 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4967 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4968 in1_p, low1, high1)
4969 && 0 != (tem = (build_range_check (type,
4970 lhs != 0 ? lhs
4971 : rhs != 0 ? rhs : integer_zero_node,
4972 in_p, low, high))))
4974 if (strict_overflow_p)
4975 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4976 return or_op ? invert_truthvalue (tem) : tem;
4979 /* On machines where the branch cost is expensive, if this is a
4980 short-circuited branch and the underlying object on both sides
4981 is the same, make a non-short-circuit operation. */
4982 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4983 && lhs != 0 && rhs != 0
4984 && (code == TRUTH_ANDIF_EXPR
4985 || code == TRUTH_ORIF_EXPR)
4986 && operand_equal_p (lhs, rhs, 0))
4988 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4989 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4990 which cases we can't do this. */
4991 if (simple_operand_p (lhs))
4992 return build2 (code == TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 type, op0, op1);
4996 else if (lang_hooks.decls.global_bindings_p () == 0
4997 && ! CONTAINS_PLACEHOLDER_P (lhs))
4999 tree common = save_expr (lhs);
5001 if (0 != (lhs = build_range_check (type, common,
5002 or_op ? ! in0_p : in0_p,
5003 low0, high0))
5004 && (0 != (rhs = build_range_check (type, common,
5005 or_op ? ! in1_p : in1_p,
5006 low1, high1))))
5008 if (strict_overflow_p)
5009 fold_overflow_warning (warnmsg,
5010 WARN_STRICT_OVERFLOW_COMPARISON);
5011 return build2 (code == TRUTH_ANDIF_EXPR
5012 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5013 type, lhs, rhs);
5018 return 0;
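
/* Illustrative sketch, not part of GCC itself: the non-short-circuit
   rewrite above, in C terms turning a && b into a bitwise AND of two
   values that are already 0 or 1, so no branch is needed:  */

static int
example_andif (int a, int b)
{
  return a && b;                /* TRUTH_ANDIF_EXPR */
}

static int
example_and_flat (int a, int b)
{
  return (a != 0) & (b != 0);   /* TRUTH_AND_EXPR */
}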
5021 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5022 bit value. Arrange things so the extra bits will be set to zero if and
5023 only if C is sign-extended to its full width. If MASK is nonzero,
5024 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5026 static tree
5027 unextend (tree c, int p, int unsignedp, tree mask)
5029 tree type = TREE_TYPE (c);
5030 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5031 tree temp;
5033 if (p == modesize || unsignedp)
5034 return c;
5036 /* We work by getting just the sign bit into the low-order bit, then
5037 into the high-order bit, then sign-extend. We then XOR that value
5038 with C. */
5039 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5040 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5042 /* We must use a signed type in order to get an arithmetic right shift.
5043 However, we must also avoid introducing accidental overflows, so that
5044 a subsequent call to integer_zerop will work. Hence we must
5045 do the type conversion here. At this point, the constant is either
5046 zero or one, and the conversion to a signed type can never overflow.
5047 We could get an overflow if this conversion is done anywhere else. */
5048 if (TYPE_UNSIGNED (type))
5049 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5051 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5052 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5053 if (mask != 0)
5054 temp = const_binop (BIT_AND_EXPR, temp,
5055 fold_convert (TREE_TYPE (c), mask), 0);
5056 /* If necessary, convert the type back to match the type of C. */
5057 if (TYPE_UNSIGNED (type))
5058 temp = fold_convert (type, temp);
5060 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
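
/* Illustrative sketch, not part of GCC itself: the shift sequence above
   isolates the sign bit of the P-bit field and spreads it; a related and
   more familiar sign-extension idiom on host integers is (C ^ M) - M with
   M the sign bit.  Assumes 0 < P <= 64 and C fits in P bits:  */

static long long
example_sign_extend (unsigned long long c, int p)
{
  unsigned long long m = 1ull << (p - 1);
  return (long long) ((c ^ m) - m);
}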
5063 /* Find ways of folding logical expressions of LHS and RHS:
5064 Try to merge two comparisons to the same innermost item.
5065 Look for range tests like "ch >= '0' && ch <= '9'".
5066 Look for combinations of simple terms on machines with expensive branches
5067 and evaluate the RHS unconditionally.
5069 For example, if we have p->a == 2 && p->b == 4 and we can make an
5070 object large enough to span both A and B, we can do this with a comparison
5071 against the object ANDed with the a mask.
5073 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5074 operations to do this with one comparison.
5076 We check for both normal comparisons and the BIT_AND_EXPRs made by
5077 this function and the one above.
5079 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5080 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5082 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5083 two operands.
5085 We return the simplified tree or 0 if no optimization is possible. */
5087 static tree
5088 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5090 /* If this is the "or" of two comparisons, we can do something if
5091 the comparisons are NE_EXPR. If this is the "and", we can do something
5092 if the comparisons are EQ_EXPR. I.e.,
5093 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5095 WANTED_CODE is this operation code. For single bit fields, we can
5096 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5097 comparison for one-bit fields. */
5099 enum tree_code wanted_code;
5100 enum tree_code lcode, rcode;
5101 tree ll_arg, lr_arg, rl_arg, rr_arg;
5102 tree ll_inner, lr_inner, rl_inner, rr_inner;
5103 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5104 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5105 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5106 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5107 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5108 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5109 enum machine_mode lnmode, rnmode;
5110 tree ll_mask, lr_mask, rl_mask, rr_mask;
5111 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5112 tree l_const, r_const;
5113 tree lntype, rntype, result;
5114 int first_bit, end_bit;
5115 int volatilep;
5116 tree orig_lhs = lhs, orig_rhs = rhs;
5117 enum tree_code orig_code = code;
5119 /* Start by getting the comparison codes. Fail if anything is volatile.
5120 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5121 it were surrounded with a NE_EXPR. */
5123 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5124 return 0;
5126 lcode = TREE_CODE (lhs);
5127 rcode = TREE_CODE (rhs);
5129 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5131 lhs = build2 (NE_EXPR, truth_type, lhs,
5132 build_int_cst (TREE_TYPE (lhs), 0));
5133 lcode = NE_EXPR;
5136 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5138 rhs = build2 (NE_EXPR, truth_type, rhs,
5139 build_int_cst (TREE_TYPE (rhs), 0));
5140 rcode = NE_EXPR;
5143 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5144 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5145 return 0;
5147 ll_arg = TREE_OPERAND (lhs, 0);
5148 lr_arg = TREE_OPERAND (lhs, 1);
5149 rl_arg = TREE_OPERAND (rhs, 0);
5150 rr_arg = TREE_OPERAND (rhs, 1);
5152 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5153 if (simple_operand_p (ll_arg)
5154 && simple_operand_p (lr_arg))
5156 tree result;
5157 if (operand_equal_p (ll_arg, rl_arg, 0)
5158 && operand_equal_p (lr_arg, rr_arg, 0))
5160 result = combine_comparisons (code, lcode, rcode,
5161 truth_type, ll_arg, lr_arg);
5162 if (result)
5163 return result;
5165 else if (operand_equal_p (ll_arg, rr_arg, 0)
5166 && operand_equal_p (lr_arg, rl_arg, 0))
5168 result = combine_comparisons (code, lcode,
5169 swap_tree_comparison (rcode),
5170 truth_type, ll_arg, lr_arg);
5171 if (result)
5172 return result;
5176 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5177 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5179 /* If the RHS can be evaluated unconditionally and its operands are
5180 simple, it wins to evaluate the RHS unconditionally on machines
5181 with expensive branches. In this case, this isn't a comparison
5182 that can be merged. Avoid doing this if the RHS is a floating-point
5183 comparison since those can trap. */
5185 if (BRANCH_COST >= 2
5186 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5187 && simple_operand_p (rl_arg)
5188 && simple_operand_p (rr_arg))
5190 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5191 if (code == TRUTH_OR_EXPR
5192 && lcode == NE_EXPR && integer_zerop (lr_arg)
5193 && rcode == NE_EXPR && integer_zerop (rr_arg)
5194 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5195 return build2 (NE_EXPR, truth_type,
5196 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5197 ll_arg, rl_arg),
5198 build_int_cst (TREE_TYPE (ll_arg), 0));
5200 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5201 if (code == TRUTH_AND_EXPR
5202 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5203 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5204 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5205 return build2 (EQ_EXPR, truth_type,
5206 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5207 ll_arg, rl_arg),
5208 build_int_cst (TREE_TYPE (ll_arg), 0));
5210 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5212 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5213 return build2 (code, truth_type, lhs, rhs);
5214 return NULL_TREE;
5218 /* See if the comparisons can be merged. Then get all the parameters for
5219 each side. */
5221 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5222 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5223 return 0;
5225 volatilep = 0;
5226 ll_inner = decode_field_reference (ll_arg,
5227 &ll_bitsize, &ll_bitpos, &ll_mode,
5228 &ll_unsignedp, &volatilep, &ll_mask,
5229 &ll_and_mask);
5230 lr_inner = decode_field_reference (lr_arg,
5231 &lr_bitsize, &lr_bitpos, &lr_mode,
5232 &lr_unsignedp, &volatilep, &lr_mask,
5233 &lr_and_mask);
5234 rl_inner = decode_field_reference (rl_arg,
5235 &rl_bitsize, &rl_bitpos, &rl_mode,
5236 &rl_unsignedp, &volatilep, &rl_mask,
5237 &rl_and_mask);
5238 rr_inner = decode_field_reference (rr_arg,
5239 &rr_bitsize, &rr_bitpos, &rr_mode,
5240 &rr_unsignedp, &volatilep, &rr_mask,
5241 &rr_and_mask);
5243 /* The inner operation on the lhs of each comparison must be the
5244 same if we are to be able to do anything.
5245 Then see if we have constants. If not, the same must be true for
5246 the rhs's. */
5247 if (volatilep || ll_inner == 0 || rl_inner == 0
5248 || ! operand_equal_p (ll_inner, rl_inner, 0))
5249 return 0;
5251 if (TREE_CODE (lr_arg) == INTEGER_CST
5252 && TREE_CODE (rr_arg) == INTEGER_CST)
5253 l_const = lr_arg, r_const = rr_arg;
5254 else if (lr_inner == 0 || rr_inner == 0
5255 || ! operand_equal_p (lr_inner, rr_inner, 0))
5256 return 0;
5257 else
5258 l_const = r_const = 0;
5260 /* If either comparison code is not correct for our logical operation,
5261 fail. However, we can convert a one-bit comparison against zero into
5262 the opposite comparison against that bit being set in the field. */
5264 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5265 if (lcode != wanted_code)
5267 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5269 /* Make the left operand unsigned, since we are only interested
5270 in the value of one bit. Otherwise we are doing the wrong
5271 thing below. */
5272 ll_unsignedp = 1;
5273 l_const = ll_mask;
5275 else
5276 return 0;
5279 /* This is analogous to the code for l_const above. */
5280 if (rcode != wanted_code)
5282 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5284 rl_unsignedp = 1;
5285 r_const = rl_mask;
5287 else
5288 return 0;
5291 /* See if we can find a mode that contains both fields being compared on
5292 the left. If we can't, fail. Otherwise, update all constants and masks
5293 to be relative to a field of that size. */
5294 first_bit = MIN (ll_bitpos, rl_bitpos);
5295 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5296 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5297 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5298 volatilep);
5299 if (lnmode == VOIDmode)
5300 return 0;
5302 lnbitsize = GET_MODE_BITSIZE (lnmode);
5303 lnbitpos = first_bit & ~ (lnbitsize - 1);
5304 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5305 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5307 if (BYTES_BIG_ENDIAN)
5309 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5310 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5313 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5314 size_int (xll_bitpos), 0);
5315 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5316 size_int (xrl_bitpos), 0);
5318 if (l_const)
5320 l_const = fold_convert (lntype, l_const);
5321 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5322 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5323 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5324 fold_build1 (BIT_NOT_EXPR,
5325 lntype, ll_mask),
5326 0)))
5328 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5330 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5333 if (r_const)
5335 r_const = fold_convert (lntype, r_const);
5336 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5337 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5338 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5339 fold_build1 (BIT_NOT_EXPR,
5340 lntype, rl_mask),
5341 0)))
5343 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5345 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5349 /* If the right sides are not constant, do the same for them. Also,
5350 disallow this optimization if a size or signedness mismatch occurs
5351 between the left and right sides. */
5352 if (l_const == 0)
5354 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5355 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5356 /* Make sure the two fields on the right
5357 correspond to the left without being swapped. */
5358 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5359 return 0;
5361 first_bit = MIN (lr_bitpos, rr_bitpos);
5362 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5363 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5364 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5365 volatilep);
5366 if (rnmode == VOIDmode)
5367 return 0;
5369 rnbitsize = GET_MODE_BITSIZE (rnmode);
5370 rnbitpos = first_bit & ~ (rnbitsize - 1);
5371 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5372 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5374 if (BYTES_BIG_ENDIAN)
5376 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5377 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5380 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5381 size_int (xlr_bitpos), 0);
5382 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5383 size_int (xrr_bitpos), 0);
5385 /* Make a mask that corresponds to both fields being compared.
5386 Do this for both items being compared. If the operands are the
5387 same size and the bits being compared are in the same position
5388 then we can do this by masking both and comparing the masked
5389 results. */
5390 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5391 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5392 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5394 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5395 ll_unsignedp || rl_unsignedp);
5396 if (! all_ones_mask_p (ll_mask, lnbitsize))
5397 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5399 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5400 lr_unsignedp || rr_unsignedp);
5401 if (! all_ones_mask_p (lr_mask, rnbitsize))
5402 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5404 return build2 (wanted_code, truth_type, lhs, rhs);
5407 /* There is still another way we can do something: If both pairs of
5408 fields being compared are adjacent, we may be able to make a wider
5409 field containing them both.
5411 Note that we still must mask the lhs/rhs expressions. Furthermore,
5412 the mask must be shifted to account for the shift done by
5413 make_bit_field_ref. */
5414 if ((ll_bitsize + ll_bitpos == rl_bitpos
5415 && lr_bitsize + lr_bitpos == rr_bitpos)
5416 || (ll_bitpos == rl_bitpos + rl_bitsize
5417 && lr_bitpos == rr_bitpos + rr_bitsize))
5419 tree type;
5421 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5422 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5423 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5424 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5426 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5427 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5428 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5429 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5431 /* Convert to the smaller type before masking out unwanted bits. */
5432 type = lntype;
5433 if (lntype != rntype)
5435 if (lnbitsize > rnbitsize)
5437 lhs = fold_convert (rntype, lhs);
5438 ll_mask = fold_convert (rntype, ll_mask);
5439 type = rntype;
5441 else if (lnbitsize < rnbitsize)
5443 rhs = fold_convert (lntype, rhs);
5444 lr_mask = fold_convert (lntype, lr_mask);
5445 type = lntype;
5449 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5450 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5452 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5453 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5455 return build2 (wanted_code, truth_type, lhs, rhs);
5458 return 0;
5461 /* Handle the case of comparisons with constants. If there is something in
5462 common between the masks, those bits of the constants must be the same.
5463 If not, the condition is always false. Test for this to avoid generating
5464 incorrect code below. */
5465 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5466 if (! integer_zerop (result)
5467 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5468 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5470 if (wanted_code == NE_EXPR)
5472 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5473 return constant_boolean_node (true, truth_type);
5475 else
5477 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5478 return constant_boolean_node (false, truth_type);
5482 /* Construct the expression we will return. First get the component
5483 reference we will make. Unless the mask is all ones for the width of
5484 that field, perform the mask operation. Then compare with the
5485 merged constant. */
5486 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5487 ll_unsignedp || rl_unsignedp);
5489 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5490 if (! all_ones_mask_p (ll_mask, lnbitsize))
5491 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5493 return build2 (wanted_code, truth_type, result,
5494 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
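
/* Illustrative sketch, not part of GCC itself: the mask-merging idea above
   on a plain int instead of adjacent bit-fields.  The two masks do not
   overlap, so the pair of tests collapses into one comparison under the
   IOR of the masks:  */

static int
example_merged_naive (int x)
{
  return (x & 0x0f) == 0x03 && (x & 0xf0) == 0x40;
}

static int
example_merged_folded (int x)
{
  return (x & 0xff) == 0x43;    /* masks IORed, constants IORed */
}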
5497 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5498 constant. */
5500 static tree
5501 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5503 tree arg0 = op0;
5504 enum tree_code op_code;
5505 tree comp_const = op1;
5506 tree minmax_const;
5507 int consts_equal, consts_lt;
5508 tree inner;
5510 STRIP_SIGN_NOPS (arg0);
5512 op_code = TREE_CODE (arg0);
5513 minmax_const = TREE_OPERAND (arg0, 1);
5514 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5515 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5516 inner = TREE_OPERAND (arg0, 0);
5518 /* If something does not permit us to optimize, return NULL_TREE. */
5519 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5520 || TREE_CODE (comp_const) != INTEGER_CST
5521 || TREE_OVERFLOW (comp_const)
5522 || TREE_CODE (minmax_const) != INTEGER_CST
5523 || TREE_OVERFLOW (minmax_const))
5524 return NULL_TREE;
5526 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5527 and GT_EXPR, doing the rest with recursive calls using logical
5528 simplifications. */
5529 switch (code)
5531 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5533 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5534 type, op0, op1);
5535 if (tem)
5536 return invert_truthvalue (tem);
5537 return NULL_TREE;
5540 case GE_EXPR:
5541 return
5542 fold_build2 (TRUTH_ORIF_EXPR, type,
5543 optimize_minmax_comparison
5544 (EQ_EXPR, type, arg0, comp_const),
5545 optimize_minmax_comparison
5546 (GT_EXPR, type, arg0, comp_const));
5548 case EQ_EXPR:
5549 if (op_code == MAX_EXPR && consts_equal)
5550 /* MAX (X, 0) == 0 -> X <= 0 */
5551 return fold_build2 (LE_EXPR, type, inner, comp_const);
5553 else if (op_code == MAX_EXPR && consts_lt)
5554 /* MAX (X, 0) == 5 -> X == 5 */
5555 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5557 else if (op_code == MAX_EXPR)
5558 /* MAX (X, 0) == -1 -> false */
5559 return omit_one_operand (type, integer_zero_node, inner);
5561 else if (consts_equal)
5562 /* MIN (X, 0) == 0 -> X >= 0 */
5563 return fold_build2 (GE_EXPR, type, inner, comp_const);
5565 else if (consts_lt)
5566 /* MIN (X, 0) == 5 -> false */
5567 return omit_one_operand (type, integer_zero_node, inner);
5569 else
5570 /* MIN (X, 0) == -1 -> X == -1 */
5571 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5573 case GT_EXPR:
5574 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5575 /* MAX (X, 0) > 0 -> X > 0
5576 MAX (X, 0) > 5 -> X > 5 */
5577 return fold_build2 (GT_EXPR, type, inner, comp_const);
5579 else if (op_code == MAX_EXPR)
5580 /* MAX (X, 0) > -1 -> true */
5581 return omit_one_operand (type, integer_one_node, inner);
5583 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5584 /* MIN (X, 0) > 0 -> false
5585 MIN (X, 0) > 5 -> false */
5586 return omit_one_operand (type, integer_zero_node, inner);
5588 else
5589 /* MIN (X, 0) > -1 -> X > -1 */
5590 return fold_build2 (GT_EXPR, type, inner, comp_const);
5592 default:
5593 return NULL_TREE;
5597 /* T is an integer expression that is being multiplied by, divided by, or
5598 taken modulo a constant C (CODE says which operation and what kind of
5599 division or modulus). See if we can eliminate that operation by folding it with
5600 other operations already in T. WIDE_TYPE, if non-null, is a type that
5601 should be used for the computation if wider than our type.
5603 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5604 (X * 2) + (Y * 4). We must, however, be assured that either the original
5605 expression would not overflow or that overflow is undefined for the type
5606 in the language in question.
5608 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5609 the machine has a multiply-accumulate insn or that this is part of an
5610 addressing calculation.
5612 If we return a non-null expression, it is an equivalent form of the
5613 original computation, but need not be in the original type.
5615 We set *STRICT_OVERFLOW_P to true if the return value depends on
5616 signed overflow being undefined. Otherwise we do not change
5617 *STRICT_OVERFLOW_P. */
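/* Two small worked instances of the folds performed below, assuming
   the overflow conditions described above hold:

     (X * 6) / 2  ==>  X * 3    (the constants cancel)
     (X * 8) % 4  ==>  0        (8 is a multiple of 4)  */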
5619 static tree
5620 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5621 bool *strict_overflow_p)
5623 /* To avoid exponential search depth, refuse to allow recursion past
5624 three levels. Beyond that (1) it's highly unlikely that we'll find
5625 something interesting and (2) we've probably processed it before
5626 when we built the inner expression. */
5628 static int depth;
5629 tree ret;
5631 if (depth > 3)
5632 return NULL;
5634 depth++;
5635 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5636 depth--;
5638 return ret;
5641 static tree
5642 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5643 bool *strict_overflow_p)
5645 tree type = TREE_TYPE (t);
5646 enum tree_code tcode = TREE_CODE (t);
5647 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5648 > GET_MODE_SIZE (TYPE_MODE (type)))
5649 ? wide_type : type);
5650 tree t1, t2;
5651 int same_p = tcode == code;
5652 tree op0 = NULL_TREE, op1 = NULL_TREE;
5653 bool sub_strict_overflow_p;
5655 /* Don't deal with constants of zero here; they confuse the code below. */
5656 if (integer_zerop (c))
5657 return NULL_TREE;
5659 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5660 op0 = TREE_OPERAND (t, 0);
5662 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5663 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5665 /* Note that we need not handle conditional operations here since fold
5666 already handles those cases. So just do arithmetic here. */
5667 switch (tcode)
5669 case INTEGER_CST:
5670 /* For a constant, we can always simplify if we are a multiply
5671 or (for divide and modulus) if it is a multiple of our constant. */
5672 if (code == MULT_EXPR
5673 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5674 return const_binop (code, fold_convert (ctype, t),
5675 fold_convert (ctype, c), 0);
5676 break;
5678 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5679 /* If op0 is an expression ... */
5680 if ((COMPARISON_CLASS_P (op0)
5681 || UNARY_CLASS_P (op0)
5682 || BINARY_CLASS_P (op0)
5683 || VL_EXP_CLASS_P (op0)
5684 || EXPRESSION_CLASS_P (op0))
5685 /* ... and is unsigned, and its type is smaller than ctype,
5686 then we cannot pass through as widening. */
5687 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5688 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5689 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5690 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5691 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5692 /* ... or this is a truncation (t is narrower than op0),
5693 then we cannot pass through this narrowing. */
5694 || (GET_MODE_SIZE (TYPE_MODE (type))
5695 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5696 /* ... or signedness changes for division or modulus,
5697 then we cannot pass through this conversion. */
5698 || (code != MULT_EXPR
5699 && (TYPE_UNSIGNED (ctype)
5700 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5701 break;
5703 /* Pass the constant down and see if we can make a simplification. If
5704 we can, replace this expression with the inner simplification for
5705 possible later conversion to our or some other type. */
5706 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5707 && TREE_CODE (t2) == INTEGER_CST
5708 && !TREE_OVERFLOW (t2)
5709 && (0 != (t1 = extract_muldiv (op0, t2, code,
5710 code == MULT_EXPR
5711 ? ctype : NULL_TREE,
5712 strict_overflow_p))))
5713 return t1;
5714 break;
5716 case ABS_EXPR:
5717 /* If widening the type changes it from signed to unsigned, then we
5718 must avoid building ABS_EXPR itself as unsigned. */
5719 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5721 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5722 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5723 != 0)
5725 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5726 return fold_convert (ctype, t1);
5728 break;
5730 /* FALLTHROUGH */
5731 case NEGATE_EXPR:
5732 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5733 != 0)
5734 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5735 break;
5737 case MIN_EXPR: case MAX_EXPR:
5738 /* If widening the type changes the signedness, then we can't perform
5739 this optimization as that changes the result. */
5740 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5741 break;
5743 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5744 sub_strict_overflow_p = false;
5745 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5746 &sub_strict_overflow_p)) != 0
5747 && (t2 = extract_muldiv (op1, c, code, wide_type,
5748 &sub_strict_overflow_p)) != 0)
5750 if (tree_int_cst_sgn (c) < 0)
5751 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5752 if (sub_strict_overflow_p)
5753 *strict_overflow_p = true;
5754 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5755 fold_convert (ctype, t2));
5757 break;
5759 case LSHIFT_EXPR: case RSHIFT_EXPR:
5760 /* If the second operand is constant, this is a multiplication
5761 or floor division by a power of two, so we can treat it that
5762 way unless the multiplier or divisor overflows. Signed
5763 left-shift overflow is implementation-defined rather than
5764 undefined in C90, so do not convert signed left shift into
5765 multiplication. */
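/* For instance, with X unsigned, X << 3 is handled below as
   X * 8, and X >> 2 as the floor division X / 4.  */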
5766 if (TREE_CODE (op1) == INTEGER_CST
5767 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5768 /* const_binop may not detect overflow correctly,
5769 so check for it explicitly here. */
5770 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5771 && TREE_INT_CST_HIGH (op1) == 0
5772 && 0 != (t1 = fold_convert (ctype,
5773 const_binop (LSHIFT_EXPR,
5774 size_one_node,
5775 op1, 0)))
5776 && !TREE_OVERFLOW (t1))
5777 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5778 ? MULT_EXPR : FLOOR_DIV_EXPR,
5779 ctype, fold_convert (ctype, op0), t1),
5780 c, code, wide_type, strict_overflow_p);
5781 break;
5783 case PLUS_EXPR: case MINUS_EXPR:
5784 /* See if we can eliminate the operation on both sides. If we can, we
5785 can return a new PLUS or MINUS. If we can't, the only remaining
5786 cases where we can do anything are if the second operand is a
5787 constant. */
5788 sub_strict_overflow_p = false;
5789 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5790 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5791 if (t1 != 0 && t2 != 0
5792 && (code == MULT_EXPR
5793 /* If not multiplication, we can only do this if both operands
5794 are divisible by c. */
5795 || (multiple_of_p (ctype, op0, c)
5796 && multiple_of_p (ctype, op1, c))))
5798 if (sub_strict_overflow_p)
5799 *strict_overflow_p = true;
5800 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5801 fold_convert (ctype, t2));
5804 /* If this was a subtraction, negate OP1 and set it to be an addition.
5805 This simplifies the logic below. */
5806 if (tcode == MINUS_EXPR)
5807 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5809 if (TREE_CODE (op1) != INTEGER_CST)
5810 break;
5812 /* If either OP1 or C are negative, this optimization is not safe for
5813 some of the division and remainder types while for others we need
5814 to change the code. */
5815 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5817 if (code == CEIL_DIV_EXPR)
5818 code = FLOOR_DIV_EXPR;
5819 else if (code == FLOOR_DIV_EXPR)
5820 code = CEIL_DIV_EXPR;
5821 else if (code != MULT_EXPR
5822 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5823 break;
5826 /* If it's a multiply or a division/modulus operation of a multiple
5827 of our constant, do the operation and verify it doesn't overflow. */
5828 if (code == MULT_EXPR
5829 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5831 op1 = const_binop (code, fold_convert (ctype, op1),
5832 fold_convert (ctype, c), 0);
5833 /* We allow the constant to overflow with wrapping semantics. */
5834 if (op1 == 0
5835 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5836 break;
5838 else
5839 break;
5841 /* If we have an unsigned type that is not a sizetype, we cannot widen
5842 the operation since it will change the result if the original
5843 computation overflowed. */
5844 if (TYPE_UNSIGNED (ctype)
5845 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5846 && ctype != type)
5847 break;
5849 /* If we were able to eliminate our operation from the first side,
5850 apply our operation to the second side and reform the PLUS. */
5851 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5852 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5854 /* The last case is if we are a multiply. In that case, we can
5855 apply the distributive law to commute the multiply and addition
5856 if the multiplication of the constants doesn't overflow. */
5857 if (code == MULT_EXPR)
5858 return fold_build2 (tcode, ctype,
5859 fold_build2 (code, ctype,
5860 fold_convert (ctype, op0),
5861 fold_convert (ctype, c)),
5862 op1);
5864 break;
5866 case MULT_EXPR:
5867 /* We have a special case here if we are doing something like
5868 (C * 8) % 4 since we know that's zero. */
5869 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5870 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5871 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5872 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5873 return omit_one_operand (type, integer_zero_node, op0);
5875 /* ... fall through ... */
5877 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5878 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5879 /* If we can extract our operation from the LHS, do so and return a
5880 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5881 do something only if the second operand is a constant. */
5882 if (same_p
5883 && (t1 = extract_muldiv (op0, c, code, wide_type,
5884 strict_overflow_p)) != 0)
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5886 fold_convert (ctype, op1));
5887 else if (tcode == MULT_EXPR && code == MULT_EXPR
5888 && (t1 = extract_muldiv (op1, c, code, wide_type,
5889 strict_overflow_p)) != 0)
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 fold_convert (ctype, t1));
5892 else if (TREE_CODE (op1) != INTEGER_CST)
5893 return 0;
5895 /* If these are the same operation types, we can associate them
5896 assuming no overflow. */
5897 if (tcode == code
5898 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5899 fold_convert (ctype, c), 0))
5900 && !TREE_OVERFLOW (t1))
5901 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with an
5906 operation of either CODE or TCODE.
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5910 overflowed. */
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5912 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5914 || (tcode == MULT_EXPR
5915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5918 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5921 *strict_overflow_p = true;
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5923 fold_convert (ctype,
5924 const_binop (TRUNC_DIV_EXPR,
5925 op1, c, 0)));
5927 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 *strict_overflow_p = true;
5931 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5932 fold_convert (ctype,
5933 const_binop (TRUNC_DIV_EXPR,
5934 c, op1, 0)));
5937 break;
5939 default:
5940 break;
5943 return 0;
5946 /* Return a node which has the indicated constant VALUE (either 0 or
5947 1), and is of the indicated TYPE. */
5949 tree
5950 constant_boolean_node (int value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5956 else
5957 return build_int_cst (type, value);
5961 /* Return true if expr looks like an ARRAY_REF and set base and
5962 offset to the appropriate trees. If there is no offset,
5963 offset is set to NULL_TREE. Base will be canonicalized to
5964 something you can get the element type from using
5965 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5966 in bytes to the base. */
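/* For example, with int a[10] and 4-byte ints (an assumption made
   only for the illustration), &a[1] matches the ADDR_EXPR arm below
   and yields base a with offset 1 * 4, while &a[1] + i matches the
   PLUS_EXPR arm and yields base a with offset 4 + i.  */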
5968 static bool
5969 extract_array_ref (tree expr, tree *base, tree *offset)
5971 /* One canonical form is a PLUS_EXPR with the first
5972 argument being an ADDR_EXPR with a possible NOP_EXPR
5973 attached. */
5974 if (TREE_CODE (expr) == PLUS_EXPR)
5976 tree op0 = TREE_OPERAND (expr, 0);
5977 tree inner_base, dummy1;
5978 /* Strip NOP_EXPRs here because the C frontends and/or
5979 folders may present us with (int *)&x.a + 4B. */
5980 STRIP_NOPS (op0);
5981 if (extract_array_ref (op0, &inner_base, &dummy1))
5983 *base = inner_base;
5984 if (dummy1 == NULL_TREE)
5985 *offset = TREE_OPERAND (expr, 1);
5986 else
5987 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5988 dummy1, TREE_OPERAND (expr, 1));
5989 return true;
5992 /* Another canonical form is an ADDR_EXPR of an ARRAY_REF,
5993 which we transform into an ADDR_EXPR with appropriate
5994 offset. For other arguments to the ADDR_EXPR we assume
5995 zero offset and as such do not care about the ADDR_EXPR
5996 type and strip possible nops from it. */
5997 else if (TREE_CODE (expr) == ADDR_EXPR)
5999 tree op0 = TREE_OPERAND (expr, 0);
6000 if (TREE_CODE (op0) == ARRAY_REF)
6002 tree idx = TREE_OPERAND (op0, 1);
6003 *base = TREE_OPERAND (op0, 0);
6004 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6005 array_ref_element_size (op0));
6007 else
6009 /* Handle array-to-pointer decay as &a. */
6010 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6011 *base = TREE_OPERAND (expr, 0);
6012 else
6013 *base = expr;
6014 *offset = NULL_TREE;
6016 return true;
6018 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6019 else if (SSA_VAR_P (expr)
6020 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6022 *base = expr;
6023 *offset = NULL_TREE;
6024 return true;
6027 return false;
6031 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6032 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6033 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6034 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6035 COND is the first argument to CODE; otherwise (as in the example
6036 given here), it is the second argument. TYPE is the type of the
6037 original expression. Return NULL_TREE if no simplification is
6038 possible. */
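/* A worked instance with a constant ARG: folding 5 + (b ? 2 : 3)
   pushes the addition into both arms and yields b ? 7 : 8.  */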
6040 static tree
6041 fold_binary_op_with_conditional_arg (enum tree_code code,
6042 tree type, tree op0, tree op1,
6043 tree cond, tree arg, int cond_first_p)
6045 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6046 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6047 tree test, true_value, false_value;
6048 tree lhs = NULL_TREE;
6049 tree rhs = NULL_TREE;
6051 /* This transformation is only worthwhile if we don't have to wrap
6052 arg in a SAVE_EXPR, and the operation can be simplified on at least
6053 one of the branches once it's pushed inside the COND_EXPR. */
6054 if (!TREE_CONSTANT (arg))
6055 return NULL_TREE;
6057 if (TREE_CODE (cond) == COND_EXPR)
6059 test = TREE_OPERAND (cond, 0);
6060 true_value = TREE_OPERAND (cond, 1);
6061 false_value = TREE_OPERAND (cond, 2);
6062 /* If this operand throws an exception, then it does not make
6063 sense to try to perform a logical or arithmetic operation
6064 involving it. */
6065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6066 lhs = true_value;
6067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6068 rhs = false_value;
6070 else
6072 tree testtype = TREE_TYPE (cond);
6073 test = cond;
6074 true_value = constant_boolean_node (true, testtype);
6075 false_value = constant_boolean_node (false, testtype);
6078 arg = fold_convert (arg_type, arg);
6079 if (lhs == 0)
6081 true_value = fold_convert (cond_type, true_value);
6082 if (cond_first_p)
6083 lhs = fold_build2 (code, type, true_value, arg);
6084 else
6085 lhs = fold_build2 (code, type, arg, true_value);
6087 if (rhs == 0)
6089 false_value = fold_convert (cond_type, false_value);
6090 if (cond_first_p)
6091 rhs = fold_build2 (code, type, false_value, arg);
6092 else
6093 rhs = fold_build2 (code, type, arg, false_value);
6096 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6097 return fold_convert (type, test);
6101 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6103 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6104 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6105 ADDEND is the same as X.
6107 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6108 and finite. The problematic cases are when X is zero, and its mode
6109 has signed zeros. In the case of rounding towards -infinity,
6110 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6111 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6113 static bool
6114 fold_real_zero_addition_p (tree type, tree addend, int negate)
6116 if (!real_zerop (addend))
6117 return false;
6119 /* Don't allow the fold with -fsignaling-nans. */
6120 if (HONOR_SNANS (TYPE_MODE (type)))
6121 return false;
6123 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6124 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6125 return true;
6127 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6128 if (TREE_CODE (addend) == REAL_CST
6129 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6130 negate = !negate;
6132 /* The mode has signed zeros, and we have to honor their sign.
6133 In this situation, there is only one case we can return true for.
6134 X - 0 is the same as X unless rounding towards -infinity is
6135 supported. */
6136 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6139 /* Subroutine of fold() that checks comparisons of built-in math
6140 functions against real constants.
6142 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6143 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6144 is the type of the result and ARG0 and ARG1 are the operands of the
6145 comparison. ARG1 must be a TREE_REAL_CST.
6147 The function returns the constant folded tree if a simplification
6148 can be made, and NULL_TREE otherwise. */
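/* For example, assuming the constants are exactly representable,
   the GT_EXPR logic below rewrites sqrt (x) > 2.0 as x > 4.0, and
   when NaNs need not be honored the LT_EXPR logic rewrites
   sqrt (x) < 2.0 as x < 4.0.  */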
6150 static tree
6151 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6152 tree type, tree arg0, tree arg1)
6154 REAL_VALUE_TYPE c;
6156 if (BUILTIN_SQRT_P (fcode))
6158 tree arg = CALL_EXPR_ARG (arg0, 0);
6159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6161 c = TREE_REAL_CST (arg1);
6162 if (REAL_VALUE_NEGATIVE (c))
6164 /* sqrt(x) < y is always false, if y is negative. */
6165 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6166 return omit_one_operand (type, integer_zero_node, arg);
6168 /* sqrt(x) > y is always true, if y is negative and we
6169 don't care about NaNs, i.e. negative values of x. */
6170 if (code == NE_EXPR || !HONOR_NANS (mode))
6171 return omit_one_operand (type, integer_one_node, arg);
6173 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6174 return fold_build2 (GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg), dconst0));
6177 else if (code == GT_EXPR || code == GE_EXPR)
6179 REAL_VALUE_TYPE c2;
6181 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6182 real_convert (&c2, mode, &c2);
6184 if (REAL_VALUE_ISINF (c2))
6186 /* sqrt(x) > y is x == +Inf, when y is very large. */
6187 if (HONOR_INFINITIES (mode))
6188 return fold_build2 (EQ_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg), c2));
6191 /* sqrt(x) > y is always false, when y is very large
6192 and we don't care about infinities. */
6193 return omit_one_operand (type, integer_zero_node, arg);
6196 /* sqrt(x) > c is the same as x > c*c. */
6197 return fold_build2 (code, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6200 else if (code == LT_EXPR || code == LE_EXPR)
6202 REAL_VALUE_TYPE c2;
6204 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6205 real_convert (&c2, mode, &c2);
6207 if (REAL_VALUE_ISINF (c2))
6209 /* sqrt(x) < y is always true, when y is a very large
6210 value and we don't care about NaNs or Infinities. */
6211 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6212 return omit_one_operand (type, integer_one_node, arg);
6214 /* sqrt(x) < y is x != +Inf when y is very large and we
6215 don't care about NaNs. */
6216 if (! HONOR_NANS (mode))
6217 return fold_build2 (NE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg), c2));
6220 /* sqrt(x) < y is x >= 0 when y is very large and we
6221 don't care about Infinities. */
6222 if (! HONOR_INFINITIES (mode))
6223 return fold_build2 (GE_EXPR, type, arg,
6224 build_real (TREE_TYPE (arg), dconst0));
6226 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6227 if (lang_hooks.decls.global_bindings_p () != 0
6228 || CONTAINS_PLACEHOLDER_P (arg))
6229 return NULL_TREE;
6231 arg = save_expr (arg);
6232 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6233 fold_build2 (GE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6235 dconst0)),
6236 fold_build2 (NE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg),
6238 c2)));
6241 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return fold_build2 (code, type, arg,
6244 build_real (TREE_TYPE (arg), c2));
6246 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6247 if (lang_hooks.decls.global_bindings_p () == 0
6248 && ! CONTAINS_PLACEHOLDER_P (arg))
6250 arg = save_expr (arg);
6251 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6252 fold_build2 (GE_EXPR, type, arg,
6253 build_real (TREE_TYPE (arg),
6254 dconst0)),
6255 fold_build2 (code, type, arg,
6256 build_real (TREE_TYPE (arg),
6257 c2)));
6262 return NULL_TREE;
6265 /* Subroutine of fold() that optimizes comparisons against Infinities,
6266 either +Inf or -Inf.
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
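/* Concretely, for a double X:

     X <  +Inf   becomes   X <= DBL_MAX
     X >= +Inf   becomes   X >  DBL_MAX

   with the comparison senses swapped when ARG1 is -Inf.  */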
6275 static tree
6276 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6278 enum machine_mode mode;
6279 REAL_VALUE_TYPE max;
6280 tree temp;
6281 bool neg;
6283 mode = TYPE_MODE (TREE_TYPE (arg0));
6285 /* For negative infinity swap the sense of the comparison. */
6286 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6287 if (neg)
6288 code = swap_tree_comparison (code);
6290 switch (code)
6292 case GT_EXPR:
6293 /* x > +Inf is always false, if we ignore sNaNs. */
6294 if (HONOR_SNANS (mode))
6295 return NULL_TREE;
6296 return omit_one_operand (type, integer_zero_node, arg0);
6298 case LE_EXPR:
6299 /* x <= +Inf is always true, if we don't care about NaNs. */
6300 if (! HONOR_NANS (mode))
6301 return omit_one_operand (type, integer_one_node, arg0);
6303 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6304 if (lang_hooks.decls.global_bindings_p () == 0
6305 && ! CONTAINS_PLACEHOLDER_P (arg0))
6307 arg0 = save_expr (arg0);
6308 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6310 break;
6312 case EQ_EXPR:
6313 case GE_EXPR:
6314 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6319 case LT_EXPR:
6320 /* x < +Inf is always equal to x <= DBL_MAX. */
6321 real_maxval (&max, neg, mode);
6322 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6325 case NE_EXPR:
6326 /* x != +Inf is always equal to !(x > DBL_MAX). */
6327 real_maxval (&max, neg, mode);
6328 if (! HONOR_NANS (mode))
6329 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6332 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6336 default:
6337 break;
6340 return NULL_TREE;
6343 /* Subroutine of fold() that optimizes comparisons of a division by
6344 a nonzero integer constant against an integer constant, i.e.
6345 X/C1 op C2.
6347 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6348 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6349 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6351 The function returns the constant folded tree if a simplification
6352 can be made, and NULL_TREE otherwise. */
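/* A worked example with signed truncating division: X/4 == 2 holds
   exactly for X in [8, 11], so the EQ_EXPR case below produces the
   range check 8 <= X && X <= 11 via build_range_check.  */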
6354 static tree
6355 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6357 tree prod, tmp, hi, lo;
6358 tree arg00 = TREE_OPERAND (arg0, 0);
6359 tree arg01 = TREE_OPERAND (arg0, 1);
6360 unsigned HOST_WIDE_INT lpart;
6361 HOST_WIDE_INT hpart;
6362 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6363 bool neg_overflow;
6364 int overflow;
6366 /* We have to do this the hard way to detect unsigned overflow.
6367 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6368 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6369 TREE_INT_CST_HIGH (arg01),
6370 TREE_INT_CST_LOW (arg1),
6371 TREE_INT_CST_HIGH (arg1),
6372 &lpart, &hpart, unsigned_p);
6373 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6374 -1, overflow);
6375 neg_overflow = false;
6377 if (unsigned_p)
6379 tmp = int_const_binop (MINUS_EXPR, arg01,
6380 build_int_cst (TREE_TYPE (arg01), 1), 0);
6381 lo = prod;
6383 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6384 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6385 TREE_INT_CST_HIGH (prod),
6386 TREE_INT_CST_LOW (tmp),
6387 TREE_INT_CST_HIGH (tmp),
6388 &lpart, &hpart, unsigned_p);
6389 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6390 -1, overflow | TREE_OVERFLOW (prod));
6392 else if (tree_int_cst_sgn (arg01) >= 0)
6394 tmp = int_const_binop (MINUS_EXPR, arg01,
6395 build_int_cst (TREE_TYPE (arg01), 1), 0);
6396 switch (tree_int_cst_sgn (arg1))
6398 case -1:
6399 neg_overflow = true;
6400 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6401 hi = prod;
6402 break;
6404 case 0:
6405 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6406 hi = tmp;
6407 break;
6409 case 1:
6410 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6411 lo = prod;
6412 break;
6414 default:
6415 gcc_unreachable ();
6418 else
6420 /* A negative divisor reverses the relational operators. */
6421 code = swap_tree_comparison (code);
6423 tmp = int_const_binop (PLUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1), 0);
6425 switch (tree_int_cst_sgn (arg1))
6427 case -1:
6428 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6429 lo = prod;
6430 break;
6432 case 0:
6433 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6434 lo = tmp;
6435 break;
6437 case 1:
6438 neg_overflow = true;
6439 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6440 hi = prod;
6441 break;
6443 default:
6444 gcc_unreachable ();
6448 switch (code)
6450 case EQ_EXPR:
6451 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6452 return omit_one_operand (type, integer_zero_node, arg00);
6453 if (TREE_OVERFLOW (hi))
6454 return fold_build2 (GE_EXPR, type, arg00, lo);
6455 if (TREE_OVERFLOW (lo))
6456 return fold_build2 (LE_EXPR, type, arg00, hi);
6457 return build_range_check (type, arg00, 1, lo, hi);
6459 case NE_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand (type, integer_one_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2 (LT_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2 (GT_EXPR, type, arg00, hi);
6466 return build_range_check (type, arg00, 0, lo, hi);
6468 case LT_EXPR:
6469 if (TREE_OVERFLOW (lo))
6471 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6472 return omit_one_operand (type, tmp, arg00);
6474 return fold_build2 (LT_EXPR, type, arg00, lo);
6476 case LE_EXPR:
6477 if (TREE_OVERFLOW (hi))
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand (type, tmp, arg00);
6482 return fold_build2 (LE_EXPR, type, arg00, hi);
6484 case GT_EXPR:
6485 if (TREE_OVERFLOW (hi))
6487 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6488 return omit_one_operand (type, tmp, arg00);
6490 return fold_build2 (GT_EXPR, type, arg00, hi);
6492 case GE_EXPR:
6493 if (TREE_OVERFLOW (lo))
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand (type, tmp, arg00);
6498 return fold_build2 (GE_EXPR, type, arg00, lo);
6500 default:
6501 break;
6504 return NULL_TREE;
6508 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6509 equality/inequality test, then return a simplified form of the test
6510 using a sign test. Otherwise return NULL. TYPE is the desired
6511 result type. */
6513 static tree
6514 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6515 tree result_type)
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6532 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6533 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type, fold_convert (stype, arg00),
6535 build_int_cst (stype, 0));
6539 return NULL_TREE;
6542 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6543 equality/inequality test, then return a simplified form of
6544 the test using shifts and logical operations. Otherwise return
6545 NULL. TYPE is the desired result type. */
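/* For instance, (A & 8) != 0 becomes ((A >> 3) & 1), and
   (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), unless the tested bit
   is the sign bit, in which case the sign test above is used.  */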
6547 tree
6548 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6549 tree result_type)
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 tree inner = TREE_OPERAND (arg0, 0);
6557 tree type = TREE_TYPE (arg0);
6558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6559 enum machine_mode operand_mode = TYPE_MODE (type);
6560 int ops_unsigned;
6561 tree signed_type, unsigned_type, intermediate_type;
6562 tree tem, one;
6564 /* First, see if we can fold the single bit test into a sign-bit
6565 test. */
6566 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6567 result_type);
6568 if (tem)
6569 return tem;
6571 /* Otherwise we have (A & C) != 0 where C is a single bit,
6572 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6573 Similarly for (A & C) == 0. */
6575 /* If INNER is a right shift by a constant and it plus BITNUM does
6576 not overflow, adjust BITNUM and INNER. */
6577 if (TREE_CODE (inner) == RSHIFT_EXPR
6578 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6579 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6580 && bitnum < TYPE_PRECISION (type)
6581 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6582 bitnum - TYPE_PRECISION (type)))
6584 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6585 inner = TREE_OPERAND (inner, 0);
6588 /* If we are going to be able to omit the AND below, we must do our
6589 operations as unsigned. If we must use the AND, we have a choice.
6590 Normally unsigned is faster, but for some machines signed is. */
6591 #ifdef LOAD_EXTEND_OP
6592 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6593 && !flag_syntax_only) ? 0 : 1;
6594 #else
6595 ops_unsigned = 1;
6596 #endif
6598 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6599 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6600 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6601 inner = fold_convert (intermediate_type, inner);
6603 if (bitnum != 0)
6604 inner = build2 (RSHIFT_EXPR, intermediate_type,
6605 inner, size_int (bitnum));
6607 one = build_int_cst (intermediate_type, 1);
6609 if (code == EQ_EXPR)
6610 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6612 /* Put the AND last so it can combine with more things. */
6613 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6615 /* Make sure to return the proper type. */
6616 inner = fold_convert (result_type, inner);
6618 return inner;
6620 return NULL_TREE;
6623 /* Check whether we are allowed to reorder operands arg0 and arg1,
6624 such that the evaluation of arg1 occurs before arg0. */
6626 static bool
6627 reorder_operands_p (tree arg0, tree arg1)
6629 if (! flag_evaluation_order)
6630 return true;
6631 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return true;
6633 return ! TREE_SIDE_EFFECTS (arg0)
6634 && ! TREE_SIDE_EFFECTS (arg1);
6637 /* Test whether it is preferable to swap two operands, ARG0 and
6638 ARG1, for example because ARG0 is an integer constant and ARG1
6639 isn't. If REORDER is true, only recommend swapping if we can
6640 evaluate the operands in reverse order. */
6642 bool
6643 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6645 STRIP_SIGN_NOPS (arg0);
6646 STRIP_SIGN_NOPS (arg1);
6648 if (TREE_CODE (arg1) == INTEGER_CST)
6649 return 0;
6650 if (TREE_CODE (arg0) == INTEGER_CST)
6651 return 1;
6653 if (TREE_CODE (arg1) == REAL_CST)
6654 return 0;
6655 if (TREE_CODE (arg0) == REAL_CST)
6656 return 1;
6658 if (TREE_CODE (arg1) == COMPLEX_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == COMPLEX_CST)
6661 return 1;
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6668 if (optimize_size)
6669 return 0;
6671 if (reorder && flag_evaluation_order
6672 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6673 return 0;
6675 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6676 for commutative and comparison operators. Ensuring a canonical
6677 form allows the optimizers to find additional redundancies without
6678 having to explicitly check for both orderings. */
6679 if (TREE_CODE (arg0) == SSA_NAME
6680 && TREE_CODE (arg1) == SSA_NAME
6681 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6682 return 1;
6684 /* Put SSA_NAMEs last. */
6685 if (TREE_CODE (arg1) == SSA_NAME)
6686 return 0;
6687 if (TREE_CODE (arg0) == SSA_NAME)
6688 return 1;
6690 /* Put variables last. */
6691 if (DECL_P (arg1))
6692 return 0;
6693 if (DECL_P (arg0))
6694 return 1;
6696 return 0;
6699 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6700 ARG0 is extended to a wider type. */
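/* For example, if S has type short, (int) S == 70000 is known to be
   false because 70000 does not fit in short, while (int) S == 100
   is folded to the narrower comparison S == 100.  */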
6702 static tree
6703 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6705 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6706 tree arg1_unw;
6707 tree shorter_type, outer_type;
6708 tree min, max;
6709 bool above, below;
6711 if (arg0_unw == arg0)
6712 return NULL_TREE;
6713 shorter_type = TREE_TYPE (arg0_unw);
6715 #ifdef HAVE_canonicalize_funcptr_for_compare
6716 /* Disable this optimization if we're casting a function pointer
6717 type on targets that require function pointer canonicalization. */
6718 if (HAVE_canonicalize_funcptr_for_compare
6719 && TREE_CODE (shorter_type) == POINTER_TYPE
6720 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6721 return NULL_TREE;
6722 #endif
6724 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6725 return NULL_TREE;
6727 arg1_unw = get_unwidened (arg1, shorter_type);
6729 /* If possible, express the comparison in the shorter mode. */
6730 if ((code == EQ_EXPR || code == NE_EXPR
6731 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6732 && (TREE_TYPE (arg1_unw) == shorter_type
6733 || (TREE_CODE (arg1_unw) == INTEGER_CST
6734 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6735 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6736 && int_fits_type_p (arg1_unw, shorter_type))))
6737 return fold_build2 (code, type, arg0_unw,
6738 fold_convert (shorter_type, arg1_unw));
6740 if (TREE_CODE (arg1_unw) != INTEGER_CST
6741 || TREE_CODE (shorter_type) != INTEGER_TYPE
6742 || !int_fits_type_p (arg1_unw, shorter_type))
6743 return NULL_TREE;
6745 /* If we are comparing with an integer that does not fit into the range
6746 of the shorter type, the result is known. */
6747 outer_type = TREE_TYPE (arg1_unw);
6748 min = lower_bound_in_type (outer_type, shorter_type);
6749 max = upper_bound_in_type (outer_type, shorter_type);
6751 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6752 max, arg1_unw));
6753 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 arg1_unw, min));
6756 switch (code)
6758 case EQ_EXPR:
6759 if (above || below)
6760 return omit_one_operand (type, integer_zero_node, arg0);
6761 break;
6763 case NE_EXPR:
6764 if (above || below)
6765 return omit_one_operand (type, integer_one_node, arg0);
6766 break;
6768 case LT_EXPR:
6769 case LE_EXPR:
6770 if (above)
6771 return omit_one_operand (type, integer_one_node, arg0);
6772 else if (below)
6773 return omit_one_operand (type, integer_zero_node, arg0);
6775 case GT_EXPR:
6776 case GE_EXPR:
6777 if (above)
6778 return omit_one_operand (type, integer_zero_node, arg0);
6779 else if (below)
6780 return omit_one_operand (type, integer_one_node, arg0);
6782 default:
6783 break;
6786 return NULL_TREE;
6789 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6790 ARG0 just the signedness is changed. */
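/* E.g. with unsigned int U, the test (int) U == 5 is folded to
   U == 5U: the conversion changes only the signedness, which cannot
   affect the outcome of an equality comparison.  */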
6792 static tree
6793 fold_sign_changed_comparison (enum tree_code code, tree type,
6794 tree arg0, tree arg1)
6796 tree arg0_inner;
6797 tree inner_type, outer_type;
6799 if (TREE_CODE (arg0) != NOP_EXPR
6800 && TREE_CODE (arg0) != CONVERT_EXPR)
6801 return NULL_TREE;
6803 outer_type = TREE_TYPE (arg0);
6804 arg0_inner = TREE_OPERAND (arg0, 0);
6805 inner_type = TREE_TYPE (arg0_inner);
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808 /* Disable this optimization if we're casting a function pointer
6809 type on targets that require function pointer canonicalization. */
6810 if (HAVE_canonicalize_funcptr_for_compare
6811 && TREE_CODE (inner_type) == POINTER_TYPE
6812 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6813 return NULL_TREE;
6814 #endif
6816 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6817 return NULL_TREE;
6819 if (TREE_CODE (arg1) != INTEGER_CST
6820 && !((TREE_CODE (arg1) == NOP_EXPR
6821 || TREE_CODE (arg1) == CONVERT_EXPR)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6830 if (TREE_CODE (arg1) == INTEGER_CST)
6831 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6832 TREE_INT_CST_HIGH (arg1), 0,
6833 TREE_OVERFLOW (arg1));
6834 else
6835 arg1 = fold_convert (inner_type, arg1);
6837 return fold_build2 (code, type, arg0_inner, arg1);
6840 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6841 the step of the array. Reconstructs s and delta in the case of s * delta
6842 being an integer constant (and thus already folded).
6843 ADDR is the address. OP1 is the multiplicative expression.
6844 If the function succeeds, the new address expression is returned. Otherwise
6845 NULL_TREE is returned. */
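/* Illustrative sketch: for int a[10] with 4-byte ints (an assumption
   made for the example), &a[i] + d * 4 is rewritten as &a[i + d],
   since 4 is the step of the array.  */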
6847 static tree
6848 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6850 tree s, delta, step;
6851 tree ref = TREE_OPERAND (addr, 0), pref;
6852 tree ret, pos;
6853 tree itype;
6854 bool mdim = false;
6856 /* Canonicalize op1 into a possibly non-constant delta
6857 and an INTEGER_CST s. */
6858 if (TREE_CODE (op1) == MULT_EXPR)
6860 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6862 STRIP_NOPS (arg0);
6863 STRIP_NOPS (arg1);
6865 if (TREE_CODE (arg0) == INTEGER_CST)
6867 s = arg0;
6868 delta = arg1;
6870 else if (TREE_CODE (arg1) == INTEGER_CST)
6872 s = arg1;
6873 delta = arg0;
6875 else
6876 return NULL_TREE;
6878 else if (TREE_CODE (op1) == INTEGER_CST)
6880 delta = op1;
6881 s = NULL_TREE;
6883 else
6885 /* Treat op1 as delta * 1. */
6886 delta = op1;
6887 s = integer_one_node;
6890 for (;; ref = TREE_OPERAND (ref, 0))
6892 if (TREE_CODE (ref) == ARRAY_REF)
6894 /* Remember if this was a multi-dimensional array. */
6895 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6896 mdim = true;
6898 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6899 if (! itype)
6900 continue;
6902 step = array_ref_element_size (ref);
6903 if (TREE_CODE (step) != INTEGER_CST)
6904 continue;
6906 if (s)
6908 if (! tree_int_cst_equal (step, s))
6909 continue;
6911 else
6913 /* Check whether delta is a multiple of step. */
6914 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6915 if (! tmp)
6916 continue;
6917 delta = tmp;
6920 /* Only fold here if we can verify we do not overflow one
6921 dimension of a multi-dimensional array. */
6922 if (mdim)
6924 tree tmp;
6926 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6927 || !INTEGRAL_TYPE_P (itype)
6928 || !TYPE_MAX_VALUE (itype)
6929 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6930 continue;
6932 tmp = fold_binary (code, itype,
6933 fold_convert (itype,
6934 TREE_OPERAND (ref, 1)),
6935 fold_convert (itype, delta));
6936 if (!tmp
6937 || TREE_CODE (tmp) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6939 continue;
6942 break;
6944 else
6945 mdim = false;
6947 if (!handled_component_p (ref))
6948 return NULL_TREE;
6951 /* We found a suitable array reference. So copy everything up to it,
6952 and replace the index. */
6954 pref = TREE_OPERAND (addr, 0);
6955 ret = copy_node (pref);
6956 pos = ret;
6958 while (pref != ref)
6960 pref = TREE_OPERAND (pref, 0);
6961 TREE_OPERAND (pos, 0) = copy_node (pref);
6962 pos = TREE_OPERAND (pos, 0);
6965 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6966 fold_convert (itype,
6967 TREE_OPERAND (pos, 1)),
6968 fold_convert (itype, delta));
6970 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6974 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6975 means A >= Y && A != MAX, but in this case we know that
6976 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6978 static tree
6979 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6981 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6983 if (TREE_CODE (bound) == LT_EXPR)
6984 a = TREE_OPERAND (bound, 0);
6985 else if (TREE_CODE (bound) == GT_EXPR)
6986 a = TREE_OPERAND (bound, 1);
6987 else
6988 return NULL_TREE;
6990 typea = TREE_TYPE (a);
6991 if (!INTEGRAL_TYPE_P (typea)
6992 && !POINTER_TYPE_P (typea))
6993 return NULL_TREE;
6995 if (TREE_CODE (ineq) == LT_EXPR)
6997 a1 = TREE_OPERAND (ineq, 1);
6998 y = TREE_OPERAND (ineq, 0);
7000 else if (TREE_CODE (ineq) == GT_EXPR)
7002 a1 = TREE_OPERAND (ineq, 0);
7003 y = TREE_OPERAND (ineq, 1);
7005 else
7006 return NULL_TREE;
7008 if (TREE_TYPE (a1) != typea)
7009 return NULL_TREE;
7011 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7012 if (!integer_onep (diff))
7013 return NULL_TREE;
7015 return fold_build2 (GE_EXPR, type, a, y);
7018 /* Fold a sum or difference of at least one multiplication.
7019 Returns the folded tree or NULL if no simplification could be made. */
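/* Besides the identities above, the power-of-two matching below can
   factor, e.g., (X * 12) + (Y * 4) into (X * 3 + Y) * 4, which helps
   with multi-dimensional array address arithmetic.  */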
7021 static tree
7022 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7024 tree arg00, arg01, arg10, arg11;
7025 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7027 /* (A * C) +- (B * C) -> (A+-B) * C.
7028 (A * C) +- A -> A * (C+-1).
7029 We are most concerned about the case where C is a constant,
7030 but other combinations show up during loop reduction. Since
7031 it is not difficult, try all four possibilities. */
7033 if (TREE_CODE (arg0) == MULT_EXPR)
7035 arg00 = TREE_OPERAND (arg0, 0);
7036 arg01 = TREE_OPERAND (arg0, 1);
7038 else
7040 arg00 = arg0;
7041 arg01 = build_one_cst (type);
7043 if (TREE_CODE (arg1) == MULT_EXPR)
7045 arg10 = TREE_OPERAND (arg1, 0);
7046 arg11 = TREE_OPERAND (arg1, 1);
7048 else
7050 arg10 = arg1;
7051 arg11 = build_one_cst (type);
7053 same = NULL_TREE;
7055 if (operand_equal_p (arg01, arg11, 0))
7056 same = arg01, alt0 = arg00, alt1 = arg10;
7057 else if (operand_equal_p (arg00, arg10, 0))
7058 same = arg00, alt0 = arg01, alt1 = arg11;
7059 else if (operand_equal_p (arg00, arg11, 0))
7060 same = arg00, alt0 = arg01, alt1 = arg10;
7061 else if (operand_equal_p (arg01, arg10, 0))
7062 same = arg01, alt0 = arg00, alt1 = arg11;
7064 /* No identical multiplicands; see if we can find a common
7065 power-of-two factor in non-power-of-two multiplies. This
7066 can help in multi-dimensional array access. */
7067 else if (host_integerp (arg01, 0)
7068 && host_integerp (arg11, 0))
7070 HOST_WIDE_INT int01, int11, tmp;
7071 bool swap = false;
7072 tree maybe_same;
7073 int01 = TREE_INT_CST_LOW (arg01);
7074 int11 = TREE_INT_CST_LOW (arg11);
7076 /* Move min of absolute values to int11. */
7077 if ((int01 >= 0 ? int01 : -int01)
7078 < (int11 >= 0 ? int11 : -int11))
7080 tmp = int01, int01 = int11, int11 = tmp;
7081 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7082 maybe_same = arg01;
7083 swap = true;
7085 else
7086 maybe_same = arg11;
7088 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7090 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7091 build_int_cst (TREE_TYPE (arg00),
7092 int01 / int11));
7093 alt1 = arg10;
7094 same = maybe_same;
7095 if (swap)
7096 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7100 if (same)
7101 return fold_build2 (MULT_EXPR, type,
7102 fold_build2 (code, type,
7103 fold_convert (type, alt0),
7104 fold_convert (type, alt1)),
7105 fold_convert (type, same));
7107 return NULL_TREE;
7110 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
7113 upon failure. */
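/* Byte-order sketch, assuming 8-bit bytes and a 4-byte type: on a
   little-endian target, encoding the INTEGER_CST 0x01020304 stores
   the bytes 04 03 02 01 into PTR and returns 4.  */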
7115 static int
7116 native_encode_int (tree expr, unsigned char *ptr, int len)
7118 tree type = TREE_TYPE (expr);
7119 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7120 int byte, offset, word, words;
7121 unsigned char value;
7123 if (total_bytes > len)
7124 return 0;
7125 words = total_bytes / UNITS_PER_WORD;
7127 for (byte = 0; byte < total_bytes; byte++)
7129 int bitpos = byte * BITS_PER_UNIT;
7130 if (bitpos < HOST_BITS_PER_WIDE_INT)
7131 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7132 else
7133 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7134 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7136 if (total_bytes > UNITS_PER_WORD)
7138 word = byte / UNITS_PER_WORD;
7139 if (WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 else
7145 offset += byte % UNITS_PER_WORD;
7147 else
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 ptr[offset] = value;
7151 return total_bytes;
7155 /* Subroutine of native_encode_expr. Encode the REAL_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7160 static int
7161 native_encode_real (tree expr, unsigned char *ptr, int len)
7163 tree type = TREE_TYPE (expr);
7164 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7165 int byte, offset, word, words, bitpos;
7166 unsigned char value;
7168 /* There are always 32 bits in each long, no matter the size of
7169 the host's long. We handle floating point representations with
7170 up to 192 bits. */
7171 long tmp[6];
7173 if (total_bytes > len)
7174 return 0;
7175 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7177 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7179 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7180 bitpos += BITS_PER_UNIT)
7182 byte = (bitpos / BITS_PER_UNIT) & 3;
7183 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7185 if (UNITS_PER_WORD < 4)
7187 word = byte / UNITS_PER_WORD;
7188 if (WORDS_BIG_ENDIAN)
7189 word = (words - 1) - word;
7190 offset = word * UNITS_PER_WORD;
7191 if (BYTES_BIG_ENDIAN)
7192 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7193 else
7194 offset += byte % UNITS_PER_WORD;
7196 else
7197 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7198 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7200 return total_bytes;
7203 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7208 static int
7209 native_encode_complex (tree expr, unsigned char *ptr, int len)
7211 int rsize, isize;
7212 tree part;
7214 part = TREE_REALPART (expr);
7215 rsize = native_encode_expr (part, ptr, len);
7216 if (rsize == 0)
7217 return 0;
7218 part = TREE_IMAGPART (expr);
7219 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7220 if (isize != rsize)
7221 return 0;
7222 return rsize + isize;
7226 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7227 specified by EXPR into the buffer PTR of length LEN bytes.
7228 Return the number of bytes placed in the buffer, or zero
7229 upon failure. */
7231 static int
7232 native_encode_vector (tree expr, unsigned char *ptr, int len)
7234 int i, size, offset, count;
7235 tree itype, elem, elements;
7237 offset = 0;
7238 elements = TREE_VECTOR_CST_ELTS (expr);
7239 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7240 itype = TREE_TYPE (TREE_TYPE (expr));
7241 size = GET_MODE_SIZE (TYPE_MODE (itype));
7242 for (i = 0; i < count; i++)
7244 if (elements)
7246 elem = TREE_VALUE (elements);
7247 elements = TREE_CHAIN (elements);
7249 else
7250 elem = NULL_TREE;
7252 if (elem)
7254 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7255 return 0;
7257 else
7259 if (offset + size > len)
7260 return 0;
7261 memset (ptr+offset, 0, size);
7263 offset += size;
7265 return offset;
7269 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7270 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7271 buffer PTR of length LEN bytes. Return the number of bytes
7272 placed in the buffer, or zero upon failure. */
7274 static int
7275 native_encode_expr (tree expr, unsigned char *ptr, int len)
7277 switch (TREE_CODE (expr))
7279 case INTEGER_CST:
7280 return native_encode_int (expr, ptr, len);
7282 case REAL_CST:
7283 return native_encode_real (expr, ptr, len);
7285 case COMPLEX_CST:
7286 return native_encode_complex (expr, ptr, len);
7288 case VECTOR_CST:
7289 return native_encode_vector (expr, ptr, len);
7291 default:
7292 return 0;
7297 /* Subroutine of native_interpret_expr. Interpret the contents of
7298 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7299 If the buffer cannot be interpreted, return NULL_TREE. */
7301 static tree
7302 native_interpret_int (tree type, unsigned char *ptr, int len)
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 int byte, offset, word, words;
7306 unsigned char value;
7307 unsigned HOST_WIDE_INT lo = 0;
7308 HOST_WIDE_INT hi = 0;
7310 if (total_bytes > len)
7311 return NULL_TREE;
7312 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7313 return NULL_TREE;
7314 words = total_bytes / UNITS_PER_WORD;
7316 for (byte = 0; byte < total_bytes; byte++)
7318 int bitpos = byte * BITS_PER_UNIT;
7319 if (total_bytes > UNITS_PER_WORD)
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7327 else
7328 offset += byte % UNITS_PER_WORD;
7330 else
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7332 value = ptr[offset];
7334 if (bitpos < HOST_BITS_PER_WIDE_INT)
7335 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7336 else
7337 hi |= (unsigned HOST_WIDE_INT) value
7338 << (bitpos - HOST_BITS_PER_WIDE_INT);
7341 return build_int_cst_wide_type (type, lo, hi);
7345 /* Subroutine of native_interpret_expr. Interpret the contents of
7346 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7347 If the buffer cannot be interpreted, return NULL_TREE. */
7349 static tree
7350 native_interpret_real (tree type, unsigned char *ptr, int len)
7352 enum machine_mode mode = TYPE_MODE (type);
7353 int total_bytes = GET_MODE_SIZE (mode);
7354 int byte, offset, word, words, bitpos;
7355 unsigned char value;
7356 /* There are always 32 bits in each long, no matter the size of
7357 the host's long. We handle floating point representations with
7358 up to 192 bits. */
7359 REAL_VALUE_TYPE r;
7360 long tmp[6];
7362 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7363 if (total_bytes > len || total_bytes > 24)
7364 return NULL_TREE;
7365 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7367 memset (tmp, 0, sizeof (tmp));
7368 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7369 bitpos += BITS_PER_UNIT)
7371 byte = (bitpos / BITS_PER_UNIT) & 3;
7372 if (UNITS_PER_WORD < 4)
7374 word = byte / UNITS_PER_WORD;
7375 if (WORDS_BIG_ENDIAN)
7376 word = (words - 1) - word;
7377 offset = word * UNITS_PER_WORD;
7378 if (BYTES_BIG_ENDIAN)
7379 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7380 else
7381 offset += byte % UNITS_PER_WORD;
7383 else
7384 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7385 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7387 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7390 real_from_target (&r, tmp, mode);
7391 return build_real (type, r);
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7399 static tree
7400 native_interpret_complex (tree type, unsigned char *ptr, int len)
7402 tree etype, rpart, ipart;
7403 int size;
7405 etype = TREE_TYPE (type);
7406 size = GET_MODE_SIZE (TYPE_MODE (etype));
7407 if (size * 2 > len)
7408 return NULL_TREE;
7409 rpart = native_interpret_expr (etype, ptr, size);
7410 if (!rpart)
7411 return NULL_TREE;
7412 ipart = native_interpret_expr (etype, ptr+size, size);
7413 if (!ipart)
7414 return NULL_TREE;
7415 return build_complex (type, rpart, ipart);
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7423 static tree
7424 native_interpret_vector (tree type, unsigned char *ptr, int len)
7426 tree etype, elem, elements;
7427 int i, size, count;
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7431 count = TYPE_VECTOR_SUBPARTS (type);
7432 if (size * count > len)
7433 return NULL_TREE;
7435 elements = NULL_TREE;
7436 for (i = count - 1; i >= 0; i--)
7438 elem = native_interpret_expr (etype, ptr+(i*size), size);
7439 if (!elem)
7440 return NULL_TREE;
7441 elements = tree_cons (NULL_TREE, elem, elements);
7443 return build_vector (type, elements);
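/* Layout sketch (assuming the usual contiguous representation): a
   COMPLEX_CST of two 8-byte doubles is read as bytes [0,8) for the
   real part and [8,16) for the imaginary part, and a four-element
   vector as four consecutive size-byte slices; both routines above
   just recurse through native_interpret_expr on each slice.  */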
7447 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a constant of type TYPE. For
7449 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7450 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7451 return NULL_TREE. */
7453 static tree
7454 native_interpret_expr (tree type, unsigned char *ptr, int len)
7456 switch (TREE_CODE (type))
7458 case INTEGER_TYPE:
7459 case ENUMERAL_TYPE:
7460 case BOOLEAN_TYPE:
7461 return native_interpret_int (type, ptr, len);
7463 case REAL_TYPE:
7464 return native_interpret_real (type, ptr, len);
7466 case COMPLEX_TYPE:
7467 return native_interpret_complex (type, ptr, len);
7469 case VECTOR_TYPE:
7470 return native_interpret_vector (type, ptr, len);
7472 default:
7473 return NULL_TREE;
7478 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7479 TYPE at compile-time. If we're unable to perform the conversion
7480 return NULL_TREE. */
7482 static tree
7483 fold_view_convert_expr (tree type, tree expr)
7485 /* We support up to 512-bit values (for V8DFmode). */
7486 unsigned char buffer[64];
7487 int len;
7489 /* Check that the host and target are sane. */
7490 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7491 return NULL_TREE;
7493 len = native_encode_expr (expr, buffer, sizeof (buffer));
7494 if (len == 0)
7495 return NULL_TREE;
7497 return native_interpret_expr (type, buffer, len);
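/* Source-level sketch (hypothetical input; assumes 32-bit int and IEEE
   single precision float): given a constant VIEW_CONVERT_EXPR <int>
   (1.0f), the kind of tree a union- or pointer-based type pun such as
   *(int *) &f can produce, the float is rendered into its four target
   bytes by native_encode_expr and reread as an INTEGER_CST, folding
   the whole expression to 0x3f800000 at compile time.  */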
7501 /* Fold a unary expression of code CODE and type TYPE with operand
7502 OP0. Return the folded expression if folding is successful.
7503 Otherwise, return NULL_TREE. */
7505 tree
7506 fold_unary (enum tree_code code, tree type, tree op0)
7508 tree tem;
7509 tree arg0;
7510 enum tree_code_class kind = TREE_CODE_CLASS (code);
7512 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7513 && TREE_CODE_LENGTH (code) == 1);
7515 arg0 = op0;
7516 if (arg0)
7518 if (code == NOP_EXPR || code == CONVERT_EXPR
7519 || code == FLOAT_EXPR || code == ABS_EXPR)
7521 /* Don't use STRIP_NOPS, because signedness of argument type
7522 matters. */
7523 STRIP_SIGN_NOPS (arg0);
7525 else
7527 /* Strip any conversions that don't change the mode. This
7528 is safe for every expression, except for a comparison
7529 expression because its signedness is derived from its
7530 operands.
7532 Note that this is done as an internal manipulation within
7533 the constant folder, in order to find the simplest
7534 representation of the arguments so that their form can be
7535 studied. In any case, the appropriate type conversions
7536 should be put back in the tree that will get out of the
7537 constant folder. */
7538 STRIP_NOPS (arg0);
7542 if (TREE_CODE_CLASS (code) == tcc_unary)
7544 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7545 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7546 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7547 else if (TREE_CODE (arg0) == COND_EXPR)
7549 tree arg01 = TREE_OPERAND (arg0, 1);
7550 tree arg02 = TREE_OPERAND (arg0, 2);
7551 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7552 arg01 = fold_build1 (code, type, arg01);
7553 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7554 arg02 = fold_build1 (code, type, arg02);
7555 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7556 arg01, arg02);
7558 /* If this was a conversion, and all we did was to move it
7559 inside the COND_EXPR, bring it back out. But leave it if
7560 it is a conversion from integer to integer and the
7561 result precision is no wider than a word since such a
7562 conversion is cheap and may be optimized away by combine,
7563 while it couldn't if it were outside the COND_EXPR. Then return
7564 so we don't get into an infinite recursion loop taking the
7565 conversion out and then back in. */
7567 if ((code == NOP_EXPR || code == CONVERT_EXPR
7568 || code == NON_LVALUE_EXPR)
7569 && TREE_CODE (tem) == COND_EXPR
7570 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7571 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7572 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7573 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7574 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7575 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7576 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7577 && (INTEGRAL_TYPE_P
7578 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7579 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7580 || flag_syntax_only))
7581 tem = build1 (code, type,
7582 build3 (COND_EXPR,
7583 TREE_TYPE (TREE_OPERAND
7584 (TREE_OPERAND (tem, 1), 0)),
7585 TREE_OPERAND (tem, 0),
7586 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7587 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7588 return tem;
7590 else if (COMPARISON_CLASS_P (arg0))
7592 if (TREE_CODE (type) == BOOLEAN_TYPE)
7594 arg0 = copy_node (arg0);
7595 TREE_TYPE (arg0) = type;
7596 return arg0;
7598 else if (TREE_CODE (type) != INTEGER_TYPE)
7599 return fold_build3 (COND_EXPR, type, arg0,
7600 fold_build1 (code, type,
7601 integer_one_node),
7602 fold_build1 (code, type,
7603 integer_zero_node));
7607 switch (code)
7609 case NOP_EXPR:
7610 case FLOAT_EXPR:
7611 case CONVERT_EXPR:
7612 case FIX_TRUNC_EXPR:
7613 if (TREE_TYPE (op0) == type)
7614 return op0;
7616 /* If we have (type) (a CMP b) and type is an integral type, return
7617 new expression involving the new type. */
7618 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7619 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7620 TREE_OPERAND (op0, 1));
7622 /* Handle cases of two conversions in a row. */
7623 if (TREE_CODE (op0) == NOP_EXPR
7624 || TREE_CODE (op0) == CONVERT_EXPR)
7626 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7627 tree inter_type = TREE_TYPE (op0);
7628 int inside_int = INTEGRAL_TYPE_P (inside_type);
7629 int inside_ptr = POINTER_TYPE_P (inside_type);
7630 int inside_float = FLOAT_TYPE_P (inside_type);
7631 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7632 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7633 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7634 int inter_int = INTEGRAL_TYPE_P (inter_type);
7635 int inter_ptr = POINTER_TYPE_P (inter_type);
7636 int inter_float = FLOAT_TYPE_P (inter_type);
7637 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7638 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7639 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7640 int final_int = INTEGRAL_TYPE_P (type);
7641 int final_ptr = POINTER_TYPE_P (type);
7642 int final_float = FLOAT_TYPE_P (type);
7643 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7644 unsigned int final_prec = TYPE_PRECISION (type);
7645 int final_unsignedp = TYPE_UNSIGNED (type);
7647 /* In addition to the cases of two conversions in a row
7648 handled below, if we are converting something to its own
7649 type via an object of identical or wider precision, neither
7650 conversion is needed. */
7651 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7652 && (((inter_int || inter_ptr) && final_int)
7653 || (inter_float && final_float))
7654 && inter_prec >= final_prec)
7655 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7657 /* Likewise, if the intermediate and final types are either both
7658 float or both integer, we don't need the middle conversion if
7659 it is wider than the final type and doesn't change the signedness
7660 (for integers). Avoid this if the final type is a pointer
7661 since then we sometimes need the inner conversion. Likewise if
7662 the outer has a precision not equal to the size of its mode. */
7663 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7664 || (inter_float && inside_float)
7665 || (inter_vec && inside_vec))
7666 && inter_prec >= inside_prec
7667 && (inter_float || inter_vec
7668 || inter_unsignedp == inside_unsignedp)
7669 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7670 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7671 && ! final_ptr
7672 && (! final_vec || inter_prec == inside_prec))
7673 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7675 /* If we have a sign-extension of a zero-extended value, we can
7676 replace that by a single zero-extension. */
7677 if (inside_int && inter_int && final_int
7678 && inside_prec < inter_prec && inter_prec < final_prec
7679 && inside_unsignedp && !inter_unsignedp)
7680 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7682 /* Two conversions in a row are not needed unless:
7683 - some conversion is floating-point (overstrict for now), or
7684 - some conversion is a vector (overstrict for now), or
7685 - the intermediate type is narrower than both initial and
7686 final, or
7687 - the intermediate type and innermost type differ in signedness,
7688 and the outermost type is wider than the intermediate, or
7689 - the initial type is a pointer type and the precisions of the
7690 intermediate and final types differ, or
7691 - the final type is a pointer type and the precisions of the
7692 initial and intermediate types differ, or
7693 - the final type is a pointer type and the initial type is not, or
7694 - the initial type is a pointer to an array and the final type
7695 is not. */
7696 if (! inside_float && ! inter_float && ! final_float
7697 && ! inside_vec && ! inter_vec && ! final_vec
7698 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7699 && ! (inside_int && inter_int
7700 && inter_unsignedp != inside_unsignedp
7701 && inter_prec < final_prec)
7702 && ((inter_unsignedp && inter_prec > inside_prec)
7703 == (final_unsignedp && final_prec > inter_prec))
7704 && ! (inside_ptr && inter_prec != final_prec)
7705 && ! (final_ptr && inside_prec != inter_prec)
7706 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7707 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7708 && final_ptr == inside_ptr
7709 && ! (inside_ptr
7710 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7711 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7712 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
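/* Example of the rules above (hypothetical LP64-style precisions):
   (int) (long) i with i of type int drops the intermediate widening,
   since int and long are both integral and long is no narrower than
   int, whereas (int) (char) i must keep the truncation through char
   because the intermediate type is narrower than both ends.  */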
7715 /* Handle (T *)&A.B.C for A being of type T and B and C
7716 living at offset zero. This occurs frequently in
7717 C++ upcasting and then accessing the base. */
7718 if (TREE_CODE (op0) == ADDR_EXPR
7719 && POINTER_TYPE_P (type)
7720 && handled_component_p (TREE_OPERAND (op0, 0)))
7722 HOST_WIDE_INT bitsize, bitpos;
7723 tree offset;
7724 enum machine_mode mode;
7725 int unsignedp, volatilep;
7726 tree base = TREE_OPERAND (op0, 0);
7727 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7728 &mode, &unsignedp, &volatilep, false);
7729 /* If the reference was to a (constant) zero offset, we can use
7730 the address of the base if it has the same base type
7731 as the result type. */
7732 if (! offset && bitpos == 0
7733 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7734 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7735 return fold_convert (type, build_fold_addr_expr (base));
7738 /* Convert (type *)&A into &A->field_of_type_and_offset_0. */
7739 if (TREE_CODE (op0) == ADDR_EXPR && POINTER_TYPE_P (type)
7740 && (tem = maybe_fold_offset_to_component_ref
7741 (TREE_TYPE (TREE_OPERAND (op0, 0)), TREE_OPERAND (op0, 0),
7742 integer_zero_node, TREE_TYPE (type), false)))
7743 return build_fold_addr_expr_with_type (tem, type);
7745 if ((TREE_CODE (op0) == MODIFY_EXPR
7746 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7747 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7748 /* Detect assigning a bitfield. */
7749 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7750 && DECL_BIT_FIELD
7751 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7753 /* Don't leave an assignment inside a conversion
7754 unless assigning a bitfield. */
7755 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7756 /* First do the assignment, then return converted constant. */
7757 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7758 TREE_NO_WARNING (tem) = 1;
7759 TREE_USED (tem) = 1;
7760 return tem;
7763 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7764 constant (if x has signed type, the sign bit cannot be set
7765 in c). This folds extension into the BIT_AND_EXPR. */
7766 if (INTEGRAL_TYPE_P (type)
7767 && TREE_CODE (type) != BOOLEAN_TYPE
7768 && TREE_CODE (op0) == BIT_AND_EXPR
7769 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7771 tree and = op0;
7772 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7773 int change = 0;
7775 if (TYPE_UNSIGNED (TREE_TYPE (and))
7776 || (TYPE_PRECISION (type)
7777 <= TYPE_PRECISION (TREE_TYPE (and))))
7778 change = 1;
7779 else if (TYPE_PRECISION (TREE_TYPE (and1))
7780 <= HOST_BITS_PER_WIDE_INT
7781 && host_integerp (and1, 1))
7783 unsigned HOST_WIDE_INT cst;
7785 cst = tree_low_cst (and1, 1);
7786 cst &= (HOST_WIDE_INT) -1
7787 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7788 change = (cst == 0);
7789 #ifdef LOAD_EXTEND_OP
7790 if (change
7791 && !flag_syntax_only
7792 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7793 == ZERO_EXTEND))
7795 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7796 and0 = fold_convert (uns, and0);
7797 and1 = fold_convert (uns, and1);
7799 #endif
7801 if (change)
7803 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7804 TREE_INT_CST_HIGH (and1), 0,
7805 TREE_OVERFLOW (and1));
7806 return fold_build2 (BIT_AND_EXPR, type,
7807 fold_convert (type, and0), tem);
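/* Example (hypothetical types): for signed char c, the widening
   (unsigned int) (c & 0x7f) becomes (unsigned int) c & 0x7f, which is
   safe because the mask leaves the sign bit clear, so zero- and
   sign-extension of the masked value agree.  */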
7811 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7812 T2 being pointers to types of the same size. */
7813 if (POINTER_TYPE_P (type)
7814 && BINARY_CLASS_P (arg0)
7815 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7816 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7818 tree arg00 = TREE_OPERAND (arg0, 0);
7819 tree t0 = type;
7820 tree t1 = TREE_TYPE (arg00);
7821 tree tt0 = TREE_TYPE (t0);
7822 tree tt1 = TREE_TYPE (t1);
7823 tree s0 = TYPE_SIZE (tt0);
7824 tree s1 = TYPE_SIZE (tt1);
7826 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7827 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7828 TREE_OPERAND (arg0, 1));
7831 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7832 of the same precision, and X is an integer type not narrower than
7833 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7834 if (INTEGRAL_TYPE_P (type)
7835 && TREE_CODE (op0) == BIT_NOT_EXPR
7836 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7837 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7838 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7839 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7841 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7842 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7843 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7844 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7847 tem = fold_convert_const (code, type, arg0);
7848 return tem ? tem : NULL_TREE;
7850 case VIEW_CONVERT_EXPR:
7851 if (TREE_TYPE (op0) == type)
7852 return op0;
7853 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7854 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7855 return fold_view_convert_expr (type, op0);
7857 case NEGATE_EXPR:
7858 tem = fold_negate_expr (arg0);
7859 if (tem)
7860 return fold_convert (type, tem);
7861 return NULL_TREE;
7863 case ABS_EXPR:
7864 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7865 return fold_abs_const (arg0, type);
7866 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7867 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7868 /* Convert fabs((double)float) into (double)fabsf(float). */
7869 else if (TREE_CODE (arg0) == NOP_EXPR
7870 && TREE_CODE (type) == REAL_TYPE)
7872 tree targ0 = strip_float_extensions (arg0);
7873 if (targ0 != arg0)
7874 return fold_convert (type, fold_build1 (ABS_EXPR,
7875 TREE_TYPE (targ0),
7876 targ0));
7878 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7879 else if (TREE_CODE (arg0) == ABS_EXPR)
7880 return arg0;
7881 else if (tree_expr_nonnegative_p (arg0))
7882 return arg0;
7884 /* Strip sign ops from argument. */
7885 if (TREE_CODE (type) == REAL_TYPE)
7887 tem = fold_strip_sign_ops (arg0);
7888 if (tem)
7889 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7891 return NULL_TREE;
7893 case CONJ_EXPR:
7894 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7895 return fold_convert (type, arg0);
7896 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7898 tree itype = TREE_TYPE (type);
7899 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7900 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7901 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7903 if (TREE_CODE (arg0) == COMPLEX_CST)
7905 tree itype = TREE_TYPE (type);
7906 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7907 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7908 return build_complex (type, rpart, negate_expr (ipart));
7910 if (TREE_CODE (arg0) == CONJ_EXPR)
7911 return fold_convert (type, TREE_OPERAND (arg0, 0));
7912 return NULL_TREE;
7914 case BIT_NOT_EXPR:
7915 if (TREE_CODE (arg0) == INTEGER_CST)
7916 return fold_not_const (arg0, type);
7917 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7918 return TREE_OPERAND (arg0, 0);
7919 /* Convert ~ (-A) to A - 1. */
7920 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7921 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7922 build_int_cst (type, 1));
7923 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7924 else if (INTEGRAL_TYPE_P (type)
7925 && ((TREE_CODE (arg0) == MINUS_EXPR
7926 && integer_onep (TREE_OPERAND (arg0, 1)))
7927 || (TREE_CODE (arg0) == PLUS_EXPR
7928 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7929 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7930 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7931 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7932 && (tem = fold_unary (BIT_NOT_EXPR, type,
7933 fold_convert (type,
7934 TREE_OPERAND (arg0, 0)))))
7935 return fold_build2 (BIT_XOR_EXPR, type, tem,
7936 fold_convert (type, TREE_OPERAND (arg0, 1)));
7937 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7938 && (tem = fold_unary (BIT_NOT_EXPR, type,
7939 fold_convert (type,
7940 TREE_OPERAND (arg0, 1)))))
7941 return fold_build2 (BIT_XOR_EXPR, type,
7942 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7944 return NULL_TREE;
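/* The BIT_NOT_EXPR identities above all follow from the two's
   complement rule ~X == -X - 1: for instance
   ~(-A) == -(-A) - 1 == A - 1, and ~(A - 1) == -(A - 1) - 1 == -A.  */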
7946 case TRUTH_NOT_EXPR:
7947 /* The argument to invert_truthvalue must have Boolean type. */
7948 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7949 arg0 = fold_convert (boolean_type_node, arg0);
7951 /* Note that the operand of this must be an int
7952 and its values must be 0 or 1.
7953 ("true" is a fixed value perhaps depending on the language,
7954 but we don't handle values other than 1 correctly yet.) */
7955 tem = fold_truth_not_expr (arg0);
7956 if (!tem)
7957 return NULL_TREE;
7958 return fold_convert (type, tem);
7960 case REALPART_EXPR:
7961 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7962 return fold_convert (type, arg0);
7963 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7964 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7965 TREE_OPERAND (arg0, 1));
7966 if (TREE_CODE (arg0) == COMPLEX_CST)
7967 return fold_convert (type, TREE_REALPART (arg0));
7968 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7970 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7971 tem = fold_build2 (TREE_CODE (arg0), itype,
7972 fold_build1 (REALPART_EXPR, itype,
7973 TREE_OPERAND (arg0, 0)),
7974 fold_build1 (REALPART_EXPR, itype,
7975 TREE_OPERAND (arg0, 1)));
7976 return fold_convert (type, tem);
7978 if (TREE_CODE (arg0) == CONJ_EXPR)
7980 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7981 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7982 return fold_convert (type, tem);
7984 if (TREE_CODE (arg0) == CALL_EXPR)
7986 tree fn = get_callee_fndecl (arg0);
7987 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7988 switch (DECL_FUNCTION_CODE (fn))
7990 CASE_FLT_FN (BUILT_IN_CEXPI):
7991 fn = mathfn_built_in (type, BUILT_IN_COS);
7992 if (fn)
7993 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
7994 break;
7996 default:
7997 break;
8000 return NULL_TREE;
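/* Example of the CEXPI folding (assuming the builtin is available):
   since cexpi (x) computes cos (x) + i*sin (x), the real part of
   __builtin_cexpi (x) folds to a plain cos (x) call here, and the
   matching IMAGPART_EXPR case below folds the imaginary part to
   sin (x).  */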
8002 case IMAGPART_EXPR:
8003 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8004 return fold_convert (type, integer_zero_node);
8005 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8006 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8007 TREE_OPERAND (arg0, 0));
8008 if (TREE_CODE (arg0) == COMPLEX_CST)
8009 return fold_convert (type, TREE_IMAGPART (arg0));
8010 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8012 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8013 tem = fold_build2 (TREE_CODE (arg0), itype,
8014 fold_build1 (IMAGPART_EXPR, itype,
8015 TREE_OPERAND (arg0, 0)),
8016 fold_build1 (IMAGPART_EXPR, itype,
8017 TREE_OPERAND (arg0, 1)));
8018 return fold_convert (type, tem);
8020 if (TREE_CODE (arg0) == CONJ_EXPR)
8022 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8023 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8024 return fold_convert (type, negate_expr (tem));
8026 if (TREE_CODE (arg0) == CALL_EXPR)
8028 tree fn = get_callee_fndecl (arg0);
8029 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8030 switch (DECL_FUNCTION_CODE (fn))
8032 CASE_FLT_FN (BUILT_IN_CEXPI):
8033 fn = mathfn_built_in (type, BUILT_IN_SIN);
8034 if (fn)
8035 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8036 break;
8038 default:
8039 break;
8042 return NULL_TREE;
8044 default:
8045 return NULL_TREE;
8046 } /* switch (code) */
8049 /* Fold a binary expression of code CODE and type TYPE with operands
8050 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8051 Return the folded expression if folding is successful. Otherwise,
8052 return NULL_TREE. */
8054 static tree
8055 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8057 enum tree_code compl_code;
8059 if (code == MIN_EXPR)
8060 compl_code = MAX_EXPR;
8061 else if (code == MAX_EXPR)
8062 compl_code = MIN_EXPR;
8063 else
8064 gcc_unreachable ();
8066 /* MIN (MAX (a, b), b) == b. */
8067 if (TREE_CODE (op0) == compl_code
8068 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8069 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8071 /* MIN (MAX (b, a), b) == b. */
8072 if (TREE_CODE (op0) == compl_code
8073 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8074 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8075 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8077 /* MIN (a, MAX (a, b)) == a. */
8078 if (TREE_CODE (op1) == compl_code
8079 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8080 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8081 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8083 /* MIN (a, MAX (b, a)) == a. */
8084 if (TREE_CODE (op1) == compl_code
8085 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8086 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8087 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8089 return NULL_TREE;
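/* Example: with code == MIN_EXPR, compl_code is MAX_EXPR, so
   MIN (MAX (a, b), b) folds to b: whatever a is, MAX (a, b) >= b,
   and the minimum of that with b is b itself.  The other three cases
   are the commuted variants of the same identity.  */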
8092 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8093 by changing CODE to reduce the magnitude of constants involved in
8094 ARG0 of the comparison.
8095 Returns a canonicalized comparison tree if a simplification was
8096 possible, otherwise returns NULL_TREE.
8097 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8098 valid if signed overflow is undefined. */
8100 static tree
8101 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8102 tree arg0, tree arg1,
8103 bool *strict_overflow_p)
8105 enum tree_code code0 = TREE_CODE (arg0);
8106 tree t, cst0 = NULL_TREE;
8107 int sgn0;
8108 bool swap = false;
8110 /* Match A +- CST code arg1 and CST code arg1. */
8111 if (!(((code0 == MINUS_EXPR
8112 || code0 == PLUS_EXPR)
8113 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8114 || code0 == INTEGER_CST))
8115 return NULL_TREE;
8117 /* Identify the constant in arg0 and its sign. */
8118 if (code0 == INTEGER_CST)
8119 cst0 = arg0;
8120 else
8121 cst0 = TREE_OPERAND (arg0, 1);
8122 sgn0 = tree_int_cst_sgn (cst0);
8124 /* Overflowed constants and zero will cause problems. */
8125 if (integer_zerop (cst0)
8126 || TREE_OVERFLOW (cst0))
8127 return NULL_TREE;
8129 /* See if we can reduce the magnitude of the constant in
8130 arg0 by changing the comparison code. */
8131 if (code0 == INTEGER_CST)
8133 /* CST <= arg1 -> CST-1 < arg1. */
8134 if (code == LE_EXPR && sgn0 == 1)
8135 code = LT_EXPR;
8136 /* -CST < arg1 -> -CST-1 <= arg1. */
8137 else if (code == LT_EXPR && sgn0 == -1)
8138 code = LE_EXPR;
8139 /* CST > arg1 -> CST-1 >= arg1. */
8140 else if (code == GT_EXPR && sgn0 == 1)
8141 code = GE_EXPR;
8142 /* -CST >= arg1 -> -CST-1 > arg1. */
8143 else if (code == GE_EXPR && sgn0 == -1)
8144 code = GT_EXPR;
8145 else
8146 return NULL_TREE;
8147 /* arg1 code' CST' might be more canonical. */
8148 swap = true;
8150 else
8152 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8153 if (code == LT_EXPR
8154 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8155 code = LE_EXPR;
8156 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8157 else if (code == GT_EXPR
8158 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8159 code = GE_EXPR;
8160 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8161 else if (code == LE_EXPR
8162 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8163 code = LT_EXPR;
8164 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8165 else if (code == GE_EXPR
8166 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8167 code = GT_EXPR;
8168 else
8169 return NULL_TREE;
8170 *strict_overflow_p = true;
8173 /* Now build the constant reduced in magnitude. */
8174 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8175 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8176 if (code0 != INTEGER_CST)
8177 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8179 /* If swapping might yield a more canonical form, do so. */
8180 if (swap)
8181 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8182 else
8183 return fold_build2 (code, type, t, arg1);
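/* Worked example (hypothetical signed operands with undefined
   overflow): x + 2 <= y becomes x + 1 < y, and the sole-constant form
   3 <= y first becomes 2 < y and is then swapped into the more
   canonical y > 2.  */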
8186 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE,
8187 for which signed overflow is undefined. Try to decrease the magnitude of constants involved
8188 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8189 and put sole constants at the second argument position.
8190 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8192 static tree
8193 maybe_canonicalize_comparison (enum tree_code code, tree type,
8194 tree arg0, tree arg1)
8196 tree t;
8197 bool strict_overflow_p;
8198 const char * const warnmsg = G_("assuming signed overflow does not occur "
8199 "when reducing constant in comparison");
8201 /* In principle pointers also have undefined overflow behavior,
8202 but that causes problems elsewhere. */
8203 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8204 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8205 return NULL_TREE;
8207 /* Try canonicalization by simplifying arg0. */
8208 strict_overflow_p = false;
8209 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8210 &strict_overflow_p);
8211 if (t)
8213 if (strict_overflow_p)
8214 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8215 return t;
8218 /* Try canonicalization by simplifying arg1 using the swapped
8219 comparison. */
8220 code = swap_tree_comparison (code);
8221 strict_overflow_p = false;
8222 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8223 &strict_overflow_p);
8224 if (t && strict_overflow_p)
8225 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8226 return t;
8229 /* Subroutine of fold_binary. This routine performs all of the
8230 transformations that are common to the equality/inequality
8231 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8232 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8233 fold_binary itself should call fold_binary instead. Fold a comparison with
8234 tree code CODE and type TYPE with operands OP0 and OP1. Return
8235 the folded comparison or NULL_TREE. */
8237 static tree
8238 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8240 tree arg0, arg1, tem;
8242 arg0 = op0;
8243 arg1 = op1;
8245 STRIP_SIGN_NOPS (arg0);
8246 STRIP_SIGN_NOPS (arg1);
8248 tem = fold_relational_const (code, type, arg0, arg1);
8249 if (tem != NULL_TREE)
8250 return tem;
8252 /* If one arg is a real or integer constant, put it last. */
8253 if (tree_swap_operands_p (arg0, arg1, true))
8254 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8256 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8257 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8258 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8259 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8260 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8261 && (TREE_CODE (arg1) == INTEGER_CST
8262 && !TREE_OVERFLOW (arg1)))
8264 tree const1 = TREE_OPERAND (arg0, 1);
8265 tree const2 = arg1;
8266 tree variable = TREE_OPERAND (arg0, 0);
8267 tree lhs;
8268 int lhs_add;
8269 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8271 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8272 TREE_TYPE (arg1), const2, const1);
8274 /* If the constant operation overflowed this can be
8275 simplified as a comparison against INT_MAX/INT_MIN. */
8276 if (TREE_CODE (lhs) == INTEGER_CST
8277 && TREE_OVERFLOW (lhs))
8279 int const1_sgn = tree_int_cst_sgn (const1);
8280 enum tree_code code2 = code;
8282 /* Get the sign of the constant on the lhs if the
8283 operation were VARIABLE + CONST1. */
8284 if (TREE_CODE (arg0) == MINUS_EXPR)
8285 const1_sgn = -const1_sgn;
8287 /* The sign of the constant determines if we overflowed
8288 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8289 Canonicalize to the INT_MIN overflow by swapping the comparison
8290 if necessary. */
8291 if (const1_sgn == -1)
8292 code2 = swap_tree_comparison (code);
8294 /* We now can look at the canonicalized case
8295 VARIABLE + 1 CODE2 INT_MIN
8296 and decide on the result. */
8297 if (code2 == LT_EXPR
8298 || code2 == LE_EXPR
8299 || code2 == EQ_EXPR)
8300 return omit_one_operand (type, boolean_false_node, variable);
8301 else if (code2 == NE_EXPR
8302 || code2 == GE_EXPR
8303 || code2 == GT_EXPR)
8304 return omit_one_operand (type, boolean_true_node, variable);
8307 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8308 && (TREE_CODE (lhs) != INTEGER_CST
8309 || !TREE_OVERFLOW (lhs)))
8311 fold_overflow_warning (("assuming signed overflow does not occur "
8312 "when changing X +- C1 cmp C2 to "
8313 "X cmp C1 +- C2"),
8314 WARN_STRICT_OVERFLOW_COMPARISON);
8315 return fold_build2 (code, type, variable, lhs);
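/* Worked example (hypothetical signed int operands): x + 1 < 5 folds
   to x < 4 once the constants are combined on the right-hand side.
   If the combined constant overflows, as in x - 1 <= INT_MAX whose
   right-hand side would be INT_MAX + 1, the code above folds the
   whole comparison to a constant instead, here true, since no
   overflow-free value of x can violate it.  */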
8319 /* For comparisons of pointers we can decompose it to a compile time
8320 comparison of the base objects and the offsets into the object.
8321 This requires at least one operand being an ADDR_EXPR to do more
8322 than the operand_equal_p test below. */
8323 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8324 && (TREE_CODE (arg0) == ADDR_EXPR
8325 || TREE_CODE (arg1) == ADDR_EXPR))
8327 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8328 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8329 enum machine_mode mode;
8330 int volatilep, unsignedp;
8331 bool indirect_base0 = false;
8333 /* Get base and offset for the access. Strip ADDR_EXPR for
8334 get_inner_reference, but put it back by stripping INDIRECT_REF
8335 off the base object if possible. */
8336 base0 = arg0;
8337 if (TREE_CODE (arg0) == ADDR_EXPR)
8339 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8340 &bitsize, &bitpos0, &offset0, &mode,
8341 &unsignedp, &volatilep, false);
8342 if (TREE_CODE (base0) == INDIRECT_REF)
8343 base0 = TREE_OPERAND (base0, 0);
8344 else
8345 indirect_base0 = true;
8348 base1 = arg1;
8349 if (TREE_CODE (arg1) == ADDR_EXPR)
8351 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8352 &bitsize, &bitpos1, &offset1, &mode,
8353 &unsignedp, &volatilep, false);
8354 /* We have to make sure to have an indirect/non-indirect base1
8355 just the same as we did for base0. */
8356 if (TREE_CODE (base1) == INDIRECT_REF
8357 && !indirect_base0)
8358 base1 = TREE_OPERAND (base1, 0);
8359 else if (!indirect_base0)
8360 base1 = NULL_TREE;
8362 else if (indirect_base0)
8363 base1 = NULL_TREE;
8365 /* If we have equivalent bases we might be able to simplify. */
8366 if (base0 && base1
8367 && operand_equal_p (base0, base1, 0))
8369 /* We can fold this expression to a constant if the non-constant
8370 offset parts are equal. */
8371 if (offset0 == offset1
8372 || (offset0 && offset1
8373 && operand_equal_p (offset0, offset1, 0)))
8375 switch (code)
8377 case EQ_EXPR:
8378 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8379 case NE_EXPR:
8380 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8381 case LT_EXPR:
8382 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8383 case LE_EXPR:
8384 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8385 case GE_EXPR:
8386 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8387 case GT_EXPR:
8388 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8389 default:;
8392 /* We can simplify the comparison to a comparison of the variable
8393 offset parts if the constant offset parts are equal.
8394 Be careful to use signed size type here because otherwise we
8395 mess with array offsets in the wrong way. This is possible
8396 because pointer arithmetic is restricted to remain within an
8397 object and overflow on pointer differences is undefined as of
8398 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8399 else if (bitpos0 == bitpos1)
8401 tree signed_size_type_node;
8402 signed_size_type_node = signed_type_for (size_type_node);
8404 /* By converting to signed size type we cover middle-end pointer
8405 arithmetic which operates on unsigned pointer types of size
8406 type size and ARRAY_REF offsets which are properly sign or
8407 zero extended from their type in case it is narrower than
8408 size type. */
8409 if (offset0 == NULL_TREE)
8410 offset0 = build_int_cst (signed_size_type_node, 0);
8411 else
8412 offset0 = fold_convert (signed_size_type_node, offset0);
8413 if (offset1 == NULL_TREE)
8414 offset1 = build_int_cst (signed_size_type_node, 0);
8415 else
8416 offset1 = fold_convert (signed_size_type_node, offset1);
8418 return fold_build2 (code, type, offset0, offset1);
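/* Worked example (hypothetical declarations, assuming 32-bit int):
   given struct S { int a, b; } s;, both sides of &s.b > &s.a resolve
   to the common base s with constant bit positions 32 and 0, so the
   comparison folds to the constant 32 > 0, i.e. true; &s.a == &s.b
   likewise folds to false.  */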
8423 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8424 same object, then we can fold this to a comparison of the two offsets in
8425 signed size type. This is possible because pointer arithmetic is
8426 restricted to remain within an object and overflow on pointer differences
8427 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8429 We check flag_wrapv directly because pointer types are unsigned,
8430 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8431 normally what we want in order to avoid certain odd overflow cases, but
8432 not here. */
8433 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8434 && !flag_wrapv
8435 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8437 tree base0, offset0, base1, offset1;
8439 if (extract_array_ref (arg0, &base0, &offset0)
8440 && extract_array_ref (arg1, &base1, &offset1)
8441 && operand_equal_p (base0, base1, 0))
8443 tree signed_size_type_node;
8444 signed_size_type_node = signed_type_for (size_type_node);
8446 /* By converting to signed size type we cover middle-end pointer
8447 arithmetic which operates on unsigned pointer types of size
8448 type size and ARRAY_REF offsets which are properly sign or
8449 zero extended from their type in case it is narrower than
8450 size type. */
8451 if (offset0 == NULL_TREE)
8452 offset0 = build_int_cst (signed_size_type_node, 0);
8453 else
8454 offset0 = fold_convert (signed_size_type_node, offset0);
8455 if (offset1 == NULL_TREE)
8456 offset1 = build_int_cst (signed_size_type_node, 0);
8457 else
8458 offset1 = fold_convert (signed_size_type_node, offset1);
8460 return fold_build2 (code, type, offset0, offset1);
8464 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8465 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8466 the resulting offset is smaller in absolute value than the
8467 original one. */
8468 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8469 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8470 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8471 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8472 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8473 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8474 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8476 tree const1 = TREE_OPERAND (arg0, 1);
8477 tree const2 = TREE_OPERAND (arg1, 1);
8478 tree variable1 = TREE_OPERAND (arg0, 0);
8479 tree variable2 = TREE_OPERAND (arg1, 0);
8480 tree cst;
8481 const char * const warnmsg = G_("assuming signed overflow does not "
8482 "occur when combining constants around "
8483 "a comparison");
8485 /* Put the constant on the side where it doesn't overflow and is
8486 of lower absolute value than before. */
8487 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8488 ? MINUS_EXPR : PLUS_EXPR,
8489 const2, const1, 0);
8490 if (!TREE_OVERFLOW (cst)
8491 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8493 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8494 return fold_build2 (code, type,
8495 variable1,
8496 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8497 variable2, cst));
8500 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8501 ? MINUS_EXPR : PLUS_EXPR,
8502 const1, const2, 0);
8503 if (!TREE_OVERFLOW (cst)
8504 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8506 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8507 return fold_build2 (code, type,
8508 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8509 variable1, cst),
8510 variable2);
8514 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8515 signed arithmetic case. That form is created by the compiler
8516 often enough for folding it to be of value. One example is in
8517 computing loop trip counts after Operator Strength Reduction. */
8518 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8519 && TREE_CODE (arg0) == MULT_EXPR
8520 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8521 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8522 && integer_zerop (arg1))
8524 tree const1 = TREE_OPERAND (arg0, 1);
8525 tree const2 = arg1; /* zero */
8526 tree variable1 = TREE_OPERAND (arg0, 0);
8527 enum tree_code cmp_code = code;
8529 gcc_assert (!integer_zerop (const1));
8531 fold_overflow_warning (("assuming signed overflow does not occur when "
8532 "eliminating multiplication in comparison "
8533 "with zero"),
8534 WARN_STRICT_OVERFLOW_COMPARISON);
8536 /* If const1 is negative we swap the sense of the comparison. */
8537 if (tree_int_cst_sgn (const1) < 0)
8538 cmp_code = swap_tree_comparison (cmp_code);
8540 return fold_build2 (cmp_code, type, variable1, const2);
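/* Example (signed x, overflow undefined): x * 4 < 0 folds to x < 0,
   and x * -2 > 0 swaps the sense to x < 0 as well, because a negative
   multiplier flips the sign of the product.  */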
8543 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8544 if (tem)
8545 return tem;
8547 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8549 tree targ0 = strip_float_extensions (arg0);
8550 tree targ1 = strip_float_extensions (arg1);
8551 tree newtype = TREE_TYPE (targ0);
8553 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8554 newtype = TREE_TYPE (targ1);
8556 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8557 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8558 return fold_build2 (code, type, fold_convert (newtype, targ0),
8559 fold_convert (newtype, targ1));
8561 /* (-a) CMP (-b) -> b CMP a */
8562 if (TREE_CODE (arg0) == NEGATE_EXPR
8563 && TREE_CODE (arg1) == NEGATE_EXPR)
8564 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8565 TREE_OPERAND (arg0, 0));
8567 if (TREE_CODE (arg1) == REAL_CST)
8569 REAL_VALUE_TYPE cst;
8570 cst = TREE_REAL_CST (arg1);
8572 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8573 if (TREE_CODE (arg0) == NEGATE_EXPR)
8574 return fold_build2 (swap_tree_comparison (code), type,
8575 TREE_OPERAND (arg0, 0),
8576 build_real (TREE_TYPE (arg1),
8577 REAL_VALUE_NEGATE (cst)));
8579 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8580 /* a CMP (-0) -> a CMP 0 */
8581 if (REAL_VALUE_MINUS_ZERO (cst))
8582 return fold_build2 (code, type, arg0,
8583 build_real (TREE_TYPE (arg1), dconst0));
8585 /* x != NaN is always true, other ops are always false. */
8586 if (REAL_VALUE_ISNAN (cst)
8587 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8589 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8590 return omit_one_operand (type, tem, arg0);
8593 /* Fold comparisons against infinity. */
8594 if (REAL_VALUE_ISINF (cst))
8596 tem = fold_inf_compare (code, type, arg0, arg1);
8597 if (tem != NULL_TREE)
8598 return tem;
8602 /* If this is a comparison of a real constant with a PLUS_EXPR
8603 or a MINUS_EXPR of a real constant, we can convert it into a
8604 comparison with a revised real constant as long as no overflow
8605 occurs when unsafe_math_optimizations are enabled. */
8606 if (flag_unsafe_math_optimizations
8607 && TREE_CODE (arg1) == REAL_CST
8608 && (TREE_CODE (arg0) == PLUS_EXPR
8609 || TREE_CODE (arg0) == MINUS_EXPR)
8610 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8611 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8612 ? MINUS_EXPR : PLUS_EXPR,
8613 arg1, TREE_OPERAND (arg0, 1), 0))
8614 && !TREE_OVERFLOW (tem))
8615 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8617 /* Likewise, we can simplify a comparison of a real constant with
8618 a MINUS_EXPR whose first operand is also a real constant, i.e.
8619 (c1 - x) < c2 becomes x > c1-c2. */
8620 if (flag_unsafe_math_optimizations
8621 && TREE_CODE (arg1) == REAL_CST
8622 && TREE_CODE (arg0) == MINUS_EXPR
8623 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8624 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8625 arg1, 0))
8626 && !TREE_OVERFLOW (tem))
8627 return fold_build2 (swap_tree_comparison (code), type,
8628 TREE_OPERAND (arg0, 1), tem);
8630 /* Fold comparisons against built-in math functions. */
8631 if (TREE_CODE (arg1) == REAL_CST
8632 && flag_unsafe_math_optimizations
8633 && ! flag_errno_math)
8635 enum built_in_function fcode = builtin_mathfn_code (arg0);
8637 if (fcode != END_BUILTINS)
8639 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8640 if (tem != NULL_TREE)
8641 return tem;
8646 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8647 if (TREE_CONSTANT (arg1)
8648 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8649 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8650 /* This optimization is invalid for ordered comparisons
8651 if CONST+INCR overflows or if foo+incr might overflow.
8652 This optimization is invalid for floating point due to rounding.
8653 For pointer types we assume overflow doesn't happen. */
8654 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8655 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8656 && (code == EQ_EXPR || code == NE_EXPR))))
8658 tree varop, newconst;
8660 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8662 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8663 arg1, TREE_OPERAND (arg0, 1));
8664 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8665 TREE_OPERAND (arg0, 0),
8666 TREE_OPERAND (arg0, 1));
8668 else
8670 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8671 arg1, TREE_OPERAND (arg0, 1));
8672 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8673 TREE_OPERAND (arg0, 0),
8674 TREE_OPERAND (arg0, 1));
8678 /* If VAROP is a reference to a bitfield, we must mask
8679 the constant by the width of the field. */
8680 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8681 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8682 && host_integerp (DECL_SIZE (TREE_OPERAND
8683 (TREE_OPERAND (varop, 0), 1)), 1))
8685 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8686 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8687 tree folded_compare, shift;
8689 /* First check whether the comparison would come out
8690 always the same. If we don't do that we would
8691 change the meaning with the masking. */
8692 folded_compare = fold_build2 (code, type,
8693 TREE_OPERAND (varop, 0), arg1);
8694 if (TREE_CODE (folded_compare) == INTEGER_CST)
8695 return omit_one_operand (type, folded_compare, varop);
8697 shift = build_int_cst (NULL_TREE,
8698 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8699 shift = fold_convert (TREE_TYPE (varop), shift);
8700 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8701 newconst, shift);
8702 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8703 newconst, shift);
8706 return fold_build2 (code, type, varop, newconst);
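/* Example (integral foo, equality comparison): foo++ == 5 becomes
   ++foo == 6, so the increment and the comparison can share a single
   addition.  When foo is a bitfield, the shift pair above first
   truncates the new constant to the field's width so the rewritten
   comparison keeps its meaning.  */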
8709 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8710 && (TREE_CODE (arg0) == NOP_EXPR
8711 || TREE_CODE (arg0) == CONVERT_EXPR))
8713 /* If we are widening one operand of an integer comparison,
8714 see if the other operand is similarly being widened. Perhaps we
8715 can do the comparison in the narrower type. */
8716 tem = fold_widened_comparison (code, type, arg0, arg1);
8717 if (tem)
8718 return tem;
8720 /* Or if we are changing signedness. */
8721 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8722 if (tem)
8723 return tem;
8726 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8727 constant, we can simplify it. */
8728 if (TREE_CODE (arg1) == INTEGER_CST
8729 && (TREE_CODE (arg0) == MIN_EXPR
8730 || TREE_CODE (arg0) == MAX_EXPR)
8731 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8733 tem = optimize_minmax_comparison (code, type, op0, op1);
8734 if (tem)
8735 return tem;
8738 /* Simplify comparison of something with itself. (For IEEE
8739 floating-point, we can only do some of these simplifications.) */
8740 if (operand_equal_p (arg0, arg1, 0))
8742 switch (code)
8744 case EQ_EXPR:
8745 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8746 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8747 return constant_boolean_node (1, type);
8748 break;
8750 case GE_EXPR:
8751 case LE_EXPR:
8752 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8753 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8754 return constant_boolean_node (1, type);
8755 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8757 case NE_EXPR:
8758 /* For NE, we can only do this simplification if integer
8759 or we don't honor IEEE floating point NaNs. */
8760 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8761 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8762 break;
8763 /* ... fall through ... */
8764 case GT_EXPR:
8765 case LT_EXPR:
8766 return constant_boolean_node (0, type);
8767 default:
8768 gcc_unreachable ();
8772 /* If we are comparing an expression that just has comparisons
8773 of two integer values, arithmetic expressions of those comparisons,
8774 and constants, we can simplify it. There are only three cases
8775 to check: the two values can either be equal, the first can be
8776 greater, or the second can be greater. Fold the expression for
8777 those three values. Since each value must be 0 or 1, we have
8778 eight possibilities, each of which corresponds to the constant 0
8779 or 1 or one of the six possible comparisons.
8781 This handles common cases like (a > b) == 0 but also handles
8782 expressions like ((x > y) - (y > x)) > 0, which supposedly
8783 occur in macroized code. */
8785 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8787 tree cval1 = 0, cval2 = 0;
8788 int save_p = 0;
8790 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8791 /* Don't handle degenerate cases here; they should already
8792 have been handled anyway. */
8793 && cval1 != 0 && cval2 != 0
8794 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8795 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8796 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8797 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8798 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8799 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8800 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8802 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8803 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8805 /* We can't just pass T to eval_subst in case cval1 or cval2
8806 was the same as ARG1. */
8808 tree high_result
8809 = fold_build2 (code, type,
8810 eval_subst (arg0, cval1, maxval,
8811 cval2, minval),
8812 arg1);
8813 tree equal_result
8814 = fold_build2 (code, type,
8815 eval_subst (arg0, cval1, maxval,
8816 cval2, maxval),
8817 arg1);
8818 tree low_result
8819 = fold_build2 (code, type,
8820 eval_subst (arg0, cval1, minval,
8821 cval2, maxval),
8822 arg1);
8824 /* All three of these results should be 0 or 1. Confirm they are.
8825 Then use those values to select the proper code to use. */
8827 if (TREE_CODE (high_result) == INTEGER_CST
8828 && TREE_CODE (equal_result) == INTEGER_CST
8829 && TREE_CODE (low_result) == INTEGER_CST)
8831 /* Make a 3-bit mask with the high-order bit being the
8832 value for `>', the next for `=', and the low for `<'. */
8833 switch ((integer_onep (high_result) * 4)
8834 + (integer_onep (equal_result) * 2)
8835 + integer_onep (low_result))
8837 case 0:
8838 /* Always false. */
8839 return omit_one_operand (type, integer_zero_node, arg0);
8840 case 1:
8841 code = LT_EXPR;
8842 break;
8843 case 2:
8844 code = EQ_EXPR;
8845 break;
8846 case 3:
8847 code = LE_EXPR;
8848 break;
8849 case 4:
8850 code = GT_EXPR;
8851 break;
8852 case 5:
8853 code = NE_EXPR;
8854 break;
8855 case 6:
8856 code = GE_EXPR;
8857 break;
8858 case 7:
8859 /* Always true. */
8860 return omit_one_operand (type, integer_one_node, arg0);
8863 if (save_p)
8864 return save_expr (build2 (code, type, cval1, cval2));
8865 return fold_build2 (code, type, cval1, cval2);
8870 /* Fold a comparison of the address of COMPONENT_REFs with the same
8871 type and component to a comparison of the address of the base
8872 object. In short, &x->a OP &y->a to x OP y and
8873 &x->a OP &y.a to x OP &y */
8874 if (TREE_CODE (arg0) == ADDR_EXPR
8875 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8876 && TREE_CODE (arg1) == ADDR_EXPR
8877 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8879 tree cref0 = TREE_OPERAND (arg0, 0);
8880 tree cref1 = TREE_OPERAND (arg1, 0);
8881 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8883 tree op0 = TREE_OPERAND (cref0, 0);
8884 tree op1 = TREE_OPERAND (cref1, 0);
8885 return fold_build2 (code, type,
8886 build_fold_addr_expr (op0),
8887 build_fold_addr_expr (op1));
8891 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8892 into a single range test. */
8893 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8894 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8895 && TREE_CODE (arg1) == INTEGER_CST
8896 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8897 && !integer_zerop (TREE_OPERAND (arg0, 1))
8898 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8899 && !TREE_OVERFLOW (arg1))
8901 tem = fold_div_compare (code, type, arg0, arg1);
8902 if (tem != NULL_TREE)
8903 return tem;
8906 /* Fold ~X op ~Y as Y op X. */
8907 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8908 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8910 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8911 return fold_build2 (code, type,
8912 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8913 TREE_OPERAND (arg0, 0));
8916 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8917 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8918 && TREE_CODE (arg1) == INTEGER_CST)
8920 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8921 return fold_build2 (swap_tree_comparison (code), type,
8922 TREE_OPERAND (arg0, 0),
8923 fold_build1 (BIT_NOT_EXPR, cmp_type,
8924 fold_convert (cmp_type, arg1)));
8927 return NULL_TREE;
8931 /* Subroutine of fold_binary. Optimize complex multiplications of the
8932 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8933 argument EXPR represents the expression "z" of type TYPE. */
8935 static tree
8936 fold_mult_zconjz (tree type, tree expr)
8938 tree itype = TREE_TYPE (type);
8939 tree rpart, ipart, tem;
8941 if (TREE_CODE (expr) == COMPLEX_EXPR)
8943 rpart = TREE_OPERAND (expr, 0);
8944 ipart = TREE_OPERAND (expr, 1);
8946 else if (TREE_CODE (expr) == COMPLEX_CST)
8948 rpart = TREE_REALPART (expr);
8949 ipart = TREE_IMAGPART (expr);
8951 else
8953 expr = save_expr (expr);
8954 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8955 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8958 rpart = save_expr (rpart);
8959 ipart = save_expr (ipart);
8960 tem = fold_build2 (PLUS_EXPR, itype,
8961 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8962 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8963 return fold_build2 (COMPLEX_EXPR, type, tem,
8964 fold_convert (itype, integer_zero_node));
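/* The algebra behind fold_mult_zconjz: with z = a + b*i,
   z * conj (z) = (a + b*i) * (a - b*i) = a*a + b*b, a purely real
   value, so the folded result is COMPLEX_EXPR <a*a + b*b, 0>.  */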
8968 /* Fold a binary expression of code CODE and type TYPE with operands
8969 OP0 and OP1. Return the folded expression if folding is
8970 successful. Otherwise, return NULL_TREE. */
8972 tree
8973 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8975 enum tree_code_class kind = TREE_CODE_CLASS (code);
8976 tree arg0, arg1, tem;
8977 tree t1 = NULL_TREE;
8978 bool strict_overflow_p;
8980 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8981 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8982 && TREE_CODE_LENGTH (code) == 2
8983 && op0 != NULL_TREE
8984 && op1 != NULL_TREE);
8986 arg0 = op0;
8987 arg1 = op1;
8989 /* Strip any conversions that don't change the mode. This is
8990 safe for every expression, except for a comparison expression
8991 because its signedness is derived from its operands. So, in
8992 the latter case, only strip conversions that don't change the
8993 signedness.
8995 Note that this is done as an internal manipulation within the
8996 constant folder, in order to find the simplest representation
8997 of the arguments so that their form can be studied. In any
8998 case, the appropriate type conversions should be put back in
8999 the tree that will get out of the constant folder. */
9001 if (kind == tcc_comparison)
9003 STRIP_SIGN_NOPS (arg0);
9004 STRIP_SIGN_NOPS (arg1);
9006 else
9008 STRIP_NOPS (arg0);
9009 STRIP_NOPS (arg1);
9012 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9013 constant but we can't do arithmetic on them. */
9014 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9015 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9016 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9017 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9019 if (kind == tcc_binary)
9020 tem = const_binop (code, arg0, arg1, 0);
9021 else if (kind == tcc_comparison)
9022 tem = fold_relational_const (code, type, arg0, arg1);
9023 else
9024 tem = NULL_TREE;
9026 if (tem != NULL_TREE)
9028 if (TREE_TYPE (tem) != type)
9029 tem = fold_convert (type, tem);
9030 return tem;
9034 /* If this is a commutative operation, and ARG0 is a constant, move it
9035 to ARG1 to reduce the number of tests below. */
9036 if (commutative_tree_code (code)
9037 && tree_swap_operands_p (arg0, arg1, true))
9038 return fold_build2 (code, type, op1, op0);
9040 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9042 First check for cases where an arithmetic operation is applied to a
9043 compound, conditional, or comparison operation. Push the arithmetic
9044 operation inside the compound or conditional to see if any folding
9045 can then be done. Convert comparison to conditional for this purpose.
9046 This also optimizes non-constant cases that used to be done in
9047 expand_expr.
9049 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9050 where one of the operands is a comparison and the other is a comparison,
9051 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9052 code below would make the expression more complex. Change it to a
9053 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9054 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
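/* For example, (x < y) | (a == b) becomes
   TRUTH_OR_EXPR <x < y, a == b>, and (x < y) == (a == b) becomes
   the inversion of TRUTH_XOR_EXPR <x < y, a == b>.  */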
9056 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9057 || code == EQ_EXPR || code == NE_EXPR)
9058 && ((truth_value_p (TREE_CODE (arg0))
9059 && (truth_value_p (TREE_CODE (arg1))
9060 || (TREE_CODE (arg1) == BIT_AND_EXPR
9061 && integer_onep (TREE_OPERAND (arg1, 1)))))
9062 || (truth_value_p (TREE_CODE (arg1))
9063 && (truth_value_p (TREE_CODE (arg0))
9064 || (TREE_CODE (arg0) == BIT_AND_EXPR
9065 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9067 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9068 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9069 : TRUTH_XOR_EXPR,
9070 boolean_type_node,
9071 fold_convert (boolean_type_node, arg0),
9072 fold_convert (boolean_type_node, arg1));
9074 if (code == EQ_EXPR)
9075 tem = invert_truthvalue (tem);
9077 return fold_convert (type, tem);
9080 if (TREE_CODE_CLASS (code) == tcc_binary
9081 || TREE_CODE_CLASS (code) == tcc_comparison)
9083 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9084 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9085 fold_build2 (code, type,
9086 TREE_OPERAND (arg0, 1), op1));
9087 if (TREE_CODE (arg1) == COMPOUND_EXPR
9088 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9089 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9090 fold_build2 (code, type,
9091 op0, TREE_OPERAND (arg1, 1)));
9093 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9095 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9096 arg0, arg1,
9097 /*cond_first_p=*/1);
9098 if (tem != NULL_TREE)
9099 return tem;
9102 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9104 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9105 arg1, arg0,
9106 /*cond_first_p=*/0);
9107 if (tem != NULL_TREE)
9108 return tem;
9112 switch (code)
9114 case PLUS_EXPR:
9115 /* A + (-B) -> A - B */
9116 if (TREE_CODE (arg1) == NEGATE_EXPR)
9117 return fold_build2 (MINUS_EXPR, type,
9118 fold_convert (type, arg0),
9119 fold_convert (type, TREE_OPERAND (arg1, 0)));
9120 /* (-A) + B -> B - A */
9121 if (TREE_CODE (arg0) == NEGATE_EXPR
9122 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9123 return fold_build2 (MINUS_EXPR, type,
9124 fold_convert (type, arg1),
9125 fold_convert (type, TREE_OPERAND (arg0, 0)));
9126 /* Convert ~A + 1 to -A. */
9127 if (INTEGRAL_TYPE_P (type)
9128 && TREE_CODE (arg0) == BIT_NOT_EXPR
9129 && integer_onep (arg1))
9130 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
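/* (In two's complement arithmetic ~A == -A - 1, so ~A + 1 == -A.)  */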
9132 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9133 same or one. */
9134 if ((TREE_CODE (arg0) == MULT_EXPR
9135 || TREE_CODE (arg1) == MULT_EXPR)
9136 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9138 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9139 if (tem)
9140 return tem;
9143 if (! FLOAT_TYPE_P (type))
9145 if (integer_zerop (arg1))
9146 return non_lvalue (fold_convert (type, arg0));
9148 /* ~X + X is -1. */
9149 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9150 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9151 && !TYPE_OVERFLOW_TRAPS (type))
9153 t1 = build_int_cst_type (type, -1);
9154 return omit_one_operand (type, t1, arg1);
9157 /* X + ~X is -1. */
9158 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9159 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9160 && !TYPE_OVERFLOW_TRAPS (type))
9162 t1 = build_int_cst_type (type, -1);
9163 return omit_one_operand (type, t1, arg0);
9166 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9167 with a constant, and the two constants have no bits in common,
9168 we should treat this as a BIT_IOR_EXPR since this may produce more
9169 simplifications. */
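/* E.g. (x & 0xF0) + (y & 0x0F) can produce no carry out of any bit
   position, so it is equivalent to (x & 0xF0) | (y & 0x0F).  */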
9170 if (TREE_CODE (arg0) == BIT_AND_EXPR
9171 && TREE_CODE (arg1) == BIT_AND_EXPR
9172 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9173 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9174 && integer_zerop (const_binop (BIT_AND_EXPR,
9175 TREE_OPERAND (arg0, 1),
9176 TREE_OPERAND (arg1, 1), 0)))
9178 code = BIT_IOR_EXPR;
9179 goto bit_ior;
9182 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9183 (plus (plus (mult) (mult)) (foo)) so that we can
9184 take advantage of the factoring cases below. */
9185 if (((TREE_CODE (arg0) == PLUS_EXPR
9186 || TREE_CODE (arg0) == MINUS_EXPR)
9187 && TREE_CODE (arg1) == MULT_EXPR)
9188 || ((TREE_CODE (arg1) == PLUS_EXPR
9189 || TREE_CODE (arg1) == MINUS_EXPR)
9190 && TREE_CODE (arg0) == MULT_EXPR))
9192 tree parg0, parg1, parg, marg;
9193 enum tree_code pcode;
9195 if (TREE_CODE (arg1) == MULT_EXPR)
9196 parg = arg0, marg = arg1;
9197 else
9198 parg = arg1, marg = arg0;
9199 pcode = TREE_CODE (parg);
9200 parg0 = TREE_OPERAND (parg, 0);
9201 parg1 = TREE_OPERAND (parg, 1);
9202 STRIP_NOPS (parg0);
9203 STRIP_NOPS (parg1);
9205 if (TREE_CODE (parg0) == MULT_EXPR
9206 && TREE_CODE (parg1) != MULT_EXPR)
9207 return fold_build2 (pcode, type,
9208 fold_build2 (PLUS_EXPR, type,
9209 fold_convert (type, parg0),
9210 fold_convert (type, marg)),
9211 fold_convert (type, parg1));
9212 if (TREE_CODE (parg0) != MULT_EXPR
9213 && TREE_CODE (parg1) == MULT_EXPR)
9214 return fold_build2 (PLUS_EXPR, type,
9215 fold_convert (type, parg0),
9216 fold_build2 (pcode, type,
9217 fold_convert (type, marg),
9218 fold_convert (type,
9219 parg1)));
9222 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
9223 of the array. The loop optimizer sometimes produces this type of
9224 expression. */
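/* E.g. with int a[] and 4-byte ints, &a[i1] + 4 * i2 becomes
   &a[i1 + i2].  */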
9225 if (TREE_CODE (arg0) == ADDR_EXPR)
9227 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9228 if (tem)
9229 return fold_convert (type, tem);
9231 else if (TREE_CODE (arg1) == ADDR_EXPR)
9233 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9234 if (tem)
9235 return fold_convert (type, tem);
9238 else
9240 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9241 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9242 return non_lvalue (fold_convert (type, arg0));
9244 /* Likewise if the operands are reversed. */
9245 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9246 return non_lvalue (fold_convert (type, arg1));
9248 /* Convert X + -C into X - C. */
9249 if (TREE_CODE (arg1) == REAL_CST
9250 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9252 tem = fold_negate_const (arg1, type);
9253 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9254 return fold_build2 (MINUS_EXPR, type,
9255 fold_convert (type, arg0),
9256 fold_convert (type, tem));
9259 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9260 to __complex__ ( x, y ). This is not the same for SNaNs or
9261 if signed zeros are involved. */
9262 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9263 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9264 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9266 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9267 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9268 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9269 bool arg0rz = false, arg0iz = false;
9270 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9271 || (arg0i && (arg0iz = real_zerop (arg0i))))
9273 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9274 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9275 if (arg0rz && arg1i && real_zerop (arg1i))
9277 tree rp = arg1r ? arg1r
9278 : build1 (REALPART_EXPR, rtype, arg1);
9279 tree ip = arg0i ? arg0i
9280 : build1 (IMAGPART_EXPR, rtype, arg0);
9281 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9283 else if (arg0iz && arg1r && real_zerop (arg1r))
9285 tree rp = arg0r ? arg0r
9286 : build1 (REALPART_EXPR, rtype, arg0);
9287 tree ip = arg1i ? arg1i
9288 : build1 (IMAGPART_EXPR, rtype, arg1);
9289 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9294 if (flag_unsafe_math_optimizations
9295 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9296 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9297 && (tem = distribute_real_division (code, type, arg0, arg1)))
9298 return tem;
9300 /* Convert x+x into x*2.0. */
9301 if (operand_equal_p (arg0, arg1, 0)
9302 && SCALAR_FLOAT_TYPE_P (type))
9303 return fold_build2 (MULT_EXPR, type, arg0,
9304 build_real (type, dconst2));
9306 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9307 if (flag_unsafe_math_optimizations
9308 && TREE_CODE (arg1) == PLUS_EXPR
9309 && TREE_CODE (arg0) != MULT_EXPR)
9311 tree tree10 = TREE_OPERAND (arg1, 0);
9312 tree tree11 = TREE_OPERAND (arg1, 1);
9313 if (TREE_CODE (tree11) == MULT_EXPR
9314 && TREE_CODE (tree10) == MULT_EXPR)
9316 tree tree0;
9317 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9318 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9321 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
9322 if (flag_unsafe_math_optimizations
9323 && TREE_CODE (arg0) == PLUS_EXPR
9324 && TREE_CODE (arg1) != MULT_EXPR)
9326 tree tree00 = TREE_OPERAND (arg0, 0);
9327 tree tree01 = TREE_OPERAND (arg0, 1);
9328 if (TREE_CODE (tree01) == MULT_EXPR
9329 && TREE_CODE (tree00) == MULT_EXPR)
9331 tree tree0;
9332 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9333 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9338 bit_rotate:
9339 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9340 is a rotate of A by C1 bits. */
9341 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9342 is a rotate of A by B bits. */
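/* E.g. for 32-bit unsigned A, (A << 3) + (A >> 29) is A rotated left
   by 3, and (A << B) + (A >> (32 - B)) is A rotated left by B.  */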
9344 enum tree_code code0, code1;
9345 code0 = TREE_CODE (arg0);
9346 code1 = TREE_CODE (arg1);
9347 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9348 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9349 && operand_equal_p (TREE_OPERAND (arg0, 0),
9350 TREE_OPERAND (arg1, 0), 0)
9351 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9353 tree tree01, tree11;
9354 enum tree_code code01, code11;
9356 tree01 = TREE_OPERAND (arg0, 1);
9357 tree11 = TREE_OPERAND (arg1, 1);
9358 STRIP_NOPS (tree01);
9359 STRIP_NOPS (tree11);
9360 code01 = TREE_CODE (tree01);
9361 code11 = TREE_CODE (tree11);
9362 if (code01 == INTEGER_CST
9363 && code11 == INTEGER_CST
9364 && TREE_INT_CST_HIGH (tree01) == 0
9365 && TREE_INT_CST_HIGH (tree11) == 0
9366 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9367 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9368 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9369 code0 == LSHIFT_EXPR ? tree01 : tree11);
9370 else if (code11 == MINUS_EXPR)
9372 tree tree110, tree111;
9373 tree110 = TREE_OPERAND (tree11, 0);
9374 tree111 = TREE_OPERAND (tree11, 1);
9375 STRIP_NOPS (tree110);
9376 STRIP_NOPS (tree111);
9377 if (TREE_CODE (tree110) == INTEGER_CST
9378 && 0 == compare_tree_int (tree110,
9379 TYPE_PRECISION
9380 (TREE_TYPE (TREE_OPERAND
9381 (arg0, 0))))
9382 && operand_equal_p (tree01, tree111, 0))
9383 return build2 ((code0 == LSHIFT_EXPR
9384 ? LROTATE_EXPR
9385 : RROTATE_EXPR),
9386 type, TREE_OPERAND (arg0, 0), tree01);
9388 else if (code01 == MINUS_EXPR)
9390 tree tree010, tree011;
9391 tree010 = TREE_OPERAND (tree01, 0);
9392 tree011 = TREE_OPERAND (tree01, 1);
9393 STRIP_NOPS (tree010);
9394 STRIP_NOPS (tree011);
9395 if (TREE_CODE (tree010) == INTEGER_CST
9396 && 0 == compare_tree_int (tree010,
9397 TYPE_PRECISION
9398 (TREE_TYPE (TREE_OPERAND
9399 (arg0, 0))))
9400 && operand_equal_p (tree11, tree011, 0))
9401 return build2 ((code0 != LSHIFT_EXPR
9402 ? LROTATE_EXPR
9403 : RROTATE_EXPR),
9404 type, TREE_OPERAND (arg0, 0), tree11);
9409 associate:
9410 /* In most languages, we can't associate operations on floats through
9411 parentheses. Rather than remember where the parentheses were, we
9412 don't associate floats at all, unless the user has specified
9413 -funsafe-math-optimizations. */
9415 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9417 tree var0, con0, lit0, minus_lit0;
9418 tree var1, con1, lit1, minus_lit1;
9419 bool ok = true;
9421 /* Split both trees into variables, constants, and literals. Then
9422 associate each group together, the constants with literals,
9423 then the result with variables. This increases the chances of
9424 literals being recombined later and of generating relocatable
9425 expressions for the sum of a constant and literal. */
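/* E.g. (x + 1) + (y + 2) splits into the variables x, y and the
   literals 1, 2, and is rebuilt as (x + y) + 3 so the literals fold.  */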
9426 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9427 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9428 code == MINUS_EXPR);
9430 /* With undefined overflow we can only associate constants
9431 with one variable. */
9432 if ((POINTER_TYPE_P (type)
9433 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9434 && var0 && var1)
9436 tree tmp0 = var0;
9437 tree tmp1 = var1;
9439 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9440 tmp0 = TREE_OPERAND (tmp0, 0);
9441 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9442 tmp1 = TREE_OPERAND (tmp1, 0);
9443 /* The only case we can still associate with two variables
9444 is if they are the same, modulo negation. */
9445 if (!operand_equal_p (tmp0, tmp1, 0))
9446 ok = false;
9449 /* Only do something if we found more than two objects. Otherwise,
9450 nothing has changed and we risk infinite recursion. */
9451 if (ok
9452 && (2 < ((var0 != 0) + (var1 != 0)
9453 + (con0 != 0) + (con1 != 0)
9454 + (lit0 != 0) + (lit1 != 0)
9455 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9457 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9458 if (code == MINUS_EXPR)
9459 code = PLUS_EXPR;
9461 var0 = associate_trees (var0, var1, code, type);
9462 con0 = associate_trees (con0, con1, code, type);
9463 lit0 = associate_trees (lit0, lit1, code, type);
9464 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9466 /* Preserve the MINUS_EXPR if the negative part of the literal is
9467 greater than the positive part. Otherwise, the multiplicative
9468 folding code (i.e. extract_muldiv) may be fooled when
9469 unsigned constants are subtracted, as in the following
9470 example: ((X*2 + 4) - 8U)/2. */
9471 if (minus_lit0 && lit0)
9473 if (TREE_CODE (lit0) == INTEGER_CST
9474 && TREE_CODE (minus_lit0) == INTEGER_CST
9475 && tree_int_cst_lt (lit0, minus_lit0))
9477 minus_lit0 = associate_trees (minus_lit0, lit0,
9478 MINUS_EXPR, type);
9479 lit0 = 0;
9481 else
9483 lit0 = associate_trees (lit0, minus_lit0,
9484 MINUS_EXPR, type);
9485 minus_lit0 = 0;
9488 if (minus_lit0)
9490 if (con0 == 0)
9491 return fold_convert (type,
9492 associate_trees (var0, minus_lit0,
9493 MINUS_EXPR, type));
9494 else
9496 con0 = associate_trees (con0, minus_lit0,
9497 MINUS_EXPR, type);
9498 return fold_convert (type,
9499 associate_trees (var0, con0,
9500 PLUS_EXPR, type));
9504 con0 = associate_trees (con0, lit0, code, type);
9505 return fold_convert (type, associate_trees (var0, con0,
9506 code, type));
9510 return NULL_TREE;
9512 case MINUS_EXPR:
9513 /* A - (-B) -> A + B */
9514 if (TREE_CODE (arg1) == NEGATE_EXPR)
9515 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9516 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9517 if (TREE_CODE (arg0) == NEGATE_EXPR
9518 && (FLOAT_TYPE_P (type)
9519 || INTEGRAL_TYPE_P (type))
9520 && negate_expr_p (arg1)
9521 && reorder_operands_p (arg0, arg1))
9522 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9523 TREE_OPERAND (arg0, 0));
9524 /* Convert -A - 1 to ~A. */
9525 if (INTEGRAL_TYPE_P (type)
9526 && TREE_CODE (arg0) == NEGATE_EXPR
9527 && integer_onep (arg1)
9528 && !TYPE_OVERFLOW_TRAPS (type))
9529 return fold_build1 (BIT_NOT_EXPR, type,
9530 fold_convert (type, TREE_OPERAND (arg0, 0)));
9532 /* Convert -1 - A to ~A. */
9533 if (INTEGRAL_TYPE_P (type)
9534 && integer_all_onesp (arg0))
9535 return fold_build1 (BIT_NOT_EXPR, type, op1);
9537 if (! FLOAT_TYPE_P (type))
9539 if (integer_zerop (arg0))
9540 return negate_expr (fold_convert (type, arg1));
9541 if (integer_zerop (arg1))
9542 return non_lvalue (fold_convert (type, arg0));
9544 /* Fold A - (A & B) into ~B & A. */
9545 if (!TREE_SIDE_EFFECTS (arg0)
9546 && TREE_CODE (arg1) == BIT_AND_EXPR)
9548 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9549 return fold_build2 (BIT_AND_EXPR, type,
9550 fold_build1 (BIT_NOT_EXPR, type,
9551 TREE_OPERAND (arg1, 0)),
9552 arg0);
9553 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9554 return fold_build2 (BIT_AND_EXPR, type,
9555 fold_build1 (BIT_NOT_EXPR, type,
9556 TREE_OPERAND (arg1, 1)),
9557 arg0);
9560 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9561 any power of 2 minus 1. */
9562 if (TREE_CODE (arg0) == BIT_AND_EXPR
9563 && TREE_CODE (arg1) == BIT_AND_EXPR
9564 && operand_equal_p (TREE_OPERAND (arg0, 0),
9565 TREE_OPERAND (arg1, 0), 0))
9567 tree mask0 = TREE_OPERAND (arg0, 1);
9568 tree mask1 = TREE_OPERAND (arg1, 1);
9569 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9571 if (operand_equal_p (tem, mask1, 0))
9573 tem = fold_build2 (BIT_XOR_EXPR, type,
9574 TREE_OPERAND (arg0, 0), mask1);
9575 return fold_build2 (MINUS_EXPR, type, tem, mask1);
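/* E.g. with A == 10 (0b1010) and B == 3 (0b0011):
   (A & ~B) - (A & B) == 8 - 2 == 6, and (A ^ B) - B == 9 - 3 == 6.  */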
9580 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9581 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9582 return non_lvalue (fold_convert (type, arg0));
9584 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9585 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9586 (-ARG1 + ARG0) reduces to -ARG1. */
9587 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9588 return negate_expr (fold_convert (type, arg1));
9590 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9591 __complex__ ( x, -y ). This is not the same for SNaNs or if
9592 signed zeros are involved. */
9593 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9594 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9595 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9597 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9598 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9599 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9600 bool arg0rz = false, arg0iz = false;
9601 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9602 || (arg0i && (arg0iz = real_zerop (arg0i))))
9604 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9605 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9606 if (arg0rz && arg1i && real_zerop (arg1i))
9608 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9609 arg1r ? arg1r
9610 : build1 (REALPART_EXPR, rtype, arg1));
9611 tree ip = arg0i ? arg0i
9612 : build1 (IMAGPART_EXPR, rtype, arg0);
9613 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9615 else if (arg0iz && arg1r && real_zerop (arg1r))
9617 tree rp = arg0r ? arg0r
9618 : build1 (REALPART_EXPR, rtype, arg0);
9619 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9620 arg1i ? arg1i
9621 : build1 (IMAGPART_EXPR, rtype, arg1));
9622 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9627 /* Fold &x - &x. This can happen from &x.foo - &x.
9628 This is unsafe for certain floats even in non-IEEE formats.
9629 In IEEE, it is unsafe because it does the wrong thing for NaNs.
9630 Also note that operand_equal_p is always false if an operand
9631 is volatile. */
9633 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9634 && operand_equal_p (arg0, arg1, 0))
9635 return fold_convert (type, integer_zero_node);
9637 /* A - B -> A + (-B) if B is easily negatable. */
9638 if (negate_expr_p (arg1)
9639 && ((FLOAT_TYPE_P (type)
9640 /* Avoid this transformation if B is a positive REAL_CST. */
9641 && (TREE_CODE (arg1) != REAL_CST
9642 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9643 || INTEGRAL_TYPE_P (type)))
9644 return fold_build2 (PLUS_EXPR, type,
9645 fold_convert (type, arg0),
9646 fold_convert (type, negate_expr (arg1)));
9648 /* Try folding difference of addresses. */
9650 HOST_WIDE_INT diff;
9652 if ((TREE_CODE (arg0) == ADDR_EXPR
9653 || TREE_CODE (arg1) == ADDR_EXPR)
9654 && ptr_difference_const (arg0, arg1, &diff))
9655 return build_int_cst_type (type, diff);
9658 /* Fold &a[i] - &a[j] to i-j. */
9659 if (TREE_CODE (arg0) == ADDR_EXPR
9660 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9661 && TREE_CODE (arg1) == ADDR_EXPR
9662 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9664 tree aref0 = TREE_OPERAND (arg0, 0);
9665 tree aref1 = TREE_OPERAND (arg1, 0);
9666 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9667 TREE_OPERAND (aref1, 0), 0))
9669 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9670 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9671 tree esz = array_ref_element_size (aref0);
9672 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9673 return fold_build2 (MULT_EXPR, type, diff,
9674 fold_convert (type, esz));
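/* (The MINUS_EXPR here is a byte difference, which is why the index
   difference is scaled by the element size.)  */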
9679 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9680 of the array. The loop optimizer sometimes produces this type of
9681 expression. */
9682 if (TREE_CODE (arg0) == ADDR_EXPR)
9684 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9685 if (tem)
9686 return fold_convert (type, tem);
9689 if (flag_unsafe_math_optimizations
9690 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9691 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9692 && (tem = distribute_real_division (code, type, arg0, arg1)))
9693 return tem;
9695 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9696 same or one. */
9697 if ((TREE_CODE (arg0) == MULT_EXPR
9698 || TREE_CODE (arg1) == MULT_EXPR)
9699 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9701 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9702 if (tem)
9703 return tem;
9706 goto associate;
9708 case MULT_EXPR:
9709 /* (-A) * (-B) -> A * B */
9710 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9711 return fold_build2 (MULT_EXPR, type,
9712 fold_convert (type, TREE_OPERAND (arg0, 0)),
9713 fold_convert (type, negate_expr (arg1)));
9714 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9715 return fold_build2 (MULT_EXPR, type,
9716 fold_convert (type, negate_expr (arg0)),
9717 fold_convert (type, TREE_OPERAND (arg1, 0)));
9719 if (! FLOAT_TYPE_P (type))
9721 if (integer_zerop (arg1))
9722 return omit_one_operand (type, arg1, arg0);
9723 if (integer_onep (arg1))
9724 return non_lvalue (fold_convert (type, arg0));
9725 /* Transform x * -1 into -x. */
9726 if (integer_all_onesp (arg1))
9727 return fold_convert (type, negate_expr (arg0));
9728 /* Transform x * -C into -x * C if x is easily negatable. */
9729 if (TREE_CODE (arg1) == INTEGER_CST
9730 && tree_int_cst_sgn (arg1) == -1
9731 && negate_expr_p (arg0)
9732 && (tem = negate_expr (arg1)) != arg1
9733 && !TREE_OVERFLOW (tem))
9734 return fold_build2 (MULT_EXPR, type,
9735 negate_expr (arg0), tem);
9737 /* (a * (1 << b)) is (a << b) */
9738 if (TREE_CODE (arg1) == LSHIFT_EXPR
9739 && integer_onep (TREE_OPERAND (arg1, 0)))
9740 return fold_build2 (LSHIFT_EXPR, type, arg0,
9741 TREE_OPERAND (arg1, 1));
9742 if (TREE_CODE (arg0) == LSHIFT_EXPR
9743 && integer_onep (TREE_OPERAND (arg0, 0)))
9744 return fold_build2 (LSHIFT_EXPR, type, arg1,
9745 TREE_OPERAND (arg0, 1));
9747 strict_overflow_p = false;
9748 if (TREE_CODE (arg1) == INTEGER_CST
9749 && 0 != (tem = extract_muldiv (op0,
9750 fold_convert (type, arg1),
9751 code, NULL_TREE,
9752 &strict_overflow_p)))
9754 if (strict_overflow_p)
9755 fold_overflow_warning (("assuming signed overflow does not "
9756 "occur when simplifying "
9757 "multiplication"),
9758 WARN_STRICT_OVERFLOW_MISC);
9759 return fold_convert (type, tem);
9762 /* Optimize z * conj(z) for integer complex numbers. */
9763 if (TREE_CODE (arg0) == CONJ_EXPR
9764 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9765 return fold_mult_zconjz (type, arg1);
9766 if (TREE_CODE (arg1) == CONJ_EXPR
9767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9768 return fold_mult_zconjz (type, arg0);
9770 else
9772 /* Maybe fold x * 0 to 0. The expressions aren't the same
9773 when x is NaN, since x * 0 is also NaN. Nor are they the
9774 same in modes with signed zeros, since multiplying a
9775 negative value by 0 gives -0, not +0. */
9776 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9777 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9778 && real_zerop (arg1))
9779 return omit_one_operand (type, arg1, arg0);
9780 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs. */
9781 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9782 && real_onep (arg1))
9783 return non_lvalue (fold_convert (type, arg0));
9785 /* Transform x * -1.0 into -x. */
9786 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9787 && real_minus_onep (arg1))
9788 return fold_convert (type, negate_expr (arg0));
9790 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9791 if (flag_unsafe_math_optimizations
9792 && TREE_CODE (arg0) == RDIV_EXPR
9793 && TREE_CODE (arg1) == REAL_CST
9794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9796 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9797 arg1, 0);
9798 if (tem)
9799 return fold_build2 (RDIV_EXPR, type, tem,
9800 TREE_OPERAND (arg0, 1));
9803 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9804 if (operand_equal_p (arg0, arg1, 0))
9806 tree tem = fold_strip_sign_ops (arg0);
9807 if (tem != NULL_TREE)
9809 tem = fold_convert (type, tem);
9810 return fold_build2 (MULT_EXPR, type, tem, tem);
9814 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9815 This is not the same for NaNs or if signed zeros are
9816 involved. */
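/* With z = a + b*i, z * I == -b + a*i and z * -I == b - a*i,
   matching the two cases below.  */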
9817 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9819 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9820 && TREE_CODE (arg1) == COMPLEX_CST
9821 && real_zerop (TREE_REALPART (arg1)))
9823 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9824 if (real_onep (TREE_IMAGPART (arg1)))
9825 return fold_build2 (COMPLEX_EXPR, type,
9826 negate_expr (fold_build1 (IMAGPART_EXPR,
9827 rtype, arg0)),
9828 fold_build1 (REALPART_EXPR, rtype, arg0));
9829 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9830 return fold_build2 (COMPLEX_EXPR, type,
9831 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9832 negate_expr (fold_build1 (REALPART_EXPR,
9833 rtype, arg0)));
9836 /* Optimize z * conj(z) for floating point complex numbers.
9837 Guarded by flag_unsafe_math_optimizations as non-finite
9838 imaginary components don't produce scalar results. */
9839 if (flag_unsafe_math_optimizations
9840 && TREE_CODE (arg0) == CONJ_EXPR
9841 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9842 return fold_mult_zconjz (type, arg1);
9843 if (flag_unsafe_math_optimizations
9844 && TREE_CODE (arg1) == CONJ_EXPR
9845 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9846 return fold_mult_zconjz (type, arg0);
9848 if (flag_unsafe_math_optimizations)
9850 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9851 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9853 /* Optimizations of root(...)*root(...). */
9854 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9856 tree rootfn, arg;
9857 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9858 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9860 /* Optimize sqrt(x)*sqrt(x) as x. */
9861 if (BUILTIN_SQRT_P (fcode0)
9862 && operand_equal_p (arg00, arg10, 0)
9863 && ! HONOR_SNANS (TYPE_MODE (type)))
9864 return arg00;
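/* (This relies on the flag_unsafe_math_optimizations guard above:
   for negative x, sqrt(x)*sqrt(x) is NaN rather than x.)  */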
9866 /* Optimize root(x)*root(y) as root(x*y). */
9867 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9868 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9869 return build_call_expr (rootfn, 1, arg);
9872 /* Optimize expN(x)*expN(y) as expN(x+y). */
9873 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9875 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9876 tree arg = fold_build2 (PLUS_EXPR, type,
9877 CALL_EXPR_ARG (arg0, 0),
9878 CALL_EXPR_ARG (arg1, 0));
9879 return build_call_expr (expfn, 1, arg);
9882 /* Optimizations of pow(...)*pow(...). */
9883 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9884 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9885 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9887 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9888 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9889 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9890 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9892 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9893 if (operand_equal_p (arg01, arg11, 0))
9895 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9896 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9897 return build_call_expr (powfn, 2, arg, arg01);
9900 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9901 if (operand_equal_p (arg00, arg10, 0))
9903 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9904 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9905 return build_call_expr (powfn, 2, arg00, arg);
9909 /* Optimize tan(x)*cos(x) as sin(x). */
9910 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9911 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9912 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9913 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9914 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9915 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9916 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9917 CALL_EXPR_ARG (arg1, 0), 0))
9919 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9921 if (sinfn != NULL_TREE)
9922 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9925 /* Optimize x*pow(x,c) as pow(x,c+1). */
9926 if (fcode1 == BUILT_IN_POW
9927 || fcode1 == BUILT_IN_POWF
9928 || fcode1 == BUILT_IN_POWL)
9930 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9931 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9932 if (TREE_CODE (arg11) == REAL_CST
9933 && !TREE_OVERFLOW (arg11)
9934 && operand_equal_p (arg0, arg10, 0))
9936 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9937 REAL_VALUE_TYPE c;
9938 tree arg;
9940 c = TREE_REAL_CST (arg11);
9941 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9942 arg = build_real (type, c);
9943 return build_call_expr (powfn, 2, arg0, arg);
9947 /* Optimize pow(x,c)*x as pow(x,c+1). */
9948 if (fcode0 == BUILT_IN_POW
9949 || fcode0 == BUILT_IN_POWF
9950 || fcode0 == BUILT_IN_POWL)
9952 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9953 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9954 if (TREE_CODE (arg01) == REAL_CST
9955 && !TREE_OVERFLOW (arg01)
9956 && operand_equal_p (arg1, arg00, 0))
9958 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9959 REAL_VALUE_TYPE c;
9960 tree arg;
9962 c = TREE_REAL_CST (arg01);
9963 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9964 arg = build_real (type, c);
9965 return build_call_expr (powfn, 2, arg1, arg);
9969 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9970 if (! optimize_size
9971 && operand_equal_p (arg0, arg1, 0))
9973 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9975 if (powfn)
9977 tree arg = build_real (type, dconst2);
9978 return build_call_expr (powfn, 2, arg0, arg);
9983 goto associate;
9985 case BIT_IOR_EXPR:
9986 bit_ior:
9987 if (integer_all_onesp (arg1))
9988 return omit_one_operand (type, arg1, arg0);
9989 if (integer_zerop (arg1))
9990 return non_lvalue (fold_convert (type, arg0));
9991 if (operand_equal_p (arg0, arg1, 0))
9992 return non_lvalue (fold_convert (type, arg0));
9994 /* ~X | X is -1. */
9995 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9998 t1 = build_int_cst_type (type, -1);
9999 return omit_one_operand (type, t1, arg1);
10002 /* X | ~X is -1. */
10003 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10004 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10006 t1 = build_int_cst_type (type, -1);
10007 return omit_one_operand (type, t1, arg0);
10010 /* Canonicalize (X & C1) | C2. */
10011 if (TREE_CODE (arg0) == BIT_AND_EXPR
10012 && TREE_CODE (arg1) == INTEGER_CST
10013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10015 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10016 int width = TYPE_PRECISION (type);
10017 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10018 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10019 hi2 = TREE_INT_CST_HIGH (arg1);
10020 lo2 = TREE_INT_CST_LOW (arg1);
10022 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10023 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10024 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10026 if (width > HOST_BITS_PER_WIDE_INT)
10028 mhi = (unsigned HOST_WIDE_INT) -1
10029 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10030 mlo = -1;
10032 else
10034 mhi = 0;
10035 mlo = (unsigned HOST_WIDE_INT) -1
10036 >> (HOST_BITS_PER_WIDE_INT - width);
10039 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10040 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10041 return fold_build2 (BIT_IOR_EXPR, type,
10042 TREE_OPERAND (arg0, 0), arg1);
10044 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10045 hi1 &= mhi;
10046 lo1 &= mlo;
10047 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10048 return fold_build2 (BIT_IOR_EXPR, type,
10049 fold_build2 (BIT_AND_EXPR, type,
10050 TREE_OPERAND (arg0, 0),
10051 build_int_cst_wide (type,
10052 lo1 & ~lo2,
10053 hi1 & ~hi2)),
10054 arg1);
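/* E.g. (x & 0x0F) | 0x05 becomes (x & 0x0A) | 0x05 here, since
   C1 & ~C2 == 0x0A drops the bits of C1 that are already set in C2.  */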
10057 /* (X & Y) | Y is (X, Y). */
10058 if (TREE_CODE (arg0) == BIT_AND_EXPR
10059 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10060 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10061 /* (X & Y) | X is (Y, X). */
10062 if (TREE_CODE (arg0) == BIT_AND_EXPR
10063 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10064 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10065 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10066 /* X | (X & Y) is (Y, X). */
10067 if (TREE_CODE (arg1) == BIT_AND_EXPR
10068 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10069 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10070 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10071 /* X | (Y & X) is (Y, X). */
10072 if (TREE_CODE (arg1) == BIT_AND_EXPR
10073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10074 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10075 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10077 t1 = distribute_bit_expr (code, type, arg0, arg1);
10078 if (t1 != NULL_TREE)
10079 return t1;
10081 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10083 This results in more efficient code for machines without a NAND
10084 instruction. Combine will canonicalize to the first form
10085 which will allow use of NAND instructions provided by the
10086 backend if they exist. */
10087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10088 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10090 return fold_build1 (BIT_NOT_EXPR, type,
10091 build2 (BIT_AND_EXPR, type,
10092 TREE_OPERAND (arg0, 0),
10093 TREE_OPERAND (arg1, 0)));
10096 /* See if this can be simplified into a rotate first. If that
10097 is unsuccessful continue in the association code. */
10098 goto bit_rotate;
10100 case BIT_XOR_EXPR:
10101 if (integer_zerop (arg1))
10102 return non_lvalue (fold_convert (type, arg0));
10103 if (integer_all_onesp (arg1))
10104 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10105 if (operand_equal_p (arg0, arg1, 0))
10106 return omit_one_operand (type, integer_zero_node, arg0);
10108 /* ~X ^ X is -1. */
10109 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10110 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10112 t1 = build_int_cst_type (type, -1);
10113 return omit_one_operand (type, t1, arg1);
10116 /* X ^ ~X is -1. */
10117 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10118 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10120 t1 = build_int_cst_type (type, -1);
10121 return omit_one_operand (type, t1, arg0);
10124 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10125 with a constant, and the two constants have no bits in common,
10126 we should treat this as a BIT_IOR_EXPR since this may produce more
10127 simplifications. */
10128 if (TREE_CODE (arg0) == BIT_AND_EXPR
10129 && TREE_CODE (arg1) == BIT_AND_EXPR
10130 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10131 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10132 && integer_zerop (const_binop (BIT_AND_EXPR,
10133 TREE_OPERAND (arg0, 1),
10134 TREE_OPERAND (arg1, 1), 0)))
10136 code = BIT_IOR_EXPR;
10137 goto bit_ior;
10140 /* (X | Y) ^ X -> Y & ~X */
10141 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10142 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10144 tree t2 = TREE_OPERAND (arg0, 1);
10145 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10146 arg1);
10147 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10148 fold_convert (type, t1));
10149 return t1;
10152 /* (Y | X) ^ X -> Y & ~X */
10153 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10154 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10156 tree t2 = TREE_OPERAND (arg0, 0);
10157 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10158 arg1);
10159 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10160 fold_convert (type, t1));
10161 return t1;
10164 /* X ^ (X | Y) -> Y & ~X */
10165 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10166 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10168 tree t2 = TREE_OPERAND (arg1, 1);
10169 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10170 arg0);
10171 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10172 fold_convert (type, t1));
10173 return t1;
10176 /* X ^ (Y | X) -> Y & ~X */
10177 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10178 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10180 tree t2 = TREE_OPERAND (arg1, 0);
10181 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10182 arg0);
10183 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10184 fold_convert (type, t1));
10185 return t1;
10188 /* Convert ~X ^ ~Y to X ^ Y. */
10189 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10190 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10191 return fold_build2 (code, type,
10192 fold_convert (type, TREE_OPERAND (arg0, 0)),
10193 fold_convert (type, TREE_OPERAND (arg1, 0)));
10195 /* Convert ~X ^ C to X ^ ~C. */
10196 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10197 && TREE_CODE (arg1) == INTEGER_CST)
10198 return fold_build2 (code, type,
10199 fold_convert (type, TREE_OPERAND (arg0, 0)),
10200 fold_build1 (BIT_NOT_EXPR, type, arg1));
10202 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10203 if (TREE_CODE (arg0) == BIT_AND_EXPR
10204 && integer_onep (TREE_OPERAND (arg0, 1))
10205 && integer_onep (arg1))
10206 return fold_build2 (EQ_EXPR, type, arg0,
10207 build_int_cst (TREE_TYPE (arg0), 0));
10209 /* Fold (X & Y) ^ Y as ~X & Y. */
10210 if (TREE_CODE (arg0) == BIT_AND_EXPR
10211 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10213 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10214 return fold_build2 (BIT_AND_EXPR, type,
10215 fold_build1 (BIT_NOT_EXPR, type, tem),
10216 fold_convert (type, arg1));
10218 /* Fold (X & Y) ^ X as ~Y & X. */
10219 if (TREE_CODE (arg0) == BIT_AND_EXPR
10220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10221 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10223 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10224 return fold_build2 (BIT_AND_EXPR, type,
10225 fold_build1 (BIT_NOT_EXPR, type, tem),
10226 fold_convert (type, arg1));
10228 /* Fold X ^ (X & Y) as X & ~Y. */
10229 if (TREE_CODE (arg1) == BIT_AND_EXPR
10230 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10232 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10233 return fold_build2 (BIT_AND_EXPR, type,
10234 fold_convert (type, arg0),
10235 fold_build1 (BIT_NOT_EXPR, type, tem));
10237 /* Fold X ^ (Y & X) as ~Y & X. */
10238 if (TREE_CODE (arg1) == BIT_AND_EXPR
10239 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10240 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10242 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10243 return fold_build2 (BIT_AND_EXPR, type,
10244 fold_build1 (BIT_NOT_EXPR, type, tem),
10245 fold_convert (type, arg0));
10248 /* See if this can be simplified into a rotate first. If that
10249 is unsuccessful continue in the association code. */
10250 goto bit_rotate;
10252 case BIT_AND_EXPR:
10253 if (integer_all_onesp (arg1))
10254 return non_lvalue (fold_convert (type, arg0));
10255 if (integer_zerop (arg1))
10256 return omit_one_operand (type, arg1, arg0);
10257 if (operand_equal_p (arg0, arg1, 0))
10258 return non_lvalue (fold_convert (type, arg0));
10260 /* ~X & X is always zero. */
10261 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10262 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10263 return omit_one_operand (type, integer_zero_node, arg1);
10265 /* X & ~X is always zero. */
10266 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10267 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10268 return omit_one_operand (type, integer_zero_node, arg0);
10270 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10271 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10272 && TREE_CODE (arg1) == INTEGER_CST
10273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10274 return fold_build2 (BIT_IOR_EXPR, type,
10275 fold_build2 (BIT_AND_EXPR, type,
10276 TREE_OPERAND (arg0, 0), arg1),
10277 fold_build2 (BIT_AND_EXPR, type,
10278 TREE_OPERAND (arg0, 1), arg1));
10280 /* (X | Y) & Y is (X, Y). */
10281 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10282 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10283 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10284 /* (X | Y) & X is (Y, X). */
10285 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10286 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10287 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10288 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10289 /* X & (X | Y) is (Y, X). */
10290 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10291 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10292 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10293 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10294 /* X & (Y | X) is (Y, X). */
10295 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10296 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10297 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10298 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10300 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10301 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10302 && integer_onep (TREE_OPERAND (arg0, 1))
10303 && integer_onep (arg1))
10305 tem = TREE_OPERAND (arg0, 0);
10306 return fold_build2 (EQ_EXPR, type,
10307 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10308 build_int_cst (TREE_TYPE (tem), 1)),
10309 build_int_cst (TREE_TYPE (tem), 0));
10311 /* Fold ~X & 1 as (X & 1) == 0. */
10312 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10313 && integer_onep (arg1))
10315 tem = TREE_OPERAND (arg0, 0);
10316 return fold_build2 (EQ_EXPR, type,
10317 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10318 build_int_cst (TREE_TYPE (tem), 1)),
10319 build_int_cst (TREE_TYPE (tem), 0));
10322 /* Fold (X ^ Y) & Y as ~X & Y. */
10323 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10324 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10326 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10327 return fold_build2 (BIT_AND_EXPR, type,
10328 fold_build1 (BIT_NOT_EXPR, type, tem),
10329 fold_convert (type, arg1));
10331 /* Fold (X ^ Y) & X as ~Y & X. */
10332 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10334 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10336 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10337 return fold_build2 (BIT_AND_EXPR, type,
10338 fold_build1 (BIT_NOT_EXPR, type, tem),
10339 fold_convert (type, arg1));
10341 /* Fold X & (X ^ Y) as X & ~Y. */
10342 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10343 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10345 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10346 return fold_build2 (BIT_AND_EXPR, type,
10347 fold_convert (type, arg0),
10348 fold_build1 (BIT_NOT_EXPR, type, tem));
10350 /* Fold X & (Y ^ X) as ~Y & X. */
10351 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10352 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10353 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10355 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10356 return fold_build2 (BIT_AND_EXPR, type,
10357 fold_build1 (BIT_NOT_EXPR, type, tem),
10358 fold_convert (type, arg0));
10361 t1 = distribute_bit_expr (code, type, arg0, arg1);
10362 if (t1 != NULL_TREE)
10363 return t1;
10364 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10365 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10366 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10368 unsigned int prec
10369 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10371 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10372 && (~TREE_INT_CST_LOW (arg1)
10373 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10374 return fold_convert (type, TREE_OPERAND (arg0, 0));
10377 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10379 This results in more efficient code for machines without a NOR
10380 instruction. Combine will canonicalize to the first form
10381 which will allow use of NOR instructions provided by the
10382 backend if they exist. */
10383 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10384 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10386 return fold_build1 (BIT_NOT_EXPR, type,
10387 build2 (BIT_IOR_EXPR, type,
10388 TREE_OPERAND (arg0, 0),
10389 TREE_OPERAND (arg1, 0)));
10392 goto associate;
10394 case RDIV_EXPR:
10395 /* Don't touch a floating-point divide by zero unless the mode
10396 of the constant can represent infinity. */
10397 if (TREE_CODE (arg1) == REAL_CST
10398 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10399 && real_zerop (arg1))
10400 return NULL_TREE;
10402 /* Optimize A / A to 1.0 if we don't care about
10403 NaNs or Infinities. Skip the transformation
10404 for non-real operands. */
10405 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10406 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10407 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10408 && operand_equal_p (arg0, arg1, 0))
10410 tree r = build_real (TREE_TYPE (arg0), dconst1);
10412 return omit_two_operands (type, r, arg0, arg1);
10415 /* The complex version of the above A / A optimization. */
10416 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10417 && operand_equal_p (arg0, arg1, 0))
10419 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10420 if (! HONOR_NANS (TYPE_MODE (elem_type))
10421 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10423 tree r = build_real (elem_type, dconst1);
10424 /* omit_two_operands will call fold_convert for us. */
10425 return omit_two_operands (type, r, arg0, arg1);
10429 /* (-A) / (-B) -> A / B */
10430 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10431 return fold_build2 (RDIV_EXPR, type,
10432 TREE_OPERAND (arg0, 0),
10433 negate_expr (arg1));
10434 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10435 return fold_build2 (RDIV_EXPR, type,
10436 negate_expr (arg0),
10437 TREE_OPERAND (arg1, 0));
10439 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
10440 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10441 && real_onep (arg1))
10442 return non_lvalue (fold_convert (type, arg0));
10444 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
10445 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10446 && real_minus_onep (arg1))
10447 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10449 /* If ARG1 is a constant, we can convert this to a multiply by the
10450 reciprocal. This does not have the same rounding properties,
10451 so only do this if -funsafe-math-optimizations. We can actually
10452 always safely do it if ARG1 is a power of two, but it's hard to
10453 tell if it is or not in a portable manner. */
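/* E.g. x / 5.0 becomes x * 0.2 only under the flag, while the
   exact_real_inverse path below turns x / 4.0 into x * 0.25 whenever
   optimizing, since 0.25 is exact in binary floating point.  */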
10454 if (TREE_CODE (arg1) == REAL_CST)
10456 if (flag_unsafe_math_optimizations
10457 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10458 arg1, 0)))
10459 return fold_build2 (MULT_EXPR, type, arg0, tem);
10460 /* Find the reciprocal if optimizing and the result is exact. */
10461 if (optimize)
10463 REAL_VALUE_TYPE r;
10464 r = TREE_REAL_CST (arg1);
10465 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10467 tem = build_real (type, r);
10468 return fold_build2 (MULT_EXPR, type,
10469 fold_convert (type, arg0), tem);
10473 /* Convert A/B/C to A/(B*C). */
10474 if (flag_unsafe_math_optimizations
10475 && TREE_CODE (arg0) == RDIV_EXPR)
10476 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10477 fold_build2 (MULT_EXPR, type,
10478 TREE_OPERAND (arg0, 1), arg1));
10480 /* Convert A/(B/C) to (A/B)*C. */
10481 if (flag_unsafe_math_optimizations
10482 && TREE_CODE (arg1) == RDIV_EXPR)
10483 return fold_build2 (MULT_EXPR, type,
10484 fold_build2 (RDIV_EXPR, type, arg0,
10485 TREE_OPERAND (arg1, 0)),
10486 TREE_OPERAND (arg1, 1));
10488 /* Convert C1/(X*C2) into (C1/C2)/X. */
10489 if (flag_unsafe_math_optimizations
10490 && TREE_CODE (arg1) == MULT_EXPR
10491 && TREE_CODE (arg0) == REAL_CST
10492 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10494 tree tem = const_binop (RDIV_EXPR, arg0,
10495 TREE_OPERAND (arg1, 1), 0);
10496 if (tem)
10497 return fold_build2 (RDIV_EXPR, type, tem,
10498 TREE_OPERAND (arg1, 0));
10501 if (flag_unsafe_math_optimizations)
10503 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10504 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10506 /* Optimize sin(x)/cos(x) as tan(x). */
10507 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10508 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10509 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10510 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10511 CALL_EXPR_ARG (arg1, 0), 0))
10513 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10515 if (tanfn != NULL_TREE)
10516 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10519 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10520 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10521 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10522 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10523 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10524 CALL_EXPR_ARG (arg1, 0), 0))
10526 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10528 if (tanfn != NULL_TREE)
10530 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10531 return fold_build2 (RDIV_EXPR, type,
10532 build_real (type, dconst1), tmp);
10536 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10537 NaNs or Infinities. */
10538 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10539 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10540 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10542 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10543 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10545 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10546 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10547 && operand_equal_p (arg00, arg01, 0))
10549 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10551 if (cosfn != NULL_TREE)
10552 return build_call_expr (cosfn, 1, arg00);
10556 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10557 NaNs or Infinities. */
10558 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10559 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10560 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10562 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10563 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10565 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10566 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10567 && operand_equal_p (arg00, arg01, 0))
10569 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10571 if (cosfn != NULL_TREE)
10573 tree tmp = build_call_expr (cosfn, 1, arg00);
10574 return fold_build2 (RDIV_EXPR, type,
10575 build_real (type, dconst1),
10576 tmp);
10581 /* Optimize pow(x,c)/x as pow(x,c-1). */
10582 if (fcode0 == BUILT_IN_POW
10583 || fcode0 == BUILT_IN_POWF
10584 || fcode0 == BUILT_IN_POWL)
10586 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10587 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10588 if (TREE_CODE (arg01) == REAL_CST
10589 && !TREE_OVERFLOW (arg01)
10590 && operand_equal_p (arg1, arg00, 0))
10592 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10593 REAL_VALUE_TYPE c;
10594 tree arg;
10596 c = TREE_REAL_CST (arg01);
10597 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10598 arg = build_real (type, c);
10599 return build_call_expr (powfn, 2, arg1, arg);
10603 /* Optimize x/expN(y) into x*expN(-y). */
10604 if (BUILTIN_EXPONENT_P (fcode1))
10606 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10607 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10608 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10609 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10612 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10613 if (fcode1 == BUILT_IN_POW
10614 || fcode1 == BUILT_IN_POWF
10615 || fcode1 == BUILT_IN_POWL)
10617 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10618 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10619 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10620 tree neg11 = fold_convert (type, negate_expr (arg11));
10621 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10622 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10625 return NULL_TREE;
10627 case TRUNC_DIV_EXPR:
10628 case FLOOR_DIV_EXPR:
10629 /* Simplify A / (B << N) where A and B are positive and B is
10630 a power of 2, to A >> (N + log2(B)). */
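/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2),
   since 4 << N == 1 << (N + 2).  */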
10631 strict_overflow_p = false;
10632 if (TREE_CODE (arg1) == LSHIFT_EXPR
10633 && (TYPE_UNSIGNED (type)
10634 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10636 tree sval = TREE_OPERAND (arg1, 0);
10637 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10639 tree sh_cnt = TREE_OPERAND (arg1, 1);
10640 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10642 if (strict_overflow_p)
10643 fold_overflow_warning (("assuming signed overflow does not "
10644 "occur when simplifying A / (B << N)"),
10645 WARN_STRICT_OVERFLOW_MISC);
10647 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10648 sh_cnt, build_int_cst (NULL_TREE, pow2));
10649 return fold_build2 (RSHIFT_EXPR, type,
10650 fold_convert (type, arg0), sh_cnt);
10653 /* Fall through. */
10655 case ROUND_DIV_EXPR:
10656 case CEIL_DIV_EXPR:
10657 case EXACT_DIV_EXPR:
10658 if (integer_onep (arg1))
10659 return non_lvalue (fold_convert (type, arg0));
10660 if (integer_zerop (arg1))
10661 return NULL_TREE;
10662 /* X / -1 is -X. */
10663 if (!TYPE_UNSIGNED (type)
10664 && TREE_CODE (arg1) == INTEGER_CST
10665 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10666 && TREE_INT_CST_HIGH (arg1) == -1)
10667 return fold_convert (type, negate_expr (arg0));
10669 /* Convert -A / -B to A / B when the type is signed and overflow is
10670 undefined. */
10671 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10672 && TREE_CODE (arg0) == NEGATE_EXPR
10673 && negate_expr_p (arg1))
10675 if (INTEGRAL_TYPE_P (type))
10676 fold_overflow_warning (("assuming signed overflow does not occur "
10677 "when distributing negation across "
10678 "division"),
10679 WARN_STRICT_OVERFLOW_MISC);
10680 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10681 negate_expr (arg1));
10683 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10684 && TREE_CODE (arg1) == NEGATE_EXPR
10685 && negate_expr_p (arg0))
10687 if (INTEGRAL_TYPE_P (type))
10688 fold_overflow_warning (("assuming signed overflow does not occur "
10689 "when distributing negation across "
10690 "division"),
10691 WARN_STRICT_OVERFLOW_MISC);
10692 return fold_build2 (code, type, negate_expr (arg0),
10693 TREE_OPERAND (arg1, 0));
10696 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10697 operation, EXACT_DIV_EXPR.
10699 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10700 At one time others generated faster code; it's not clear whether they
10701 still do after the last round of changes to the DIV code in expmed.c. */
10702 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10703 && multiple_of_p (type, arg0, arg1))
10704 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10706 strict_overflow_p = false;
10707 if (TREE_CODE (arg1) == INTEGER_CST
10708 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10709 &strict_overflow_p)))
10711 if (strict_overflow_p)
10712 fold_overflow_warning (("assuming signed overflow does not occur "
10713 "when simplifying division"),
10714 WARN_STRICT_OVERFLOW_MISC);
10715 return fold_convert (type, tem);
10718 return NULL_TREE;
10720 case CEIL_MOD_EXPR:
10721 case FLOOR_MOD_EXPR:
10722 case ROUND_MOD_EXPR:
10723 case TRUNC_MOD_EXPR:
10724 /* X % 1 is always zero, but be sure to preserve any side
10725 effects in X. */
10726 if (integer_onep (arg1))
10727 return omit_one_operand (type, integer_zero_node, arg0);
10729 /* For X % 0, return X % 0 unchanged so that we can get the
10730 proper warnings and errors. */
10731 if (integer_zerop (arg1))
10732 return NULL_TREE;
10734 /* 0 % X is always zero, but be sure to preserve any side
10735 effects in X. Place this after checking for X == 0. */
10736 if (integer_zerop (arg0))
10737 return omit_one_operand (type, integer_zero_node, arg1);
10739 /* X % -1 is zero. */
10740 if (!TYPE_UNSIGNED (type)
10741 && TREE_CODE (arg1) == INTEGER_CST
10742 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10743 && TREE_INT_CST_HIGH (arg1) == -1)
10744 return omit_one_operand (type, integer_zero_node, arg0);
10746 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10747 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10748 strict_overflow_p = false;
10749 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10750 && (TYPE_UNSIGNED (type)
10751 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10753 tree c = arg1;
10754 /* Also optimize A % (C << N) where C is a power of 2,
10755 to A & ((C << N) - 1). */
10756 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10757 c = TREE_OPERAND (arg1, 0);
10759 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10761 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10762 build_int_cst (TREE_TYPE (arg1), 1));
10763 if (strict_overflow_p)
10764 fold_overflow_warning (("assuming signed overflow does not "
10765 "occur when simplifying "
10766 "X % (power of two)"),
10767 WARN_STRICT_OVERFLOW_MISC);
10768 return fold_build2 (BIT_AND_EXPR, type,
10769 fold_convert (type, arg0),
10770 fold_convert (type, mask));
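/* For example, with unsigned x, "x % 16" folds to "x & 15", and
   "x % (4 << n)" folds to "x & ((4 << n) - 1)". */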
10774 /* X % -C is the same as X % C. */
10775 if (code == TRUNC_MOD_EXPR
10776 && !TYPE_UNSIGNED (type)
10777 && TREE_CODE (arg1) == INTEGER_CST
10778 && !TREE_OVERFLOW (arg1)
10779 && TREE_INT_CST_HIGH (arg1) < 0
10780 && !TYPE_OVERFLOW_TRAPS (type)
10781 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10782 && !sign_bit_p (arg1, arg1))
10783 return fold_build2 (code, type, fold_convert (type, arg0),
10784 fold_convert (type, negate_expr (arg1)));
10786 /* X % -Y is the same as X % Y. */
10787 if (code == TRUNC_MOD_EXPR
10788 && !TYPE_UNSIGNED (type)
10789 && TREE_CODE (arg1) == NEGATE_EXPR
10790 && !TYPE_OVERFLOW_TRAPS (type))
10791 return fold_build2 (code, type, fold_convert (type, arg0),
10792 fold_convert (type, TREE_OPERAND (arg1, 0)));
10794 if (TREE_CODE (arg1) == INTEGER_CST
10795 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10796 &strict_overflow_p)))
10798 if (strict_overflow_p)
10799 fold_overflow_warning (("assuming signed overflow does not occur "
10800 "when simplifying modulos"),
10801 WARN_STRICT_OVERFLOW_MISC);
10802 return fold_convert (type, tem);
10805 return NULL_TREE;
10807 case LROTATE_EXPR:
10808 case RROTATE_EXPR:
10809 if (integer_all_onesp (arg0))
10810 return omit_one_operand (type, arg0, arg1);
10811 goto shift;
10813 case RSHIFT_EXPR:
10814 /* Optimize -1 >> x for arithmetic right shifts. */
10815 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10816 return omit_one_operand (type, arg0, arg1);
10817 /* ... fall through ... */
10819 case LSHIFT_EXPR:
10820 shift:
10821 if (integer_zerop (arg1))
10822 return non_lvalue (fold_convert (type, arg0));
10823 if (integer_zerop (arg0))
10824 return omit_one_operand (type, arg0, arg1);
10826 /* Since a negative shift count is not well-defined,
10827 don't try to compute it in the compiler. */
10828 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10829 return NULL_TREE;
10831 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10832 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10833 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10834 && host_integerp (TREE_OPERAND (arg0, 1), false)
10835 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10837 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10838 + TREE_INT_CST_LOW (arg1));
10840 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10841 being well defined. */
10842 if (low >= TYPE_PRECISION (type))
10844 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10845 low = low % TYPE_PRECISION (type);
10846 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10847 return build_int_cst (type, 0);
10848 else
10849 low = TYPE_PRECISION (type) - 1;
10852 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10853 build_int_cst (type, low));
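/* For example, "(x << 3) << 5" folds to "x << 8", provided the
   combined count stays below the precision of the type. */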
10856 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10857 into x & ((unsigned)-1 >> c) for unsigned types. */
10858 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10859 || (TYPE_UNSIGNED (type)
10860 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10861 && host_integerp (arg1, false)
10862 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10863 && host_integerp (TREE_OPERAND (arg0, 1), false)
10864 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10866 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10867 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10868 tree lshift;
10869 tree arg00;
10871 if (low0 == low1)
10873 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10875 lshift = build_int_cst (type, -1);
10876 lshift = int_const_binop (code, lshift, arg1, 0);
10878 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
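/* For example, "(x >> 4) << 4" folds to "x & (-1 << 4)", clearing
   the four low-order bits. */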
10882 /* Rewrite an LROTATE_EXPR by a constant into an
10883 RROTATE_EXPR by a new constant. */
10884 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10886 tree tem = build_int_cst (TREE_TYPE (arg1),
10887 GET_MODE_BITSIZE (TYPE_MODE (type)));
10888 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10889 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
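/* For example, on a 32-bit type, a left-rotate by 8 is rewritten
   as a right-rotate by 24. */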
10892 /* If we have a rotate of a bit operation with the rotate count and
10893 the second operand of the bit operation both constant,
10894 permute the two operations. */
10895 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10896 && (TREE_CODE (arg0) == BIT_AND_EXPR
10897 || TREE_CODE (arg0) == BIT_IOR_EXPR
10898 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10899 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10900 return fold_build2 (TREE_CODE (arg0), type,
10901 fold_build2 (code, type,
10902 TREE_OPERAND (arg0, 0), arg1),
10903 fold_build2 (code, type,
10904 TREE_OPERAND (arg0, 1), arg1));
10906 /* Two consecutive rotates adding up to the width of the mode can
10907 be ignored. */
10908 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10909 && TREE_CODE (arg0) == RROTATE_EXPR
10910 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10911 && TREE_INT_CST_HIGH (arg1) == 0
10912 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10913 && ((TREE_INT_CST_LOW (arg1)
10914 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10915 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10916 return TREE_OPERAND (arg0, 0);
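/* For example, on a 32-bit type, rotating x right by 8 and then
   right by 24 folds back to x. */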
10918 return NULL_TREE;
10920 case MIN_EXPR:
10921 if (operand_equal_p (arg0, arg1, 0))
10922 return omit_one_operand (type, arg0, arg1);
10923 if (INTEGRAL_TYPE_P (type)
10924 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10925 return omit_one_operand (type, arg1, arg0);
10926 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10927 if (tem)
10928 return tem;
10929 goto associate;
10931 case MAX_EXPR:
10932 if (operand_equal_p (arg0, arg1, 0))
10933 return omit_one_operand (type, arg0, arg1);
10934 if (INTEGRAL_TYPE_P (type)
10935 && TYPE_MAX_VALUE (type)
10936 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10937 return omit_one_operand (type, arg1, arg0);
10938 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10939 if (tem)
10940 return tem;
10941 goto associate;
10943 case TRUTH_ANDIF_EXPR:
10944 /* Note that the operands of this must be ints
10945 and their values must be 0 or 1.
10946 ("true" is a fixed value perhaps depending on the language.) */
10947 /* If first arg is constant zero, return it. */
10948 if (integer_zerop (arg0))
10949 return fold_convert (type, arg0);
10950 case TRUTH_AND_EXPR:
10951 /* If either arg is constant true, drop it. */
10952 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10953 return non_lvalue (fold_convert (type, arg1));
10954 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10955 /* Preserve sequence points. */
10956 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10957 return non_lvalue (fold_convert (type, arg0));
10958 /* If second arg is constant zero, result is zero, but first arg
10959 must be evaluated. */
10960 if (integer_zerop (arg1))
10961 return omit_one_operand (type, arg1, arg0);
10962 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10963 case will be handled here. */
10964 if (integer_zerop (arg0))
10965 return omit_one_operand (type, arg0, arg1);
10967 /* !X && X is always false. */
10968 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10969 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10970 return omit_one_operand (type, integer_zero_node, arg1);
10971 /* X && !X is always false. */
10972 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10973 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10974 return omit_one_operand (type, integer_zero_node, arg0);
10976 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10977 means A >= Y && A != MAX, but in this case we know that
10978 A < X <= MAX. */
10980 if (!TREE_SIDE_EFFECTS (arg0)
10981 && !TREE_SIDE_EFFECTS (arg1))
10983 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10984 if (tem && !operand_equal_p (tem, arg0, 0))
10985 return fold_build2 (code, type, tem, arg1);
10987 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10988 if (tem && !operand_equal_p (tem, arg1, 0))
10989 return fold_build2 (code, type, arg0, tem);
10992 truth_andor:
10993 /* We only do these simplifications if we are optimizing. */
10994 if (!optimize)
10995 return NULL_TREE;
10997 /* Check for things like (A || B) && (A || C). We can convert this
10998 to A || (B && C). Note that either operator can be any of the four
10999 truth and/or operations and the transformation will still be
11000 valid. Also note that we only care about order for the
11001 ANDIF and ORIF operators. If B contains side effects, this
11002 might change the truth-value of A. */
11003 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11004 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11005 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11006 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11007 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11008 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11010 tree a00 = TREE_OPERAND (arg0, 0);
11011 tree a01 = TREE_OPERAND (arg0, 1);
11012 tree a10 = TREE_OPERAND (arg1, 0);
11013 tree a11 = TREE_OPERAND (arg1, 1);
11014 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11015 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11016 && (code == TRUTH_AND_EXPR
11017 || code == TRUTH_OR_EXPR));
11019 if (operand_equal_p (a00, a10, 0))
11020 return fold_build2 (TREE_CODE (arg0), type, a00,
11021 fold_build2 (code, type, a01, a11));
11022 else if (commutative && operand_equal_p (a00, a11, 0))
11023 return fold_build2 (TREE_CODE (arg0), type, a00,
11024 fold_build2 (code, type, a01, a10));
11025 else if (commutative && operand_equal_p (a01, a10, 0))
11026 return fold_build2 (TREE_CODE (arg0), type, a01,
11027 fold_build2 (code, type, a00, a11));
11029 /* This case is tricky because we must either have commutative
11030 operators or else A10 must not have side-effects. */
11032 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11033 && operand_equal_p (a01, a11, 0))
11034 return fold_build2 (TREE_CODE (arg0), type,
11035 fold_build2 (code, type, a00, a10),
11036 a01);
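/* For example, "(a || b) && (a || c)" folds to "a || (b && c)"
   when b has no side effects. */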
11039 /* See if we can build a range comparison. */
11040 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11041 return tem;
11043 /* Check for the possibility of merging component references. If our
11044 lhs is another similar operation, try to merge its rhs with our
11045 rhs. Then try to merge our lhs and rhs. */
11046 if (TREE_CODE (arg0) == code
11047 && 0 != (tem = fold_truthop (code, type,
11048 TREE_OPERAND (arg0, 1), arg1)))
11049 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11051 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11052 return tem;
11054 return NULL_TREE;
11056 case TRUTH_ORIF_EXPR:
11057 /* Note that the operands of this must be ints
11058 and their values must be 0 or true.
11059 ("true" is a fixed value perhaps depending on the language.) */
11060 /* If first arg is constant true, return it. */
11061 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11062 return fold_convert (type, arg0);
11063 case TRUTH_OR_EXPR:
11064 /* If either arg is constant zero, drop it. */
11065 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11066 return non_lvalue (fold_convert (type, arg1));
11067 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11068 /* Preserve sequence points. */
11069 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11070 return non_lvalue (fold_convert (type, arg0));
11071 /* If second arg is constant true, result is true, but we must
11072 evaluate first arg. */
11073 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11074 return omit_one_operand (type, arg1, arg0);
11075 /* Likewise for first arg, but note this only occurs here for
11076 TRUTH_OR_EXPR. */
11077 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11078 return omit_one_operand (type, arg0, arg1);
11080 /* !X || X is always true. */
11081 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11083 return omit_one_operand (type, integer_one_node, arg1);
11084 /* X || !X is always true. */
11085 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11087 return omit_one_operand (type, integer_one_node, arg0);
11089 goto truth_andor;
11091 case TRUTH_XOR_EXPR:
11092 /* If the second arg is constant zero, drop it. */
11093 if (integer_zerop (arg1))
11094 return non_lvalue (fold_convert (type, arg0));
11095 /* If the second arg is constant true, this is a logical inversion. */
11096 if (integer_onep (arg1))
11098 /* Only call invert_truthvalue if operand is a truth value. */
11099 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11100 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11101 else
11102 tem = invert_truthvalue (arg0);
11103 return non_lvalue (fold_convert (type, tem));
11105 /* Identical arguments cancel to zero. */
11106 if (operand_equal_p (arg0, arg1, 0))
11107 return omit_one_operand (type, integer_zero_node, arg0);
11109 /* !X ^ X is always true. */
11110 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11111 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11112 return omit_one_operand (type, integer_one_node, arg1);
11114 /* X ^ !X is always true. */
11115 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11117 return omit_one_operand (type, integer_one_node, arg0);
11119 return NULL_TREE;
11121 case EQ_EXPR:
11122 case NE_EXPR:
11123 tem = fold_comparison (code, type, op0, op1);
11124 if (tem != NULL_TREE)
11125 return tem;
11127 /* bool_var != 0 becomes bool_var. */
11128 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11129 && code == NE_EXPR)
11130 return non_lvalue (fold_convert (type, arg0));
11132 /* bool_var == 1 becomes bool_var. */
11133 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11134 && code == EQ_EXPR)
11135 return non_lvalue (fold_convert (type, arg0));
11137 /* bool_var != 1 becomes !bool_var. */
11138 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11139 && code == NE_EXPR)
11140 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11142 /* bool_var == 0 becomes !bool_var. */
11143 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11144 && code == EQ_EXPR)
11145 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11147 /* If this is an equality comparison of the address of two non-weak,
11148 unaliased symbols neither of which are extern (since we do not
11149 have access to attributes for externs), then we know the result. */
11150 if (TREE_CODE (arg0) == ADDR_EXPR
11151 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11152 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11153 && ! lookup_attribute ("alias",
11154 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11155 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11156 && TREE_CODE (arg1) == ADDR_EXPR
11157 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11158 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11159 && ! lookup_attribute ("alias",
11160 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11161 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11163 /* We know that we're looking at the address of two
11164 non-weak, unaliased, static _DECL nodes.
11166 It is both wasteful and incorrect to call operand_equal_p
11167 to compare the two ADDR_EXPR nodes. It is wasteful in that
11168 all we need to do is test pointer equality for the arguments
11169 to the two ADDR_EXPR nodes. It is incorrect to use
11170 operand_equal_p as that function is NOT equivalent to a
11171 C equality test. It can in fact return false for two
11172 objects which would test as equal using the C equality
11173 operator. */
11174 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11175 return constant_boolean_node (equal
11176 ? code == EQ_EXPR : code != EQ_EXPR,
11177 type);
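/* For example, given "static int a, b;", "&a == &b" folds to 0
   and "&a == &a" folds to 1. */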
11180 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11181 a MINUS_EXPR of a constant, we can convert it into a comparison with
11182 a revised constant as long as no overflow occurs. */
11183 if (TREE_CODE (arg1) == INTEGER_CST
11184 && (TREE_CODE (arg0) == PLUS_EXPR
11185 || TREE_CODE (arg0) == MINUS_EXPR)
11186 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11187 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11188 ? MINUS_EXPR : PLUS_EXPR,
11189 fold_convert (TREE_TYPE (arg0), arg1),
11190 TREE_OPERAND (arg0, 1), 0))
11191 && !TREE_OVERFLOW (tem))
11192 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11194 /* Similarly for a NEGATE_EXPR. */
11195 if (TREE_CODE (arg0) == NEGATE_EXPR
11196 && TREE_CODE (arg1) == INTEGER_CST
11197 && 0 != (tem = negate_expr (arg1))
11198 && TREE_CODE (tem) == INTEGER_CST
11199 && !TREE_OVERFLOW (tem))
11200 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11202 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11203 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11204 && TREE_CODE (arg1) == INTEGER_CST
11205 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11206 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11207 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11208 fold_convert (TREE_TYPE (arg0), arg1),
11209 TREE_OPERAND (arg0, 1)));
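/* For example, "(x ^ 5) == 3" folds to "x == 6". */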
11211 /* Transform comparisons of the form X +- C CMP X. */
11212 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11214 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11215 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11216 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11218 tree cst = TREE_OPERAND (arg0, 1);
11220 if (code == EQ_EXPR
11221 && !integer_zerop (cst))
11222 return omit_two_operands (type, boolean_false_node,
11223 TREE_OPERAND (arg0, 0), arg1);
11224 else
11225 return omit_two_operands (type, boolean_true_node,
11226 TREE_OPERAND (arg0, 0), arg1);
11229 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11230 for !=. Don't do this for ordered comparisons due to overflow. */
11231 if (TREE_CODE (arg0) == MINUS_EXPR
11232 && integer_zerop (arg1))
11233 return fold_build2 (code, type,
11234 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11236 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11237 if (TREE_CODE (arg0) == ABS_EXPR
11238 && (integer_zerop (arg1) || real_zerop (arg1)))
11239 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11241 /* If this is an EQ or NE comparison with zero and ARG0 is
11242 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11243 two operations, but the latter can be done in one less insn
11244 on machines that have only two-operand insns or on which a
11245 constant cannot be the first operand. */
11246 if (TREE_CODE (arg0) == BIT_AND_EXPR
11247 && integer_zerop (arg1))
11249 tree arg00 = TREE_OPERAND (arg0, 0);
11250 tree arg01 = TREE_OPERAND (arg0, 1);
11251 if (TREE_CODE (arg00) == LSHIFT_EXPR
11252 && integer_onep (TREE_OPERAND (arg00, 0)))
11253 return
11254 fold_build2 (code, type,
11255 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11256 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11257 arg01, TREE_OPERAND (arg00, 1)),
11258 fold_convert (TREE_TYPE (arg0),
11259 integer_one_node)),
11260 arg1);
11261 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11262 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11263 return
11264 fold_build2 (code, type,
11265 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11266 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11267 arg00, TREE_OPERAND (arg01, 1)),
11268 fold_convert (TREE_TYPE (arg0),
11269 integer_one_node)),
11270 arg1);
11273 /* If this is an NE or EQ comparison of zero against the result of a
11274 signed MOD operation whose second operand is a power of 2, make
11275 the MOD operation unsigned since it is simpler and equivalent. */
11276 if (integer_zerop (arg1)
11277 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11278 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11279 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11280 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11281 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11282 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11284 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11285 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11286 fold_convert (newtype,
11287 TREE_OPERAND (arg0, 0)),
11288 fold_convert (newtype,
11289 TREE_OPERAND (arg0, 1)));
11291 return fold_build2 (code, type, newmod,
11292 fold_convert (newtype, arg1));
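/* For example, with signed x, "(x % 4) == 0" becomes
   "((unsigned) x % 4) == 0". */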
11295 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11296 C1 is a valid shift constant, and C2 is a power of two, i.e.
11297 a single bit. */
11298 if (TREE_CODE (arg0) == BIT_AND_EXPR
11299 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11300 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11301 == INTEGER_CST
11302 && integer_pow2p (TREE_OPERAND (arg0, 1))
11303 && integer_zerop (arg1))
11305 tree itype = TREE_TYPE (arg0);
11306 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11307 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11309 /* Check for a valid shift count. */
11310 if (TREE_INT_CST_HIGH (arg001) == 0
11311 && TREE_INT_CST_LOW (arg001) < prec)
11313 tree arg01 = TREE_OPERAND (arg0, 1);
11314 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11315 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11316 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11317 can be rewritten as (X & (C2 << C1)) != 0. */
11318 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11320 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11321 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11322 return fold_build2 (code, type, tem, arg1);
11324 /* Otherwise, for signed (arithmetic) shifts,
11325 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11326 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11327 else if (!TYPE_UNSIGNED (itype))
11328 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11329 arg000, build_int_cst (itype, 0));
11330 /* Otherwise, for unsigned (logical) shifts,
11331 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11332 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11333 else
11334 return omit_one_operand (type,
11335 code == EQ_EXPR ? integer_one_node
11336 : integer_zero_node,
11337 arg000);
11341 /* If this is an NE comparison of zero with an AND of one, remove the
11342 comparison since the AND will give the correct value. */
11343 if (code == NE_EXPR
11344 && integer_zerop (arg1)
11345 && TREE_CODE (arg0) == BIT_AND_EXPR
11346 && integer_onep (TREE_OPERAND (arg0, 1)))
11347 return fold_convert (type, arg0);
11349 /* If we have (A & C) == C where C is a power of 2, convert this into
11350 (A & C) != 0. Similarly for NE_EXPR. */
11351 if (TREE_CODE (arg0) == BIT_AND_EXPR
11352 && integer_pow2p (TREE_OPERAND (arg0, 1))
11353 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11354 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11355 arg0, fold_convert (TREE_TYPE (arg0),
11356 integer_zero_node));
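/* For example, "(x & 8) == 8" folds to "(x & 8) != 0". */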
11358 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11359 bit, then fold the expression into A < 0 or A >= 0. */
11360 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11361 if (tem)
11362 return tem;
11364 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11365 Similarly for NE_EXPR. */
11366 if (TREE_CODE (arg0) == BIT_AND_EXPR
11367 && TREE_CODE (arg1) == INTEGER_CST
11368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11370 tree notc = fold_build1 (BIT_NOT_EXPR,
11371 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11372 TREE_OPERAND (arg0, 1));
11373 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11374 arg1, notc);
11375 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11376 if (integer_nonzerop (dandnotc))
11377 return omit_one_operand (type, rslt, arg0);
11380 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11381 Similarly for NE_EXPR. */
11382 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11383 && TREE_CODE (arg1) == INTEGER_CST
11384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11386 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11387 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11388 TREE_OPERAND (arg0, 1), notd);
11389 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11390 if (integer_nonzerop (candnotd))
11391 return omit_one_operand (type, rslt, arg0);
11394 /* If this is a comparison of a field, we may be able to simplify it. */
11395 if ((TREE_CODE (arg0) == COMPONENT_REF
11396 || TREE_CODE (arg0) == BIT_FIELD_REF)
11397 /* Handle the constant case even without -O
11398 to make sure the warnings are given. */
11399 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11401 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11402 if (t1)
11403 return t1;
11406 /* Optimize comparisons of strlen vs zero to a compare of the
11407 first character of the string vs zero. To wit,
11408 strlen(ptr) == 0 => *ptr == 0
11409 strlen(ptr) != 0 => *ptr != 0
11410 Other cases should reduce to one of these two (or a constant)
11411 due to the return value of strlen being unsigned. */
11412 if (TREE_CODE (arg0) == CALL_EXPR
11413 && integer_zerop (arg1))
11415 tree fndecl = get_callee_fndecl (arg0);
11417 if (fndecl
11418 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11419 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11420 && call_expr_nargs (arg0) == 1
11421 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11423 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11424 return fold_build2 (code, type, iref,
11425 build_int_cst (TREE_TYPE (iref), 0));
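/* For example, "strlen (s) == 0" folds to "*s == 0". */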
11429 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11430 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11431 if (TREE_CODE (arg0) == RSHIFT_EXPR
11432 && integer_zerop (arg1)
11433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11435 tree arg00 = TREE_OPERAND (arg0, 0);
11436 tree arg01 = TREE_OPERAND (arg0, 1);
11437 tree itype = TREE_TYPE (arg00);
11438 if (TREE_INT_CST_HIGH (arg01) == 0
11439 && TREE_INT_CST_LOW (arg01)
11440 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11442 if (TYPE_UNSIGNED (itype))
11444 itype = lang_hooks.types.signed_type (itype);
11445 arg00 = fold_convert (itype, arg00);
11447 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11448 type, arg00, build_int_cst (itype, 0));
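/* For example, with 32-bit int x, "(x >> 31) != 0" folds to
   "x < 0" and "(x >> 31) == 0" folds to "x >= 0". */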
11452 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11453 if (integer_zerop (arg1)
11454 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11455 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11456 TREE_OPERAND (arg0, 1));
11458 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11459 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11460 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11461 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11462 build_int_cst (TREE_TYPE (arg1), 0));
11463 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11464 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11465 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11466 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11467 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11468 build_int_cst (TREE_TYPE (arg1), 0));
11470 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11471 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11472 && TREE_CODE (arg1) == INTEGER_CST
11473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11474 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11475 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11476 TREE_OPERAND (arg0, 1), arg1));
11478 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11479 (X & C) == 0 when C is a single bit. */
11480 if (TREE_CODE (arg0) == BIT_AND_EXPR
11481 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11482 && integer_zerop (arg1)
11483 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11485 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11486 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11487 TREE_OPERAND (arg0, 1));
11488 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11489 type, tem, arg1);
11492 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11493 constant C is a power of two, i.e. a single bit. */
11494 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11495 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11496 && integer_zerop (arg1)
11497 && integer_pow2p (TREE_OPERAND (arg0, 1))
11498 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11499 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11501 tree arg00 = TREE_OPERAND (arg0, 0);
11502 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11503 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11506 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11507 when C is a power of two, i.e. a single bit. */
11508 if (TREE_CODE (arg0) == BIT_AND_EXPR
11509 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11510 && integer_zerop (arg1)
11511 && integer_pow2p (TREE_OPERAND (arg0, 1))
11512 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11513 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11515 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11516 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11517 arg000, TREE_OPERAND (arg0, 1));
11518 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11519 tem, build_int_cst (TREE_TYPE (tem), 0));
11522 if (integer_zerop (arg1)
11523 && tree_expr_nonzero_p (arg0))
11525 tree res = constant_boolean_node (code==NE_EXPR, type);
11526 return omit_one_operand (type, res, arg0);
11529 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11530 if (TREE_CODE (arg0) == NEGATE_EXPR
11531 && TREE_CODE (arg1) == NEGATE_EXPR)
11532 return fold_build2 (code, type,
11533 TREE_OPERAND (arg0, 0),
11534 TREE_OPERAND (arg1, 0));
11536 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
11537 if (TREE_CODE (arg0) == BIT_AND_EXPR
11538 && TREE_CODE (arg1) == BIT_AND_EXPR)
11540 tree arg00 = TREE_OPERAND (arg0, 0);
11541 tree arg01 = TREE_OPERAND (arg0, 1);
11542 tree arg10 = TREE_OPERAND (arg1, 0);
11543 tree arg11 = TREE_OPERAND (arg1, 1);
11544 tree itype = TREE_TYPE (arg0);
11546 if (operand_equal_p (arg01, arg11, 0))
11547 return fold_build2 (code, type,
11548 fold_build2 (BIT_AND_EXPR, itype,
11549 fold_build2 (BIT_XOR_EXPR, itype,
11550 arg00, arg10),
11551 arg01),
11552 build_int_cst (itype, 0));
11554 if (operand_equal_p (arg01, arg10, 0))
11555 return fold_build2 (code, type,
11556 fold_build2 (BIT_AND_EXPR, itype,
11557 fold_build2 (BIT_XOR_EXPR, itype,
11558 arg00, arg11),
11559 arg01),
11560 build_int_cst (itype, 0));
11562 if (operand_equal_p (arg00, arg11, 0))
11563 return fold_build2 (code, type,
11564 fold_build2 (BIT_AND_EXPR, itype,
11565 fold_build2 (BIT_XOR_EXPR, itype,
11566 arg01, arg10),
11567 arg00),
11568 build_int_cst (itype, 0));
11570 if (operand_equal_p (arg00, arg10, 0))
11571 return fold_build2 (code, type,
11572 fold_build2 (BIT_AND_EXPR, itype,
11573 fold_build2 (BIT_XOR_EXPR, itype,
11574 arg01, arg11),
11575 arg00),
11576 build_int_cst (itype, 0));
11579 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11580 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11582 tree arg00 = TREE_OPERAND (arg0, 0);
11583 tree arg01 = TREE_OPERAND (arg0, 1);
11584 tree arg10 = TREE_OPERAND (arg1, 0);
11585 tree arg11 = TREE_OPERAND (arg1, 1);
11586 tree itype = TREE_TYPE (arg0);
11588 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11589 operand_equal_p guarantees no side-effects so we don't need
11590 to use omit_one_operand on Z. */
11591 if (operand_equal_p (arg01, arg11, 0))
11592 return fold_build2 (code, type, arg00, arg10);
11593 if (operand_equal_p (arg01, arg10, 0))
11594 return fold_build2 (code, type, arg00, arg11);
11595 if (operand_equal_p (arg00, arg11, 0))
11596 return fold_build2 (code, type, arg01, arg10);
11597 if (operand_equal_p (arg00, arg10, 0))
11598 return fold_build2 (code, type, arg01, arg11);
11600 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11601 if (TREE_CODE (arg01) == INTEGER_CST
11602 && TREE_CODE (arg11) == INTEGER_CST)
11603 return fold_build2 (code, type,
11604 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11605 fold_build2 (BIT_XOR_EXPR, itype,
11606 arg01, arg11)),
11607 arg10);
11610 /* Attempt to simplify equality/inequality comparisons of complex
11611 values. Only lower the comparison if the result is known or
11612 can be simplified to a single scalar comparison. */
11613 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11614 || TREE_CODE (arg0) == COMPLEX_CST)
11615 && (TREE_CODE (arg1) == COMPLEX_EXPR
11616 || TREE_CODE (arg1) == COMPLEX_CST))
11618 tree real0, imag0, real1, imag1;
11619 tree rcond, icond;
11621 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11623 real0 = TREE_OPERAND (arg0, 0);
11624 imag0 = TREE_OPERAND (arg0, 1);
11626 else
11628 real0 = TREE_REALPART (arg0);
11629 imag0 = TREE_IMAGPART (arg0);
11632 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11634 real1 = TREE_OPERAND (arg1, 0);
11635 imag1 = TREE_OPERAND (arg1, 1);
11637 else
11639 real1 = TREE_REALPART (arg1);
11640 imag1 = TREE_IMAGPART (arg1);
11643 rcond = fold_binary (code, type, real0, real1);
11644 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11646 if (integer_zerop (rcond))
11648 if (code == EQ_EXPR)
11649 return omit_two_operands (type, boolean_false_node,
11650 imag0, imag1);
11651 return fold_build2 (NE_EXPR, type, imag0, imag1);
11653 else
11655 if (code == NE_EXPR)
11656 return omit_two_operands (type, boolean_true_node,
11657 imag0, imag1);
11658 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11662 icond = fold_binary (code, type, imag0, imag1);
11663 if (icond && TREE_CODE (icond) == INTEGER_CST)
11665 if (integer_zerop (icond))
11667 if (code == EQ_EXPR)
11668 return omit_two_operands (type, boolean_false_node,
11669 real0, real1);
11670 return fold_build2 (NE_EXPR, type, real0, real1);
11672 else
11674 if (code == NE_EXPR)
11675 return omit_two_operands (type, boolean_true_node,
11676 real0, real1);
11677 return fold_build2 (EQ_EXPR, type, real0, real1);
11682 return NULL_TREE;
11684 case LT_EXPR:
11685 case GT_EXPR:
11686 case LE_EXPR:
11687 case GE_EXPR:
11688 tem = fold_comparison (code, type, op0, op1);
11689 if (tem != NULL_TREE)
11690 return tem;
11692 /* Transform comparisons of the form X +- C CMP X. */
11693 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11694 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11695 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11696 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11697 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11698 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11700 tree arg01 = TREE_OPERAND (arg0, 1);
11701 enum tree_code code0 = TREE_CODE (arg0);
11702 int is_positive;
11704 if (TREE_CODE (arg01) == REAL_CST)
11705 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11706 else
11707 is_positive = tree_int_cst_sgn (arg01);
11709 /* (X - c) > X becomes false. */
11710 if (code == GT_EXPR
11711 && ((code0 == MINUS_EXPR && is_positive >= 0)
11712 || (code0 == PLUS_EXPR && is_positive <= 0)))
11714 if (TREE_CODE (arg01) == INTEGER_CST
11715 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11716 fold_overflow_warning (("assuming signed overflow does not "
11717 "occur when assuming that (X - c) > X "
11718 "is always false"),
11719 WARN_STRICT_OVERFLOW_ALL);
11720 return constant_boolean_node (0, type);
11723 /* Likewise (X + c) < X becomes false. */
11724 if (code == LT_EXPR
11725 && ((code0 == PLUS_EXPR && is_positive >= 0)
11726 || (code0 == MINUS_EXPR && is_positive <= 0)))
11728 if (TREE_CODE (arg01) == INTEGER_CST
11729 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11730 fold_overflow_warning (("assuming signed overflow does not "
11731 "occur when assuming that "
11732 "(X + c) < X is always false"),
11733 WARN_STRICT_OVERFLOW_ALL);
11734 return constant_boolean_node (0, type);
11737 /* Convert (X - c) <= X to true. */
11738 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11739 && code == LE_EXPR
11740 && ((code0 == MINUS_EXPR && is_positive >= 0)
11741 || (code0 == PLUS_EXPR && is_positive <= 0)))
11743 if (TREE_CODE (arg01) == INTEGER_CST
11744 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11745 fold_overflow_warning (("assuming signed overflow does not "
11746 "occur when assuming that "
11747 "(X - c) <= X is always true"),
11748 WARN_STRICT_OVERFLOW_ALL);
11749 return constant_boolean_node (1, type);
11752 /* Convert (X + c) >= X to true. */
11753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11754 && code == GE_EXPR
11755 && ((code0 == PLUS_EXPR && is_positive >= 0)
11756 || (code0 == MINUS_EXPR && is_positive <= 0)))
11758 if (TREE_CODE (arg01) == INTEGER_CST
11759 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11760 fold_overflow_warning (("assuming signed overflow does not "
11761 "occur when assuming that "
11762 "(X + c) >= X is always true"),
11763 WARN_STRICT_OVERFLOW_ALL);
11764 return constant_boolean_node (1, type);
11767 if (TREE_CODE (arg01) == INTEGER_CST)
11769 /* Convert X + c > X and X - c < X to true for integers. */
11770 if (code == GT_EXPR
11771 && ((code0 == PLUS_EXPR && is_positive > 0)
11772 || (code0 == MINUS_EXPR && is_positive < 0)))
11774 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11775 fold_overflow_warning (("assuming signed overflow does "
11776 "not occur when assuming that "
11777 "(X + c) > X is always true"),
11778 WARN_STRICT_OVERFLOW_ALL);
11779 return constant_boolean_node (1, type);
11782 if (code == LT_EXPR
11783 && ((code0 == MINUS_EXPR && is_positive > 0)
11784 || (code0 == PLUS_EXPR && is_positive < 0)))
11786 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11787 fold_overflow_warning (("assuming signed overflow does "
11788 "not occur when assuming that "
11789 "(X - c) < X is always true"),
11790 WARN_STRICT_OVERFLOW_ALL);
11791 return constant_boolean_node (1, type);
11794 /* Convert X + c <= X and X - c >= X to false for integers. */
11795 if (code == LE_EXPR
11796 && ((code0 == PLUS_EXPR && is_positive > 0)
11797 || (code0 == MINUS_EXPR && is_positive < 0)))
11799 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11800 fold_overflow_warning (("assuming signed overflow does "
11801 "not occur when assuming that "
11802 "(X + c) <= X is always false"),
11803 WARN_STRICT_OVERFLOW_ALL);
11804 return constant_boolean_node (0, type);
11807 if (code == GE_EXPR
11808 && ((code0 == MINUS_EXPR && is_positive > 0)
11809 || (code0 == PLUS_EXPR && is_positive < 0)))
11811 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11812 fold_overflow_warning (("assuming signed overflow does "
11813 "not occur when assuming that "
11814 "(X - c) >= X is always false"),
11815 WARN_STRICT_OVERFLOW_ALL);
11816 return constant_boolean_node (0, type);
11821 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11822 This transformation affects the cases which are handled in later
11823 optimizations involving comparisons with non-negative constants. */
11824 if (TREE_CODE (arg1) == INTEGER_CST
11825 && TREE_CODE (arg0) != INTEGER_CST
11826 && tree_int_cst_sgn (arg1) > 0)
11828 if (code == GE_EXPR)
11830 arg1 = const_binop (MINUS_EXPR, arg1,
11831 build_int_cst (TREE_TYPE (arg1), 1), 0);
11832 return fold_build2 (GT_EXPR, type, arg0,
11833 fold_convert (TREE_TYPE (arg0), arg1));
11835 if (code == LT_EXPR)
11837 arg1 = const_binop (MINUS_EXPR, arg1,
11838 build_int_cst (TREE_TYPE (arg1), 1), 0);
11839 return fold_build2 (LE_EXPR, type, arg0,
11840 fold_convert (TREE_TYPE (arg0), arg1));
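/* For example, "x >= 5" becomes "x > 4" and "x < 5" becomes
   "x <= 4". */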
11844 /* Comparisons with the highest or lowest possible integer of
11845 the specified precision will have known values. */
11847 tree arg1_type = TREE_TYPE (arg1);
11848 unsigned int width = TYPE_PRECISION (arg1_type);
11850 if (TREE_CODE (arg1) == INTEGER_CST
11851 && !TREE_OVERFLOW (arg1)
11852 && width <= 2 * HOST_BITS_PER_WIDE_INT
11853 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11855 HOST_WIDE_INT signed_max_hi;
11856 unsigned HOST_WIDE_INT signed_max_lo;
11857 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11859 if (width <= HOST_BITS_PER_WIDE_INT)
11861 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11862 - 1;
11863 signed_max_hi = 0;
11864 max_hi = 0;
11866 if (TYPE_UNSIGNED (arg1_type))
11868 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11869 min_lo = 0;
11870 min_hi = 0;
11872 else
11874 max_lo = signed_max_lo;
11875 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11876 min_hi = -1;
11879 else
11881 width -= HOST_BITS_PER_WIDE_INT;
11882 signed_max_lo = -1;
11883 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11884 - 1;
11885 max_lo = -1;
11886 min_lo = 0;
11888 if (TYPE_UNSIGNED (arg1_type))
11890 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11891 min_hi = 0;
11893 else
11895 max_hi = signed_max_hi;
11896 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11900 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11901 && TREE_INT_CST_LOW (arg1) == max_lo)
11902 switch (code)
11904 case GT_EXPR:
11905 return omit_one_operand (type, integer_zero_node, arg0);
11907 case GE_EXPR:
11908 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11910 case LE_EXPR:
11911 return omit_one_operand (type, integer_one_node, arg0);
11913 case LT_EXPR:
11914 return fold_build2 (NE_EXPR, type, arg0, arg1);
11916 /* The GE_EXPR and LT_EXPR cases above are not normally
11917 reached because of previous transformations. */
11919 default:
11920 break;
11922 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11923 == max_hi
11924 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11925 switch (code)
11927 case GT_EXPR:
11928 arg1 = const_binop (PLUS_EXPR, arg1,
11929 build_int_cst (TREE_TYPE (arg1), 1), 0);
11930 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11931 case LE_EXPR:
11932 arg1 = const_binop (PLUS_EXPR, arg1,
11933 build_int_cst (TREE_TYPE (arg1), 1), 0);
11934 return fold_build2 (NE_EXPR, type, arg0, arg1);
11935 default:
11936 break;
11938 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11939 == min_hi
11940 && TREE_INT_CST_LOW (arg1) == min_lo)
11941 switch (code)
11943 case LT_EXPR:
11944 return omit_one_operand (type, integer_zero_node, arg0);
11946 case LE_EXPR:
11947 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11949 case GE_EXPR:
11950 return omit_one_operand (type, integer_one_node, arg0);
11952 case GT_EXPR:
11953 return fold_build2 (NE_EXPR, type, op0, op1);
11955 default:
11956 break;
11958 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11959 == min_hi
11960 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11961 switch (code)
11963 case GE_EXPR:
11964 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11965 return fold_build2 (NE_EXPR, type, arg0, arg1);
11966 case LT_EXPR:
11967 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11968 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11969 default:
11970 break;
11973 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11974 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11975 && TYPE_UNSIGNED (arg1_type)
11976 /* We will flip the signedness of the comparison operator
11977 associated with the mode of arg1, so the sign bit is
11978 specified by this mode. Check that arg1 is the signed
11979 max associated with this sign bit. */
11980 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11981 /* signed_type does not work on pointer types. */
11982 && INTEGRAL_TYPE_P (arg1_type))
11984 /* The following case also applies to X < signed_max+1
11985 and X >= signed_max+1 because of previous transformations. */
11986 if (code == LE_EXPR || code == GT_EXPR)
11988 tree st0, st1;
11989 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11990 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11991 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11992 type, fold_convert (st0, arg0),
11993 build_int_cst (st1, 0));
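/* For example, with 32-bit unsigned x, "x > 0x7fffffff" becomes
   "(int) x < 0". */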
11999 /* If we are comparing an ABS_EXPR with a constant, we can
12000 convert all the cases into explicit comparisons, but they may
12001 well not be faster than doing the ABS and one comparison.
12002 But ABS (X) <= C is a range comparison, which becomes a subtraction
12003 and a comparison, and is probably faster. */
12004 if (code == LE_EXPR
12005 && TREE_CODE (arg1) == INTEGER_CST
12006 && TREE_CODE (arg0) == ABS_EXPR
12007 && ! TREE_SIDE_EFFECTS (arg0)
12008 && (0 != (tem = negate_expr (arg1)))
12009 && TREE_CODE (tem) == INTEGER_CST
12010 && !TREE_OVERFLOW (tem))
12011 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12012 build2 (GE_EXPR, type,
12013 TREE_OPERAND (arg0, 0), tem),
12014 build2 (LE_EXPR, type,
12015 TREE_OPERAND (arg0, 0), arg1));
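/* For example, "abs (x) <= 5" becomes "x >= -5 && x <= 5". */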
12017 /* Convert ABS_EXPR<x> >= 0 to true. */
12018 strict_overflow_p = false;
12019 if (code == GE_EXPR
12020 && (integer_zerop (arg1)
12021 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12022 && real_zerop (arg1)))
12023 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12025 if (strict_overflow_p)
12026 fold_overflow_warning (("assuming signed overflow does not occur "
12027 "when simplifying comparison of "
12028 "absolute value and zero"),
12029 WARN_STRICT_OVERFLOW_CONDITIONAL);
12030 return omit_one_operand (type, integer_one_node, arg0);
12033 /* Convert ABS_EXPR<x> < 0 to false. */
12034 strict_overflow_p = false;
12035 if (code == LT_EXPR
12036 && (integer_zerop (arg1) || real_zerop (arg1))
12037 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12039 if (strict_overflow_p)
12040 fold_overflow_warning (("assuming signed overflow does not occur "
12041 "when simplifying comparison of "
12042 "absolute value and zero"),
12043 WARN_STRICT_OVERFLOW_CONDITIONAL);
12044 return omit_one_operand (type, integer_zero_node, arg0);
12047 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12048 and similarly for >= into !=. */
12049 if ((code == LT_EXPR || code == GE_EXPR)
12050 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12051 && TREE_CODE (arg1) == LSHIFT_EXPR
12052 && integer_onep (TREE_OPERAND (arg1, 0)))
12053 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12054 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12055 TREE_OPERAND (arg1, 1)),
12056 build_int_cst (TREE_TYPE (arg0), 0));
12058 if ((code == LT_EXPR || code == GE_EXPR)
12059 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12060 && (TREE_CODE (arg1) == NOP_EXPR
12061 || TREE_CODE (arg1) == CONVERT_EXPR)
12062 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12063 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12064 return
12065 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12066 fold_convert (TREE_TYPE (arg0),
12067 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12068 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12069 1))),
12070 build_int_cst (TREE_TYPE (arg0), 0));
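/* For example, with unsigned x, "x < (1 << y)" becomes
   "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0". */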
12072 return NULL_TREE;
12074 case UNORDERED_EXPR:
12075 case ORDERED_EXPR:
12076 case UNLT_EXPR:
12077 case UNLE_EXPR:
12078 case UNGT_EXPR:
12079 case UNGE_EXPR:
12080 case UNEQ_EXPR:
12081 case LTGT_EXPR:
12082 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12084 t1 = fold_relational_const (code, type, arg0, arg1);
12085 if (t1 != NULL_TREE)
12086 return t1;
12089 /* If the first operand is NaN, the result is constant. */
12090 if (TREE_CODE (arg0) == REAL_CST
12091 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12092 && (code != LTGT_EXPR || ! flag_trapping_math))
12094 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12095 ? integer_zero_node
12096 : integer_one_node;
12097 return omit_one_operand (type, t1, arg1);
12100 /* If the second operand is NaN, the result is constant. */
12101 if (TREE_CODE (arg1) == REAL_CST
12102 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12103 && (code != LTGT_EXPR || ! flag_trapping_math))
12105 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12106 ? integer_zero_node
12107 : integer_one_node;
12108 return omit_one_operand (type, t1, arg0);
12111 /* Simplify unordered comparison of something with itself. */
12112 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12113 && operand_equal_p (arg0, arg1, 0))
12114 return constant_boolean_node (1, type);
12116 if (code == LTGT_EXPR
12117 && !flag_trapping_math
12118 && operand_equal_p (arg0, arg1, 0))
12119 return constant_boolean_node (0, type);
12121 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12123 tree targ0 = strip_float_extensions (arg0);
12124 tree targ1 = strip_float_extensions (arg1);
12125 tree newtype = TREE_TYPE (targ0);
12127 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12128 newtype = TREE_TYPE (targ1);
12130 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12131 return fold_build2 (code, type, fold_convert (newtype, targ0),
12132 fold_convert (newtype, targ1));
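/* For example, with floats f and g, "(double) f < (double) g"
   folds to "f < g". */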
12135 return NULL_TREE;
12137 case COMPOUND_EXPR:
12138 /* When pedantic, a compound expression can be neither an lvalue
12139 nor an integer constant expression. */
12140 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12141 return NULL_TREE;
12142 /* Don't let (0, 0) be a null pointer constant. */
12143 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12144 : fold_convert (type, arg1);
12145 return pedantic_non_lvalue (tem);
12147 case COMPLEX_EXPR:
12148 if ((TREE_CODE (arg0) == REAL_CST
12149 && TREE_CODE (arg1) == REAL_CST)
12150 || (TREE_CODE (arg0) == INTEGER_CST
12151 && TREE_CODE (arg1) == INTEGER_CST))
12152 return build_complex (type, arg0, arg1);
12153 return NULL_TREE;
12155 case ASSERT_EXPR:
12156 /* An ASSERT_EXPR should never be passed to fold_binary. */
12157 gcc_unreachable ();
12159 default:
12160 return NULL_TREE;
12161 } /* switch (code) */
12164 /* Callback for walk_tree, looking for LABEL_EXPR.
12165 Returns *TP if it is a LABEL_EXPR, and NULL_TREE otherwise.
12166 Do not check the sub-tree of GOTO_EXPR. */
12168 static tree
12169 contains_label_1 (tree *tp,
12170 int *walk_subtrees,
12171 void *data ATTRIBUTE_UNUSED)
12173 switch (TREE_CODE (*tp))
12175 case LABEL_EXPR:
12176 return *tp;
12177 case GOTO_EXPR:
12178 *walk_subtrees = 0;
12179 /* no break */
12180 default:
12181 return NULL_TREE;
12185 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
12186 accessible from outside the sub-tree. Returns true if such a
12187 label is found, false otherwise. */
12189 static bool
12190 contains_label_p (tree st)
12192 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
12195 /* Fold a ternary expression of code CODE and type TYPE with operands
12196 OP0, OP1, and OP2. Return the folded expression if folding is
12197 successful. Otherwise, return NULL_TREE. */
12199 tree
12200 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12202 tree tem;
12203 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12204 enum tree_code_class kind = TREE_CODE_CLASS (code);
12206 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12207 && TREE_CODE_LENGTH (code) == 3);
12209 /* Strip any conversions that don't change the mode. This is safe
12210 for every expression, except for a comparison expression because
12211 its signedness is derived from its operands. So, in the latter
12212 case, only strip conversions that don't change the signedness.
12214 Note that this is done as an internal manipulation within the
12215 constant folder, in order to find the simplest representation of
12216 the arguments so that their form can be studied. In any case,
12217 the appropriate type conversions should be put back in the tree
12218 that will get out of the constant folder. */
12219 if (op0)
12221 arg0 = op0;
12222 STRIP_NOPS (arg0);
12225 if (op1)
12227 arg1 = op1;
12228 STRIP_NOPS (arg1);
12231 switch (code)
12233 case COMPONENT_REF:
12234 if (TREE_CODE (arg0) == CONSTRUCTOR
12235 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12237 unsigned HOST_WIDE_INT idx;
12238 tree field, value;
12239 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12240 if (field == arg1)
12241 return value;
12243 return NULL_TREE;
12245 case COND_EXPR:
12246 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12247 so all simple results must be passed through pedantic_non_lvalue. */
12248 if (TREE_CODE (arg0) == INTEGER_CST)
12250 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12251 tem = integer_zerop (arg0) ? op2 : op1;
12252 /* Only optimize constant conditions when the selected branch
12253 has the same type as the COND_EXPR. This avoids optimizing
12254 away "c ? x : throw", where the throw has a void type.
12255 Avoid throwing away the operand which contains a label. */
12256 if ((!TREE_SIDE_EFFECTS (unused_op)
12257 || !contains_label_p (unused_op))
12258 && (! VOID_TYPE_P (TREE_TYPE (tem))
12259 || VOID_TYPE_P (type)))
12260 return pedantic_non_lvalue (tem);
12261 return NULL_TREE;
12263 if (operand_equal_p (arg1, op2, 0))
12264 return pedantic_omit_one_operand (type, arg1, arg0);
12266 /* If we have A op B ? A : C, we may be able to convert this to a
12267 simpler expression, depending on the operation and the values
12268 of B and C. Signed zeros prevent all of these transformations,
12269 for reasons given above each one.
12271 Also try swapping the arguments and inverting the conditional. */
12272 if (COMPARISON_CLASS_P (arg0)
12273 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12274 arg1, TREE_OPERAND (arg0, 1))
12275 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12277 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12278 if (tem)
12279 return tem;
12282 if (COMPARISON_CLASS_P (arg0)
12283 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12284 op2,
12285 TREE_OPERAND (arg0, 1))
12286 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12288 tem = fold_truth_not_expr (arg0);
12289 if (tem && COMPARISON_CLASS_P (tem))
12291 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12292 if (tem)
12293 return tem;
12297 /* If the second operand is simpler than the third, swap them
12298 since that produces better jump optimization results. */
12299 if (truth_value_p (TREE_CODE (arg0))
12300 && tree_swap_operands_p (op1, op2, false))
12302 /* See if this can be inverted. If it can't, possibly because
12303 it was a floating-point inequality comparison, don't do
12304 anything. */
12305 tem = fold_truth_not_expr (arg0);
12306 if (tem)
12307 return fold_build3 (code, type, tem, op2, op1);
12310 /* Convert A ? 1 : 0 to simply A. */
12311 if (integer_onep (op1)
12312 && integer_zerop (op2)
12313 /* If we try to convert OP0 to our type, the
12314 call to fold will try to move the conversion inside
12315 a COND, which will recurse. In that case, the COND_EXPR
12316 is probably the best choice, so leave it alone. */
12317 && type == TREE_TYPE (arg0))
12318 return pedantic_non_lvalue (arg0);
12320 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12321 over COND_EXPR in cases such as floating point comparisons. */
12322 if (integer_zerop (op1)
12323 && integer_onep (op2)
12324 && truth_value_p (TREE_CODE (arg0)))
12325 return pedantic_non_lvalue (fold_convert (type,
12326 invert_truthvalue (arg0)));
12328 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12329 if (TREE_CODE (arg0) == LT_EXPR
12330 && integer_zerop (TREE_OPERAND (arg0, 1))
12331 && integer_zerop (op2)
12332 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12334 /* sign_bit_p only checks ARG1 bits within A's precision.
12335 If <sign bit of A> has wider type than A, bits outside
12336 of A's precision in <sign bit of A> need to be checked.
12337 If they are all 0, this optimization needs to be done
12338 in A's unsigned type; if they are all 1, in A's signed type;
12339 otherwise it can't be done. */
12340 if (TYPE_PRECISION (TREE_TYPE (tem))
12341 < TYPE_PRECISION (TREE_TYPE (arg1))
12342 && TYPE_PRECISION (TREE_TYPE (tem))
12343 < TYPE_PRECISION (type))
12345 unsigned HOST_WIDE_INT mask_lo;
12346 HOST_WIDE_INT mask_hi;
12347 int inner_width, outer_width;
12348 tree tem_type;
12350 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12351 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12352 if (outer_width > TYPE_PRECISION (type))
12353 outer_width = TYPE_PRECISION (type);
12355 if (outer_width > HOST_BITS_PER_WIDE_INT)
12357 mask_hi = ((unsigned HOST_WIDE_INT) -1
12358 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12359 mask_lo = -1;
12361 else
12363 mask_hi = 0;
12364 mask_lo = ((unsigned HOST_WIDE_INT) -1
12365 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12367 if (inner_width > HOST_BITS_PER_WIDE_INT)
12369 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12370 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12371 mask_lo = 0;
12373 else
12374 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12375 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12377 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12378 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12380 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12381 tem = fold_convert (tem_type, tem);
12383 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12384 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12386 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12387 tem = fold_convert (tem_type, tem);
12389 else
12390 tem = NULL;
12393 if (tem)
12394 return fold_convert (type,
12395 fold_build2 (BIT_AND_EXPR,
12396 TREE_TYPE (tem), tem,
12397 fold_convert (TREE_TYPE (tem),
12398 arg1)));
12401 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12402 already handled above. */
12403 if (TREE_CODE (arg0) == BIT_AND_EXPR
12404 && integer_onep (TREE_OPERAND (arg0, 1))
12405 && integer_zerop (op2)
12406 && integer_pow2p (arg1))
12408 tree tem = TREE_OPERAND (arg0, 0);
12409 STRIP_NOPS (tem);
12410 if (TREE_CODE (tem) == RSHIFT_EXPR
12411 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12412 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12413 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12414 return fold_build2 (BIT_AND_EXPR, type,
12415 TREE_OPERAND (tem, 0), arg1);
12418 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12419 is probably obsolete because the first operand should be a
12420 truth value (that's why we have the two cases above), but let's
12421 leave it in until we can confirm this for all front-ends. */
12422 if (integer_zerop (op2)
12423 && TREE_CODE (arg0) == NE_EXPR
12424 && integer_zerop (TREE_OPERAND (arg0, 1))
12425 && integer_pow2p (arg1)
12426 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12428 arg1, OEP_ONLY_CONST))
12429 return pedantic_non_lvalue (fold_convert (type,
12430 TREE_OPERAND (arg0, 0)));
12432 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12433 if (integer_zerop (op2)
12434 && truth_value_p (TREE_CODE (arg0))
12435 && truth_value_p (TREE_CODE (arg1)))
12436 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12437 fold_convert (type, arg0),
12438 arg1);
12440 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12441 if (integer_onep (op2)
12442 && truth_value_p (TREE_CODE (arg0))
12443 && truth_value_p (TREE_CODE (arg1)))
12445 /* Only perform transformation if ARG0 is easily inverted. */
12446 tem = fold_truth_not_expr (arg0);
12447 if (tem)
12448 return fold_build2 (TRUTH_ORIF_EXPR, type,
12449 fold_convert (type, tem),
12450 arg1);
12453 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12454 if (integer_zerop (arg1)
12455 && truth_value_p (TREE_CODE (arg0))
12456 && truth_value_p (TREE_CODE (op2)))
12458 /* Only perform transformation if ARG0 is easily inverted. */
12459 tem = fold_truth_not_expr (arg0);
12460 if (tem)
12461 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12462 fold_convert (type, tem),
12463 op2);
12466 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12467 if (integer_onep (arg1)
12468 && truth_value_p (TREE_CODE (arg0))
12469 && truth_value_p (TREE_CODE (op2)))
12470 return fold_build2 (TRUTH_ORIF_EXPR, type,
12471 fold_convert (type, arg0),
12472 op2);
12474 return NULL_TREE;
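/* A concrete (hypothetical) instance of the truth-value conversions
   above, not code from the original source: with A and B both
   comparisons, say "x > 0" and "y > 0",

     tree t = fold_build3 (COND_EXPR, boolean_type_node,
                           a, b, boolean_false_node);

   comes back as the TRUTH_ANDIF_EXPR "a && b" rather than as a
   COND_EXPR, per the "A ? B : 0" rule.  */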
12476 case CALL_EXPR:
12477 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12478 of fold_ternary on them. */
12479 gcc_unreachable ();
12481 case BIT_FIELD_REF:
12482 if ((TREE_CODE (arg0) == VECTOR_CST
12483 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12484 && type == TREE_TYPE (TREE_TYPE (arg0))
12485 && host_integerp (arg1, 1)
12486 && host_integerp (op2, 1))
12488 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12489 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12491 if (width != 0
12492 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12493 && (idx % width) == 0
12494 && (idx = idx / width)
12495 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12497 tree elements = NULL_TREE;
12499 if (TREE_CODE (arg0) == VECTOR_CST)
12500 elements = TREE_VECTOR_CST_ELTS (arg0);
12501 else
12503 unsigned HOST_WIDE_INT idx;
12504 tree value;
12506 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12507 elements = tree_cons (NULL_TREE, value, elements);
12509 while (idx-- > 0 && elements)
12510 elements = TREE_CHAIN (elements);
12511 if (elements)
12512 return TREE_VALUE (elements);
12513 else
12514 return fold_convert (type, integer_zero_node);
12517 return NULL_TREE;
12519 default:
12520 return NULL_TREE;
12521 } /* switch (code) */
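/* A minimal sketch of driving fold_ternary through fold_build3; X and Y
   are hypothetical trees of type integer_type_node, not from the
   original source:

     tree cond = boolean_true_node;
     tree t = fold_build3 (COND_EXPR, integer_type_node, cond, x, y);

   Because the condition is the INTEGER_CST 1, the COND_EXPR case above
   folds the whole expression to X (passed through pedantic_non_lvalue)
   instead of building a COND_EXPR node.  */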
12524 /* Perform constant folding and related simplification of EXPR.
12525 The related simplifications include x*1 => x, x*0 => 0, etc.,
12526 and application of the associative law.
12527 NOP_EXPR conversions may be removed freely (as long as we
12528 are careful not to change the type of the overall expression).
12529 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12530 but we can constant-fold them if they have constant operands. */
12532 #ifdef ENABLE_FOLD_CHECKING
12533 # define fold(x) fold_1 (x)
12534 static tree fold_1 (tree);
12535 static
12536 #endif
12537 tree
12538 fold (tree expr)
12540 const tree t = expr;
12541 enum tree_code code = TREE_CODE (t);
12542 enum tree_code_class kind = TREE_CODE_CLASS (code);
12543 tree tem;
12545 /* Return right away if a constant. */
12546 if (kind == tcc_constant)
12547 return t;
12549 /* CALL_EXPR-like objects with variable numbers of operands are
12550 treated specially. */
12551 if (kind == tcc_vl_exp)
12553 if (code == CALL_EXPR)
12555 tem = fold_call_expr (expr, false);
12556 return tem ? tem : expr;
12558 return expr;
12561 if (IS_EXPR_CODE_CLASS (kind)
12562 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12564 tree type = TREE_TYPE (t);
12565 tree op0, op1, op2;
12567 switch (TREE_CODE_LENGTH (code))
12569 case 1:
12570 op0 = TREE_OPERAND (t, 0);
12571 tem = fold_unary (code, type, op0);
12572 return tem ? tem : expr;
12573 case 2:
12574 op0 = TREE_OPERAND (t, 0);
12575 op1 = TREE_OPERAND (t, 1);
12576 tem = fold_binary (code, type, op0, op1);
12577 return tem ? tem : expr;
12578 case 3:
12579 op0 = TREE_OPERAND (t, 0);
12580 op1 = TREE_OPERAND (t, 1);
12581 op2 = TREE_OPERAND (t, 2);
12582 tem = fold_ternary (code, type, op0, op1, op2);
12583 return tem ? tem : expr;
12584 default:
12585 break;
12589 switch (code)
12591 case CONST_DECL:
12592 return fold (DECL_INITIAL (t));
12594 default:
12595 return t;
12596 } /* switch (code) */
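/* A usage sketch (illustrative only; the trees are built on the spot):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, two, three));

   SUM is the INTEGER_CST 5; had no simplification applied, fold would
   have returned the original PLUS_EXPR unchanged.  */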
12599 #ifdef ENABLE_FOLD_CHECKING
12600 #undef fold
12602 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12603 static void fold_check_failed (tree, tree);
12604 void print_fold_checksum (tree);
12606 /* When --enable-checking=fold, compute a digest of expr before
12607 and after the actual fold call, to verify that fold did not
12608 accidentally change the original expr. */
12610 tree
12611 fold (tree expr)
12613 tree ret;
12614 struct md5_ctx ctx;
12615 unsigned char checksum_before[16], checksum_after[16];
12616 htab_t ht;
12618 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12619 md5_init_ctx (&ctx);
12620 fold_checksum_tree (expr, &ctx, ht);
12621 md5_finish_ctx (&ctx, checksum_before);
12622 htab_empty (ht);
12624 ret = fold_1 (expr);
12626 md5_init_ctx (&ctx);
12627 fold_checksum_tree (expr, &ctx, ht);
12628 md5_finish_ctx (&ctx, checksum_after);
12629 htab_delete (ht);
12631 if (memcmp (checksum_before, checksum_after, 16))
12632 fold_check_failed (expr, ret);
12634 return ret;
12637 void
12638 print_fold_checksum (tree expr)
12640 struct md5_ctx ctx;
12641 unsigned char checksum[16], cnt;
12642 htab_t ht;
12644 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12645 md5_init_ctx (&ctx);
12646 fold_checksum_tree (expr, &ctx, ht);
12647 md5_finish_ctx (&ctx, checksum);
12648 htab_delete (ht);
12649 for (cnt = 0; cnt < 16; ++cnt)
12650 fprintf (stderr, "%02x", checksum[cnt]);
12651 putc ('\n', stderr);
12654 static void
12655 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12657 internal_error ("fold check: original tree changed by fold");
12660 static void
12661 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12663 void **slot;
12664 enum tree_code code;
12665 struct tree_function_decl buf;
12666 int i, len;
12668 recursive_label:
12670 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12671 <= sizeof (struct tree_function_decl))
12672 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12673 if (expr == NULL)
12674 return;
12675 slot = htab_find_slot (ht, expr, INSERT);
12676 if (*slot != NULL)
12677 return;
12678 *slot = expr;
12679 code = TREE_CODE (expr);
12680 if (TREE_CODE_CLASS (code) == tcc_declaration
12681 && DECL_ASSEMBLER_NAME_SET_P (expr))
12683 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12684 memcpy ((char *) &buf, expr, tree_size (expr));
12685 expr = (tree) &buf;
12686 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12688 else if (TREE_CODE_CLASS (code) == tcc_type
12689 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12690 || TYPE_CACHED_VALUES_P (expr)
12691 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12693 /* Allow these fields to be modified. */
12694 memcpy ((char *) &buf, expr, tree_size (expr));
12695 expr = (tree) &buf;
12696 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12697 TYPE_POINTER_TO (expr) = NULL;
12698 TYPE_REFERENCE_TO (expr) = NULL;
12699 if (TYPE_CACHED_VALUES_P (expr))
12701 TYPE_CACHED_VALUES_P (expr) = 0;
12702 TYPE_CACHED_VALUES (expr) = NULL;
12705 md5_process_bytes (expr, tree_size (expr), ctx);
12706 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12707 if (TREE_CODE_CLASS (code) != tcc_type
12708 && TREE_CODE_CLASS (code) != tcc_declaration
12709 && code != TREE_LIST)
12710 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12711 switch (TREE_CODE_CLASS (code))
12713 case tcc_constant:
12714 switch (code)
12716 case STRING_CST:
12717 md5_process_bytes (TREE_STRING_POINTER (expr),
12718 TREE_STRING_LENGTH (expr), ctx);
12719 break;
12720 case COMPLEX_CST:
12721 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12722 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12723 break;
12724 case VECTOR_CST:
12725 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12726 break;
12727 default:
12728 break;
12730 break;
12731 case tcc_exceptional:
12732 switch (code)
12734 case TREE_LIST:
12735 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12736 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12737 expr = TREE_CHAIN (expr);
12738 goto recursive_label;
12739 break;
12740 case TREE_VEC:
12741 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12742 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12743 break;
12744 default:
12745 break;
12747 break;
12748 case tcc_expression:
12749 case tcc_reference:
12750 case tcc_comparison:
12751 case tcc_unary:
12752 case tcc_binary:
12753 case tcc_statement:
12754 case tcc_vl_exp:
12755 len = TREE_OPERAND_LENGTH (expr);
12756 for (i = 0; i < len; ++i)
12757 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12758 break;
12759 case tcc_declaration:
12760 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12761 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12762 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12764 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12765 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12766 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12767 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12768 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12770 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12771 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12773 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12775 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12776 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12777 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12779 break;
12780 case tcc_type:
12781 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12782 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12783 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12784 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12785 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12786 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12787 if (INTEGRAL_TYPE_P (expr)
12788 || SCALAR_FLOAT_TYPE_P (expr))
12790 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12791 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12793 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12794 if (TREE_CODE (expr) == RECORD_TYPE
12795 || TREE_CODE (expr) == UNION_TYPE
12796 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12797 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12798 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12799 break;
12800 default:
12801 break;
12805 #endif
12807 /* Fold a unary tree expression with code CODE of type TYPE with an
12808 operand OP0. Return a folded expression if successful. Otherwise,
12809 return a tree expression with code CODE of type TYPE with an
12810 operand OP0. */
12812 tree
12813 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12815 tree tem;
12816 #ifdef ENABLE_FOLD_CHECKING
12817 unsigned char checksum_before[16], checksum_after[16];
12818 struct md5_ctx ctx;
12819 htab_t ht;
12821 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12822 md5_init_ctx (&ctx);
12823 fold_checksum_tree (op0, &ctx, ht);
12824 md5_finish_ctx (&ctx, checksum_before);
12825 htab_empty (ht);
12826 #endif
12828 tem = fold_unary (code, type, op0);
12829 if (!tem)
12830 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12832 #ifdef ENABLE_FOLD_CHECKING
12833 md5_init_ctx (&ctx);
12834 fold_checksum_tree (op0, &ctx, ht);
12835 md5_finish_ctx (&ctx, checksum_after);
12836 htab_delete (ht);
12838 if (memcmp (checksum_before, checksum_after, 16))
12839 fold_check_failed (op0, tem);
12840 #endif
12841 return tem;
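/* For example (a sketch; X is a hypothetical tree of type
   integer_type_node):

     tree neg = fold_build1 (NEGATE_EXPR, integer_type_node, x);

   yields a folded INTEGER_CST when X is constant and a freshly built
   NEGATE_EXPR otherwise, with the checksum logic above catching any
   accidental modification of X under --enable-checking=fold.  */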
12844 /* Fold a binary tree expression with code CODE of type TYPE with
12845 operands OP0 and OP1. Return a folded expression if successful.
12846 Otherwise, return a tree expression with code CODE of type TYPE
12847 with operands OP0 and OP1. */
12849 tree
12850 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12851 MEM_STAT_DECL)
12853 tree tem;
12854 #ifdef ENABLE_FOLD_CHECKING
12855 unsigned char checksum_before_op0[16],
12856 checksum_before_op1[16],
12857 checksum_after_op0[16],
12858 checksum_after_op1[16];
12859 struct md5_ctx ctx;
12860 htab_t ht;
12862 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12863 md5_init_ctx (&ctx);
12864 fold_checksum_tree (op0, &ctx, ht);
12865 md5_finish_ctx (&ctx, checksum_before_op0);
12866 htab_empty (ht);
12868 md5_init_ctx (&ctx);
12869 fold_checksum_tree (op1, &ctx, ht);
12870 md5_finish_ctx (&ctx, checksum_before_op1);
12871 htab_empty (ht);
12872 #endif
12874 tem = fold_binary (code, type, op0, op1);
12875 if (!tem)
12876 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12878 #ifdef ENABLE_FOLD_CHECKING
12879 md5_init_ctx (&ctx);
12880 fold_checksum_tree (op0, &ctx, ht);
12881 md5_finish_ctx (&ctx, checksum_after_op0);
12882 htab_empty (ht);
12884 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12885 fold_check_failed (op0, tem);
12887 md5_init_ctx (&ctx);
12888 fold_checksum_tree (op1, &ctx, ht);
12889 md5_finish_ctx (&ctx, checksum_after_op1);
12890 htab_delete (ht);
12892 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12893 fold_check_failed (op1, tem);
12894 #endif
12895 return tem;
12898 /* Fold a ternary tree expression with code CODE of type TYPE with
12899 operands OP0, OP1, and OP2. Return a folded expression if
12900 successful. Otherwise, return a tree expression with code CODE of
12901 type TYPE with operands OP0, OP1, and OP2. */
12903 tree
12904 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12905 MEM_STAT_DECL)
12907 tree tem;
12908 #ifdef ENABLE_FOLD_CHECKING
12909 unsigned char checksum_before_op0[16],
12910 checksum_before_op1[16],
12911 checksum_before_op2[16],
12912 checksum_after_op0[16],
12913 checksum_after_op1[16],
12914 checksum_after_op2[16];
12915 struct md5_ctx ctx;
12916 htab_t ht;
12918 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12919 md5_init_ctx (&ctx);
12920 fold_checksum_tree (op0, &ctx, ht);
12921 md5_finish_ctx (&ctx, checksum_before_op0);
12922 htab_empty (ht);
12924 md5_init_ctx (&ctx);
12925 fold_checksum_tree (op1, &ctx, ht);
12926 md5_finish_ctx (&ctx, checksum_before_op1);
12927 htab_empty (ht);
12929 md5_init_ctx (&ctx);
12930 fold_checksum_tree (op2, &ctx, ht);
12931 md5_finish_ctx (&ctx, checksum_before_op2);
12932 htab_empty (ht);
12933 #endif
12935 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12936 tem = fold_ternary (code, type, op0, op1, op2);
12937 if (!tem)
12938 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12940 #ifdef ENABLE_FOLD_CHECKING
12941 md5_init_ctx (&ctx);
12942 fold_checksum_tree (op0, &ctx, ht);
12943 md5_finish_ctx (&ctx, checksum_after_op0);
12944 htab_empty (ht);
12946 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12947 fold_check_failed (op0, tem);
12949 md5_init_ctx (&ctx);
12950 fold_checksum_tree (op1, &ctx, ht);
12951 md5_finish_ctx (&ctx, checksum_after_op1);
12952 htab_empty (ht);
12954 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12955 fold_check_failed (op1, tem);
12957 md5_init_ctx (&ctx);
12958 fold_checksum_tree (op2, &ctx, ht);
12959 md5_finish_ctx (&ctx, checksum_after_op2);
12960 htab_delete (ht);
12962 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12963 fold_check_failed (op2, tem);
12964 #endif
12965 return tem;
12968 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12969 arguments in ARGARRAY, and a null static chain.
12970 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12971 of type TYPE from the given operands as constructed by build_call_array. */
12973 tree
12974 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
12976 tree tem;
12977 #ifdef ENABLE_FOLD_CHECKING
12978 unsigned char checksum_before_fn[16],
12979 checksum_before_arglist[16],
12980 checksum_after_fn[16],
12981 checksum_after_arglist[16];
12982 struct md5_ctx ctx;
12983 htab_t ht;
12984 int i;
12986 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12987 md5_init_ctx (&ctx);
12988 fold_checksum_tree (fn, &ctx, ht);
12989 md5_finish_ctx (&ctx, checksum_before_fn);
12990 htab_empty (ht);
12992 md5_init_ctx (&ctx);
12993 for (i = 0; i < nargs; i++)
12994 fold_checksum_tree (argarray[i], &ctx, ht);
12995 md5_finish_ctx (&ctx, checksum_before_arglist);
12996 htab_empty (ht);
12997 #endif
12999 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13001 #ifdef ENABLE_FOLD_CHECKING
13002 md5_init_ctx (&ctx);
13003 fold_checksum_tree (fn, &ctx, ht);
13004 md5_finish_ctx (&ctx, checksum_after_fn);
13005 htab_empty (ht);
13007 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13008 fold_check_failed (fn, tem);
13010 md5_init_ctx (&ctx);
13011 for (i = 0; i < nargs; i++)
13012 fold_checksum_tree (argarray[i], &ctx, ht);
13013 md5_finish_ctx (&ctx, checksum_after_arglist);
13014 htab_delete (ht);
13016 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13017 fold_check_failed (NULL_TREE, tem);
13018 #endif
13019 return tem;
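/* A sketch of folding a builtin call this way; it assumes the abs
   builtin declaration is available and is not code from the original
   source:

     tree fn = build_fold_addr_expr (built_in_decls[BUILT_IN_ABS]);
     tree arg = build_int_cst (integer_type_node, -5);
     tree res = fold_build_call_array (integer_type_node, fn, 1, &arg);

   RES may fold to the INTEGER_CST 5; when no folding applies, a plain
   CALL_EXPR is built by build_call_array instead.  */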
13022 /* Perform constant folding and related simplification of initializer
13023 expression EXPR. These behave identically to "fold_buildN" but ignore
13024 potential run-time traps and exceptions that fold must preserve. */
13026 #define START_FOLD_INIT \
13027 int saved_signaling_nans = flag_signaling_nans;\
13028 int saved_trapping_math = flag_trapping_math;\
13029 int saved_rounding_math = flag_rounding_math;\
13030 int saved_trapv = flag_trapv;\
13031 int saved_folding_initializer = folding_initializer;\
13032 flag_signaling_nans = 0;\
13033 flag_trapping_math = 0;\
13034 flag_rounding_math = 0;\
13035 flag_trapv = 0;\
13036 folding_initializer = 1;
13038 #define END_FOLD_INIT \
13039 flag_signaling_nans = saved_signaling_nans;\
13040 flag_trapping_math = saved_trapping_math;\
13041 flag_rounding_math = saved_rounding_math;\
13042 flag_trapv = saved_trapv;\
13043 folding_initializer = saved_folding_initializer;
13045 tree
13046 fold_build1_initializer (enum tree_code code, tree type, tree op)
13048 tree result;
13049 START_FOLD_INIT;
13051 result = fold_build1 (code, type, op);
13053 END_FOLD_INIT;
13054 return result;
13057 tree
13058 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13060 tree result;
13061 START_FOLD_INIT;
13063 result = fold_build2 (code, type, op0, op1);
13065 END_FOLD_INIT;
13066 return result;
13069 tree
13070 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13071 tree op2)
13073 tree result;
13074 START_FOLD_INIT;
13076 result = fold_build3 (code, type, op0, op1, op2);
13078 END_FOLD_INIT;
13079 return result;
13082 tree
13083 fold_build_call_array_initializer (tree type, tree fn,
13084 int nargs, tree *argarray)
13086 tree result;
13087 START_FOLD_INIT;
13089 result = fold_build_call_array (type, fn, nargs, argarray);
13091 END_FOLD_INIT;
13092 return result;
13095 #undef START_FOLD_INIT
13096 #undef END_FOLD_INIT
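/* A sketch of the initializer variants (ONE and THREE are hypothetical
   REAL_CST operands of type double_type_node):

     tree q = fold_build2_initializer (RDIV_EXPR, double_type_node,
                                       one, three);

   Unlike plain fold_build2, this folds the division even under
   -frounding-math or -ftrapping-math, since a static initializer must
   be evaluated at compile time anyway.  */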
13098 /* Determine if first argument is a multiple of second argument. Return 0 if
13099 it is not, or if we cannot easily determine that it is.
13101 An example of the sort of thing we care about (at this point; this routine
13102 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13103 fold cases do now) is discovering that
13105 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13107 is a multiple of
13109 SAVE_EXPR (J * 8)
13111 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13113 This code also handles discovering that
13115 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13117 is a multiple of 8 so we don't have to worry about dealing with a
13118 possible remainder.
13120 Note that we *look* inside a SAVE_EXPR only to determine how it was
13121 calculated; it is not safe for fold to do much of anything else with the
13122 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13123 at run time. For example, the latter example above *cannot* be implemented
13124 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13125 evaluation time of the original SAVE_EXPR is not necessarily the same at
13126 the time the new expression is evaluated. The only optimization of this
13127 sort that would be valid is changing
13129 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13131 divided by 8 to
13133 SAVE_EXPR (I) * SAVE_EXPR (J)
13135 (where the same SAVE_EXPR (J) is used in the original and the
13136 transformed version). */
13138 int
13139 multiple_of_p (tree type, tree top, tree bottom)
13141 if (operand_equal_p (top, bottom, 0))
13142 return 1;
13144 if (TREE_CODE (type) != INTEGER_TYPE)
13145 return 0;
13147 switch (TREE_CODE (top))
13149 case BIT_AND_EXPR:
13150 /* Bitwise and provides a power of two multiple. If the mask is
13151 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13152 if (!integer_pow2p (bottom))
13153 return 0;
13154 /* FALLTHRU */
13156 case MULT_EXPR:
13157 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13158 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13160 case PLUS_EXPR:
13161 case MINUS_EXPR:
13162 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13163 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13165 case LSHIFT_EXPR:
13166 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13168 tree op1, t1;
13170 op1 = TREE_OPERAND (top, 1);
13171 /* const_binop may not detect overflow correctly,
13172 so check for it explicitly here. */
13173 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13174 > TREE_INT_CST_LOW (op1)
13175 && TREE_INT_CST_HIGH (op1) == 0
13176 && 0 != (t1 = fold_convert (type,
13177 const_binop (LSHIFT_EXPR,
13178 size_one_node,
13179 op1, 0)))
13180 && !TREE_OVERFLOW (t1))
13181 return multiple_of_p (type, t1, bottom);
13183 return 0;
13185 case NOP_EXPR:
13186 /* Can't handle conversions from non-integral or wider integral type. */
13187 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13188 || (TYPE_PRECISION (type)
13189 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13190 return 0;
13192 /* ... fall through ... */
13194 case SAVE_EXPR:
13195 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13197 case INTEGER_CST:
13198 if (TREE_CODE (bottom) != INTEGER_CST
13199 || (TYPE_UNSIGNED (type)
13200 && (tree_int_cst_sgn (top) < 0
13201 || tree_int_cst_sgn (bottom) < 0)))
13202 return 0;
13203 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13204 top, bottom, 0));
13206 default:
13207 return 0;
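/* For instance, multiple_of_p (sizetype, size_int (24), size_int (8))
   returns 1, and so does a TOP of the form "N * 8" for the same BOTTOM,
   while any TOP the routine cannot analyze conservatively yields 0.  */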
13211 /* Return true if `t' is known to be non-negative. If the return
13212 value is based on the assumption that signed overflow is undefined,
13213 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13214 *STRICT_OVERFLOW_P. */
13216 bool
13217 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13219 if (t == error_mark_node)
13220 return false;
13222 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13223 return true;
13225 switch (TREE_CODE (t))
13227 case SSA_NAME:
13228 /* Query VRP to see if it has recorded any information about
13229 the range of this object. */
13230 return ssa_name_nonnegative_p (t);
13232 case ABS_EXPR:
13233 /* We can't return 1 if flag_wrapv is set because
13234 ABS_EXPR<INT_MIN> = INT_MIN. */
13235 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13236 return true;
13237 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13239 *strict_overflow_p = true;
13240 return true;
13242 break;
13244 case INTEGER_CST:
13245 return tree_int_cst_sgn (t) >= 0;
13247 case REAL_CST:
13248 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13250 case PLUS_EXPR:
13251 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13252 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13253 strict_overflow_p)
13254 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13255 strict_overflow_p));
13257 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13258 both unsigned and at least 2 bits shorter than the result. */
13259 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13260 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13261 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13263 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13264 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13265 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13266 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13268 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13269 TYPE_PRECISION (inner2)) + 1;
13270 return prec < TYPE_PRECISION (TREE_TYPE (t));
13273 break;
13275 case MULT_EXPR:
13276 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13278 /* x * x for floating point x is always non-negative. */
13279 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13280 return true;
13281 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13282 strict_overflow_p)
13283 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13284 strict_overflow_p));
13287 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13288 both unsigned and their combined precision is less than that of the result. */
13289 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13290 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13291 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13293 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13294 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13295 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13296 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13297 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13298 < TYPE_PRECISION (TREE_TYPE (t));
13300 return false;
13302 case BIT_AND_EXPR:
13303 case MAX_EXPR:
13304 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13305 strict_overflow_p)
13306 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13307 strict_overflow_p));
13309 case BIT_IOR_EXPR:
13310 case BIT_XOR_EXPR:
13311 case MIN_EXPR:
13312 case RDIV_EXPR:
13313 case TRUNC_DIV_EXPR:
13314 case CEIL_DIV_EXPR:
13315 case FLOOR_DIV_EXPR:
13316 case ROUND_DIV_EXPR:
13317 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13318 strict_overflow_p)
13319 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13320 strict_overflow_p));
13322 case TRUNC_MOD_EXPR:
13323 case CEIL_MOD_EXPR:
13324 case FLOOR_MOD_EXPR:
13325 case ROUND_MOD_EXPR:
13326 case SAVE_EXPR:
13327 case NON_LVALUE_EXPR:
13328 case FLOAT_EXPR:
13329 case FIX_TRUNC_EXPR:
13330 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13331 strict_overflow_p);
13333 case COMPOUND_EXPR:
13334 case MODIFY_EXPR:
13335 case GIMPLE_MODIFY_STMT:
13336 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13337 strict_overflow_p);
13339 case BIND_EXPR:
13340 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13341 strict_overflow_p);
13343 case COND_EXPR:
13344 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13345 strict_overflow_p)
13346 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13347 strict_overflow_p));
13349 case NOP_EXPR:
13351 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13352 tree outer_type = TREE_TYPE (t);
13354 if (TREE_CODE (outer_type) == REAL_TYPE)
13356 if (TREE_CODE (inner_type) == REAL_TYPE)
13357 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13358 strict_overflow_p);
13359 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13361 if (TYPE_UNSIGNED (inner_type))
13362 return true;
13363 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13364 strict_overflow_p);
13367 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13369 if (TREE_CODE (inner_type) == REAL_TYPE)
13370 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13371 strict_overflow_p);
13372 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13373 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13374 && TYPE_UNSIGNED (inner_type);
13377 break;
13379 case TARGET_EXPR:
13381 tree temp = TARGET_EXPR_SLOT (t);
13382 t = TARGET_EXPR_INITIAL (t);
13384 /* If the initializer is non-void, then it's a normal expression
13385 that will be assigned to the slot. */
13386 if (!VOID_TYPE_P (t))
13387 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13389 /* Otherwise, the initializer sets the slot in some way. One common
13390 way is an assignment statement at the end of the initializer. */
13391 while (1)
13393 if (TREE_CODE (t) == BIND_EXPR)
13394 t = expr_last (BIND_EXPR_BODY (t));
13395 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13396 || TREE_CODE (t) == TRY_CATCH_EXPR)
13397 t = expr_last (TREE_OPERAND (t, 0));
13398 else if (TREE_CODE (t) == STATEMENT_LIST)
13399 t = expr_last (t);
13400 else
13401 break;
13403 if ((TREE_CODE (t) == MODIFY_EXPR
13404 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13405 && GENERIC_TREE_OPERAND (t, 0) == temp)
13406 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13407 strict_overflow_p);
13409 return false;
13412 case CALL_EXPR:
13414 tree fndecl = get_callee_fndecl (t);
13415 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13416 switch (DECL_FUNCTION_CODE (fndecl))
13418 CASE_FLT_FN (BUILT_IN_ACOS):
13419 CASE_FLT_FN (BUILT_IN_ACOSH):
13420 CASE_FLT_FN (BUILT_IN_CABS):
13421 CASE_FLT_FN (BUILT_IN_COSH):
13422 CASE_FLT_FN (BUILT_IN_ERFC):
13423 CASE_FLT_FN (BUILT_IN_EXP):
13424 CASE_FLT_FN (BUILT_IN_EXP10):
13425 CASE_FLT_FN (BUILT_IN_EXP2):
13426 CASE_FLT_FN (BUILT_IN_FABS):
13427 CASE_FLT_FN (BUILT_IN_FDIM):
13428 CASE_FLT_FN (BUILT_IN_HYPOT):
13429 CASE_FLT_FN (BUILT_IN_POW10):
13430 CASE_INT_FN (BUILT_IN_FFS):
13431 CASE_INT_FN (BUILT_IN_PARITY):
13432 CASE_INT_FN (BUILT_IN_POPCOUNT):
13433 case BUILT_IN_BSWAP32:
13434 case BUILT_IN_BSWAP64:
13435 /* Always true. */
13436 return true;
13438 CASE_FLT_FN (BUILT_IN_SQRT):
13439 /* sqrt(-0.0) is -0.0. */
13440 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13441 return true;
13442 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13443 strict_overflow_p);
13445 CASE_FLT_FN (BUILT_IN_ASINH):
13446 CASE_FLT_FN (BUILT_IN_ATAN):
13447 CASE_FLT_FN (BUILT_IN_ATANH):
13448 CASE_FLT_FN (BUILT_IN_CBRT):
13449 CASE_FLT_FN (BUILT_IN_CEIL):
13450 CASE_FLT_FN (BUILT_IN_ERF):
13451 CASE_FLT_FN (BUILT_IN_EXPM1):
13452 CASE_FLT_FN (BUILT_IN_FLOOR):
13453 CASE_FLT_FN (BUILT_IN_FMOD):
13454 CASE_FLT_FN (BUILT_IN_FREXP):
13455 CASE_FLT_FN (BUILT_IN_LCEIL):
13456 CASE_FLT_FN (BUILT_IN_LDEXP):
13457 CASE_FLT_FN (BUILT_IN_LFLOOR):
13458 CASE_FLT_FN (BUILT_IN_LLCEIL):
13459 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13460 CASE_FLT_FN (BUILT_IN_LLRINT):
13461 CASE_FLT_FN (BUILT_IN_LLROUND):
13462 CASE_FLT_FN (BUILT_IN_LRINT):
13463 CASE_FLT_FN (BUILT_IN_LROUND):
13464 CASE_FLT_FN (BUILT_IN_MODF):
13465 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13466 CASE_FLT_FN (BUILT_IN_RINT):
13467 CASE_FLT_FN (BUILT_IN_ROUND):
13468 CASE_FLT_FN (BUILT_IN_SCALB):
13469 CASE_FLT_FN (BUILT_IN_SCALBLN):
13470 CASE_FLT_FN (BUILT_IN_SCALBN):
13471 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13472 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13473 CASE_FLT_FN (BUILT_IN_SINH):
13474 CASE_FLT_FN (BUILT_IN_TANH):
13475 CASE_FLT_FN (BUILT_IN_TRUNC):
13476 /* True if the 1st argument is nonnegative. */
13477 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13478 strict_overflow_p);
13480 CASE_FLT_FN (BUILT_IN_FMAX):
13481 /* True if the 1st OR 2nd arguments are nonnegative. */
13482 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13483 strict_overflow_p)
13484 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13485 strict_overflow_p)));
13487 CASE_FLT_FN (BUILT_IN_FMIN):
13488 /* True if the 1st AND 2nd arguments are nonnegative. */
13489 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13490 strict_overflow_p)
13491 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13492 strict_overflow_p)));
13494 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13495 /* True if the 2nd argument is nonnegative. */
13496 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13497 strict_overflow_p);
13499 CASE_FLT_FN (BUILT_IN_POWI):
13500 /* True if the 1st argument is nonnegative or the second
13501 argument is an even integer. */
13502 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13504 tree arg1 = CALL_EXPR_ARG (t, 1);
13505 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13506 return true;
13508 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13509 strict_overflow_p);
13511 CASE_FLT_FN (BUILT_IN_POW):
13512 /* True if the 1st argument is nonnegative or the second
13513 argument is an even integer valued real. */
13514 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13516 REAL_VALUE_TYPE c;
13517 HOST_WIDE_INT n;
13519 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13520 n = real_to_integer (&c);
13521 if ((n & 1) == 0)
13523 REAL_VALUE_TYPE cint;
13524 real_from_integer (&cint, VOIDmode, n,
13525 n < 0 ? -1 : 0, 0);
13526 if (real_identical (&c, &cint))
13527 return true;
13530 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13531 strict_overflow_p);
13533 default:
13534 break;
13538 /* ... fall through ... */
13540 default:
13541 if (truth_value_p (TREE_CODE (t)))
13542 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13543 return true;
13546 /* We don't know the sign of `t', so be conservative and return false. */
13547 return false;
13550 /* Return true if `t' is known to be non-negative. Handle warnings
13551 about undefined signed overflow. */
13553 bool
13554 tree_expr_nonnegative_p (tree t)
13556 bool ret, strict_overflow_p;
13558 strict_overflow_p = false;
13559 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13560 if (strict_overflow_p)
13561 fold_overflow_warning (("assuming signed overflow does not occur when "
13562 "determining that expression is always "
13563 "non-negative"),
13564 WARN_STRICT_OVERFLOW_MISC);
13565 return ret;
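/* A typical caller's sketch (X is a hypothetical tree):

     if (tree_expr_nonnegative_p (x))
       ... e.g. simplify fabs (x) to x, or abs (x) to x ...

   Any -Wstrict-overflow diagnostic implied by the answer is emitted by
   the wrapper itself via fold_overflow_warning.  */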
13568 /* Return true when T is an address and is known to be nonzero.
13569 For floating point we further ensure that T is not denormal.
13570 Similar logic is present in nonzero_address in rtlanal.c.
13572 If the return value is based on the assumption that signed overflow
13573 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13574 change *STRICT_OVERFLOW_P. */
13576 bool
13577 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13579 tree type = TREE_TYPE (t);
13580 bool sub_strict_overflow_p;
13582 /* Doing something useful for floating point would need more work. */
13583 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13584 return false;
13586 switch (TREE_CODE (t))
13588 case SSA_NAME:
13589 /* Query VRP to see if it has recorded any information about
13590 the range of this object. */
13591 return ssa_name_nonzero_p (t);
13593 case ABS_EXPR:
13594 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13595 strict_overflow_p);
13597 case INTEGER_CST:
13598 return !integer_zerop (t);
13600 case PLUS_EXPR:
13601 if (TYPE_OVERFLOW_UNDEFINED (type))
13603 /* In the presence of negative values it is hard
13604 to say anything definite. */
13605 sub_strict_overflow_p = false;
13606 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13607 &sub_strict_overflow_p)
13608 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13609 &sub_strict_overflow_p))
13610 return false;
13611 /* One of the operands must be positive and the other non-negative. */
13612 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13613 overflows, on a twos-complement machine the sum of two
13614 nonnegative numbers can never be zero. */
13615 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13616 strict_overflow_p)
13617 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13618 strict_overflow_p));
13620 break;
13622 case MULT_EXPR:
13623 if (TYPE_OVERFLOW_UNDEFINED (type))
13625 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13626 strict_overflow_p)
13627 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13628 strict_overflow_p))
13630 *strict_overflow_p = true;
13631 return true;
13634 break;
13636 case NOP_EXPR:
13638 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13639 tree outer_type = TREE_TYPE (t);
13641 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13642 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13643 strict_overflow_p));
13645 break;
13647 case ADDR_EXPR:
13649 tree base = get_base_address (TREE_OPERAND (t, 0));
13651 if (!base)
13652 return false;
13654 /* Weak declarations may link to NULL. */
13655 if (VAR_OR_FUNCTION_DECL_P (base))
13656 return !DECL_WEAK (base);
13658 /* Constants are never weak. */
13659 if (CONSTANT_CLASS_P (base))
13660 return true;
13662 return false;
13665 case COND_EXPR:
13666 sub_strict_overflow_p = false;
13667 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13668 &sub_strict_overflow_p)
13669 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13670 &sub_strict_overflow_p))
13672 if (sub_strict_overflow_p)
13673 *strict_overflow_p = true;
13674 return true;
13676 break;
13678 case MIN_EXPR:
13679 sub_strict_overflow_p = false;
13680 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13681 &sub_strict_overflow_p)
13682 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13683 &sub_strict_overflow_p))
13685 if (sub_strict_overflow_p)
13686 *strict_overflow_p = true;
13688 break;
13690 case MAX_EXPR:
13691 sub_strict_overflow_p = false;
13692 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13693 &sub_strict_overflow_p))
13695 if (sub_strict_overflow_p)
13696 *strict_overflow_p = true;
13698 /* When both operands are nonzero, then MAX must be too. */
13699 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13700 strict_overflow_p))
13701 return true;
13703 /* MAX where operand 0 is positive is positive. */
13704 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13705 strict_overflow_p);
13707 /* MAX where operand 1 is positive is positive. */
13708 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13709 &sub_strict_overflow_p)
13710 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13711 &sub_strict_overflow_p))
13713 if (sub_strict_overflow_p)
13714 *strict_overflow_p = true;
13715 return true;
13717 break;
13719 case COMPOUND_EXPR:
13720 case MODIFY_EXPR:
13721 case GIMPLE_MODIFY_STMT:
13722 case BIND_EXPR:
13723 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13724 strict_overflow_p);
13726 case SAVE_EXPR:
13727 case NON_LVALUE_EXPR:
13728 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13729 strict_overflow_p);
13731 case BIT_IOR_EXPR:
13732 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13733 strict_overflow_p)
13734 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13735 strict_overflow_p));
13737 case CALL_EXPR:
13738 return alloca_call_p (t);
13740 default:
13741 break;
13743 return false;
13746 /* Return true when T is an address and is known to be nonzero.
13747 Handle warnings about undefined signed overflow. */
13749 bool
13750 tree_expr_nonzero_p (tree t)
13752 bool ret, strict_overflow_p;
13754 strict_overflow_p = false;
13755 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13756 if (strict_overflow_p)
13757 fold_overflow_warning (("assuming signed overflow does not occur when "
13758 "determining that expression is always "
13759 "non-zero"),
13760 WARN_STRICT_OVERFLOW_MISC);
13761 return ret;
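/* Sketch (V_DECL is a hypothetical non-weak "int v;" declaration):
   taking its address gives a provably nonzero pointer,

     tree addr = build_fold_addr_expr (v_decl);
     gcc_assert (tree_expr_nonzero_p (addr));

   whereas the address of a DECL_WEAK symbol may legitimately compare
   equal to NULL and yields false above.  */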
13764 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13765 attempt to fold the expression to a constant without modifying TYPE,
13766 OP0 or OP1.
13768 If the expression could be simplified to a constant, then return
13769 the constant. If the expression would not be simplified to a
13770 constant, then return NULL_TREE. */
13772 tree
13773 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13775 tree tem = fold_binary (code, type, op0, op1);
13776 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13779 /* Given the components of a unary expression CODE, TYPE and OP0,
13780 attempt to fold the expression to a constant without modifying
13781 TYPE or OP0.
13783 If the expression could be simplified to a constant, then return
13784 the constant. If the expression would not be simplified to a
13785 constant, then return NULL_TREE. */
13787 tree
13788 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13790 tree tem = fold_unary (code, type, op0);
13791 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
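/* Sketch: fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
   build_int_cst (integer_type_node, 5)) yields the INTEGER_CST -5;
   the same call on a non-constant operand yields NULL_TREE rather than
   a new NEGATE_EXPR node.  */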
13794 /* If EXP represents referencing an element in a constant string
13795 (either via pointer arithmetic or array indexing), return the
13796 tree representing the value accessed, otherwise return NULL. */
13798 tree
13799 fold_read_from_constant_string (tree exp)
13801 if ((TREE_CODE (exp) == INDIRECT_REF
13802 || TREE_CODE (exp) == ARRAY_REF)
13803 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13805 tree exp1 = TREE_OPERAND (exp, 0);
13806 tree index;
13807 tree string;
13809 if (TREE_CODE (exp) == INDIRECT_REF)
13810 string = string_constant (exp1, &index);
13811 else
13813 tree low_bound = array_ref_low_bound (exp);
13814 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13816 /* Optimize the special-case of a zero lower bound.
13818 We convert the low_bound to sizetype to avoid some problems
13819 with constant folding. (E.g. suppose the lower bound is 1,
13820 and its mode is QI. Without the conversion, (ARRAY
13821 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13822 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13823 if (! integer_zerop (low_bound))
13824 index = size_diffop (index, fold_convert (sizetype, low_bound));
13826 string = exp1;
13829 if (string
13830 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13831 && TREE_CODE (string) == STRING_CST
13832 && TREE_CODE (index) == INTEGER_CST
13833 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13834 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13835 == MODE_INT)
13836 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13837 return fold_convert (TREE_TYPE (exp),
13838 build_int_cst (NULL_TREE,
13839 (TREE_STRING_POINTER (string)
13840 [TREE_INT_CST_LOW (index)])));
13842 return NULL;
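/* Sketch: for a reference into the constant string "abc" -- say the
   ARRAY_REF "abc"[1] -- this returns the character constant 'b' as an
   INTEGER_CST of the element type; an index at or past
   TREE_STRING_LENGTH yields NULL instead.  */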
13845 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13846 an integer constant or real constant.
13848 TYPE is the type of the result. */
13850 static tree
13851 fold_negate_const (tree arg0, tree type)
13853 tree t = NULL_TREE;
13855 switch (TREE_CODE (arg0))
13857 case INTEGER_CST:
13859 unsigned HOST_WIDE_INT low;
13860 HOST_WIDE_INT high;
13861 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13862 TREE_INT_CST_HIGH (arg0),
13863 &low, &high);
13864 t = force_fit_type_double (type, low, high, 1,
13865 (overflow | TREE_OVERFLOW (arg0))
13866 && !TYPE_UNSIGNED (type));
13867 break;
13870 case REAL_CST:
13871 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13872 break;
13874 default:
13875 gcc_unreachable ();
13878 return t;
13881 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13882 an integer constant or real constant.
13884 TYPE is the type of the result. */
13886 tree
13887 fold_abs_const (tree arg0, tree type)
13889 tree t = NULL_TREE;
13891 switch (TREE_CODE (arg0))
13893 case INTEGER_CST:
13894 /* If the value is unsigned, then the absolute value is
13895 the same as the ordinary value. */
13896 if (TYPE_UNSIGNED (type))
13897 t = arg0;
13898 /* Similarly, if the value is non-negative. */
13899 else if (INT_CST_LT (integer_minus_one_node, arg0))
13900 t = arg0;
13901 /* If the value is negative, then the absolute value is
13902 its negation. */
13903 else
13905 unsigned HOST_WIDE_INT low;
13906 HOST_WIDE_INT high;
13907 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13908 TREE_INT_CST_HIGH (arg0),
13909 &low, &high);
13910 t = force_fit_type_double (type, low, high, -1,
13911 overflow | TREE_OVERFLOW (arg0));
13913 break;
13915 case REAL_CST:
13916 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13917 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13918 else
13919 t = arg0;
13920 break;
13922 default:
13923 gcc_unreachable ();
13926 return t;
13929 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13930 constant. TYPE is the type of the result. */
13932 static tree
13933 fold_not_const (tree arg0, tree type)
13935 tree t = NULL_TREE;
13937 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13939 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13940 ~TREE_INT_CST_HIGH (arg0), 0,
13941 TREE_OVERFLOW (arg0));
13943 return t;
13946 /* Given CODE, a relational operator, the target type, TYPE and two
13947 constant operands OP0 and OP1, return the result of the
13948 relational operation. If the result is not a compile time
13949 constant, then return NULL_TREE. */
13951 static tree
13952 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13954 int result, invert;
13956 /* From here on, the only cases we handle are when the result is
13957 known to be a constant. */
13959 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13961 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13962 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13964 /* Handle the cases where either operand is a NaN. */
13965 if (real_isnan (c0) || real_isnan (c1))
13967 switch (code)
13969 case EQ_EXPR:
13970 case ORDERED_EXPR:
13971 result = 0;
13972 break;
13974 case NE_EXPR:
13975 case UNORDERED_EXPR:
13976 case UNLT_EXPR:
13977 case UNLE_EXPR:
13978 case UNGT_EXPR:
13979 case UNGE_EXPR:
13980 case UNEQ_EXPR:
13981 result = 1;
13982 break;
13984 case LT_EXPR:
13985 case LE_EXPR:
13986 case GT_EXPR:
13987 case GE_EXPR:
13988 case LTGT_EXPR:
13989 if (flag_trapping_math)
13990 return NULL_TREE;
13991 result = 0;
13992 break;
13994 default:
13995 gcc_unreachable ();
13998 return constant_boolean_node (result, type);
14001 return constant_boolean_node (real_compare (code, c0, c1), type);
14004 /* Handle equality/inequality of complex constants. */
14005 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14007 tree rcond = fold_relational_const (code, type,
14008 TREE_REALPART (op0),
14009 TREE_REALPART (op1));
14010 tree icond = fold_relational_const (code, type,
14011 TREE_IMAGPART (op0),
14012 TREE_IMAGPART (op1));
14013 if (code == EQ_EXPR)
14014 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14015 else if (code == NE_EXPR)
14016 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14017 else
14018 return NULL_TREE;
14021 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14023 To compute GT, swap the arguments and do LT.
14024 To compute GE, do LT and invert the result.
14025 To compute LE, swap the arguments, do LT and invert the result.
14026 To compute NE, do EQ and invert the result.
14028 Therefore, the code below must handle only EQ and LT. */
14030 if (code == LE_EXPR || code == GT_EXPR)
14032 tree tem = op0;
14033 op0 = op1;
14034 op1 = tem;
14035 code = swap_tree_comparison (code);
14038 /* Note that it is safe to invert for real values here because we
14039 have already handled the one case where it matters. */
14041 invert = 0;
14042 if (code == NE_EXPR || code == GE_EXPR)
14044 invert = 1;
14045 code = invert_tree_comparison (code, false);
14048 /* Compute a result for LT or EQ if args permit;
14049 otherwise return NULL_TREE. */
14050 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14052 if (code == EQ_EXPR)
14053 result = tree_int_cst_equal (op0, op1);
14054 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14055 result = INT_CST_LT_UNSIGNED (op0, op1);
14056 else
14057 result = INT_CST_LT (op0, op1);
14059 else
14060 return NULL_TREE;
14062 if (invert)
14063 result ^= 1;
14064 return constant_boolean_node (result, type);
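/* Sketch of the NaN handling above: with C0 a NaN constant, EQ_EXPR and
   ORDERED_EXPR fold to false and NE_EXPR to true, while the signaling
   comparisons (LT, LE, GT, GE, LTGT) are left unfolded -- NULL_TREE --
   under -ftrapping-math because they may raise an invalid-operation
   exception at run time.  */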
14067 /* Build an expression for a cleanup point containing EXPR with type TYPE.
14068 Don't build a cleanup point expression for EXPR when it doesn't have side
14069 effects. */
14071 tree
14072 fold_build_cleanup_point_expr (tree type, tree expr)
14074 /* If the expression does not have side effects then we don't have to wrap
14075 it with a cleanup point expression. */
14076 if (!TREE_SIDE_EFFECTS (expr))
14077 return expr;
14079 /* If the expression is a return, check whether the expression inside the
14080 return, or the right-hand side of the modify expression inside the return,
14081 has no side effects. If either has none, we don't need to wrap the
14082 expression in a cleanup point expression. Note we don't check the
14083 left-hand side of the modify because it should always be a return decl. */
14084 if (TREE_CODE (expr) == RETURN_EXPR)
14086 tree op = TREE_OPERAND (expr, 0);
14087 if (!op || !TREE_SIDE_EFFECTS (op))
14088 return expr;
14089 op = TREE_OPERAND (op, 1);
14090 if (!TREE_SIDE_EFFECTS (op))
14091 return expr;
14094 return build1 (CLEANUP_POINT_EXPR, type, expr);
14097 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14098 avoid confusing the gimplify process. */
14100 tree
14101 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14103 /* The size of the object is not relevant when talking about its address. */
14104 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14105 t = TREE_OPERAND (t, 0);
14107 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14108 if (TREE_CODE (t) == INDIRECT_REF
14109 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14111 t = TREE_OPERAND (t, 0);
14112 if (TREE_TYPE (t) != ptrtype)
14113 t = build1 (NOP_EXPR, ptrtype, t);
14115 else
14117 tree base = t;
14119 while (handled_component_p (base))
14120 base = TREE_OPERAND (base, 0);
14121 if (DECL_P (base))
14122 TREE_ADDRESSABLE (base) = 1;
14124 t = build1 (ADDR_EXPR, ptrtype, t);
14127 return t;
14130 tree
14131 build_fold_addr_expr (tree t)
14133 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
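/* The cases above, written as C-level source transformations
   (illustrative; `z' is assumed to be a _Complex double):

       *&x                =>  x
       *(foo *)&fooarray  =>  fooarray[0]
       *(double *)&z      =>  __real__ z
       ((double *)&z)[1]  =>  __imag__ z

   Each transformation applies only when the requested TYPE matches
   the referenced type exactly.  */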
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
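/* Example (illustrative): if the value of `x + f ()' is ignored,
   only the call has side effects, so the PLUS_EXPR is stripped and
   `f ()' remains; an expression with no side effects at all reduces
   to integer_zero_node.  */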
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not an
     INTEGER_CST, because for a constant the check is more expensive
     than simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
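/* Worked example (illustrative): rounding the constant 37 up to a
   multiple of 8 takes the power-of-two path above:

       low = 37       (binary 100101)
       low &= ~7  =>  32
       low += 8   =>  40

   For a non-constant sizetype VALUE the same result is obtained by
   folding (VALUE + 7) & -8 through size_binop.  */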
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not an
     INTEGER_CST, because for a constant the check is more expensive
     than simply performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
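/* Example (illustrative): round_down (size_int (37), 8) folds to
   size_int (32), since 37 & -8 == 32; a divisor that is not a
   power of two instead goes through FLOOR_DIV_EXPR followed by
   MULT_EXPR.  */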
/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
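/* Example (illustrative, for a hypothetical struct `s' whose field
   `b' lies 4 bytes into it): splitting `&s.b' yields the core `&s'
   with *PBITPOS == 32 and *POFFSET == NULL_TREE; anything that is
   not an ADDR_EXPR is returned unchanged with a zero offset.  */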
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
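/* Example (illustrative, assuming `int a[10]' with 4-byte elements
   and BITS_PER_UNIT == 8): for E1 == &a[3] and E2 == &a[1] both
   cores are `&a', the bit positions are 96 and 32, and *DIFF is set
   to (96 - 32) / 8 == 8.  Addresses based on different objects fail
   the operand_equal_p check and yield false.  */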
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign function call and return the first
               argument, keeping the second for its side effects.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
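/* Examples (illustrative): when the sign of the result does not
   matter, `-x * y' simplifies to `x * y', `copysign (x, z)' to `x'
   (keeping `z' for its side effects via omit_one_operand), and
   `sin (-x)' to `sin (x)', since sine is odd.  */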